Diffstat (limited to 'bitbake/lib/bb')
-rw-r--r--  bitbake/lib/bb/COW.py | 10
-rw-r--r--  bitbake/lib/bb/__init__.py | 115
-rwxr-xr-x  bitbake/lib/bb/acl.py | 2
-rw-r--r--  bitbake/lib/bb/asyncrpc/__init__.py | 2
-rw-r--r--  bitbake/lib/bb/asyncrpc/client.py | 114
-rw-r--r--  bitbake/lib/bb/asyncrpc/serv.py | 37
-rw-r--r--  bitbake/lib/bb/build.py | 27
-rw-r--r--  bitbake/lib/bb/cache.py | 35
-rw-r--r--  bitbake/lib/bb/checksum.py | 25
-rw-r--r--  bitbake/lib/bb/codeparser.py | 97
-rw-r--r--  bitbake/lib/bb/command.py | 39
-rw-r--r--  bitbake/lib/bb/compress/lz4.py | 4
-rw-r--r--  bitbake/lib/bb/cooker.py | 170
-rw-r--r--  bitbake/lib/bb/cookerdata.py | 20
-rw-r--r--  bitbake/lib/bb/data.py | 2
-rw-r--r--  bitbake/lib/bb/data_smart.py | 117
-rw-r--r--  bitbake/lib/bb/event.py | 29
-rw-r--r--  bitbake/lib/bb/exceptions.py | 96
-rw-r--r--  bitbake/lib/bb/fetch2/__init__.py | 224
-rw-r--r--  bitbake/lib/bb/fetch2/az.py | 9
-rw-r--r--  bitbake/lib/bb/fetch2/clearcase.py | 6
-rw-r--r--  bitbake/lib/bb/fetch2/crate.py | 9
-rw-r--r--  bitbake/lib/bb/fetch2/gcp.py | 15
-rw-r--r--  bitbake/lib/bb/fetch2/git.py | 459
-rw-r--r--  bitbake/lib/bb/fetch2/gitsm.py | 126
-rw-r--r--  bitbake/lib/bb/fetch2/gomod.py | 273
-rw-r--r--  bitbake/lib/bb/fetch2/local.py | 9
-rw-r--r--  bitbake/lib/bb/fetch2/npm.py | 24
-rw-r--r--  bitbake/lib/bb/fetch2/npmsw.py | 98
-rw-r--r--  bitbake/lib/bb/fetch2/s3.py | 2
-rw-r--r--  bitbake/lib/bb/fetch2/sftp.py | 2
-rw-r--r--  bitbake/lib/bb/fetch2/ssh.py | 3
-rw-r--r--  bitbake/lib/bb/fetch2/svn.py | 3
-rw-r--r--  bitbake/lib/bb/fetch2/wget.py | 108
-rw-r--r--  bitbake/lib/bb/msg.py | 4
-rw-r--r--  bitbake/lib/bb/parse/__init__.py | 49
-rw-r--r--  bitbake/lib/bb/parse/ast.py | 154
-rw-r--r--  bitbake/lib/bb/parse/parse_py/BBHandler.py | 53
-rw-r--r--  bitbake/lib/bb/parse/parse_py/ConfHandler.py | 24
-rw-r--r--  bitbake/lib/bb/persist_data.py | 271
-rw-r--r--  bitbake/lib/bb/runqueue.py | 205
-rw-r--r--  bitbake/lib/bb/server/process.py | 46
-rw-r--r--  bitbake/lib/bb/server/xmlrpcserver.py | 11
-rw-r--r--  bitbake/lib/bb/siggen.py | 77
-rw-r--r--  bitbake/lib/bb/tests/codeparser.py | 40
-rw-r--r--  bitbake/lib/bb/tests/compression.py | 4
-rw-r--r--  bitbake/lib/bb/tests/data.py | 49
-rw-r--r--  bitbake/lib/bb/tests/fetch-testdata/software/miniupnp/download.php | 3528
-rw-r--r--  bitbake/lib/bb/tests/fetch.py | 1102
-rw-r--r--  bitbake/lib/bb/tests/parse.py | 142
-rw-r--r--  bitbake/lib/bb/tests/persist_data.py | 129
-rw-r--r--  bitbake/lib/bb/tests/runqueue-tests/classes/base.bbclass | 2
-rw-r--r--  bitbake/lib/bb/tests/runqueue-tests/recipes/g1.bb | 2
-rw-r--r--  bitbake/lib/bb/tests/runqueue-tests/recipes/h1.bb | 0
-rw-r--r--  bitbake/lib/bb/tests/runqueue.py | 11
-rw-r--r--  bitbake/lib/bb/tests/utils.py | 19
-rw-r--r--  bitbake/lib/bb/tinfoil.py | 183
-rw-r--r--  bitbake/lib/bb/ui/buildinfohelper.py | 5
-rw-r--r--  bitbake/lib/bb/ui/knotty.py | 53
-rw-r--r--  bitbake/lib/bb/ui/teamcity.py | 5
-rw-r--r--  bitbake/lib/bb/ui/uihelper.py | 2
-rw-r--r--  bitbake/lib/bb/utils.py | 673
62 files changed, 7079 insertions, 2075 deletions
diff --git a/bitbake/lib/bb/COW.py b/bitbake/lib/bb/COW.py
index 76bc08a3ea..4af03c54ad 100644
--- a/bitbake/lib/bb/COW.py
+++ b/bitbake/lib/bb/COW.py
@@ -36,8 +36,9 @@ class COWDictMeta(COWMeta):
     __marker__ = tuple()
 
     def __str__(cls):
-        # FIXME: I have magic numbers!
-        return "<COWDict Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) - 3)
+        ignored_keys = set(["__count__", "__doc__", "__module__", "__firstlineno__", "__static_attributes__"])
+        keys = set(cls.__dict__.keys()) - ignored_keys
+        return "<COWDict Level: %i Current Keys: %i>" % (cls.__count__, len(keys))
 
     __repr__ = __str__
 
@@ -161,8 +162,9 @@ class COWDictMeta(COWMeta):
 
 class COWSetMeta(COWDictMeta):
     def __str__(cls):
-        # FIXME: I have magic numbers!
-        return "<COWSet Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) - 3)
+        ignored_keys = set(["__count__", "__doc__", "__module__", "__firstlineno__", "__static_attributes__"])
+        keys = set(cls.__dict__.keys()) - ignored_keys
+        return "<COWSet Level: %i Current Keys: %i>" % (cls.__count__, len(keys))
 
     __repr__ = __str__
 
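The hard-coded "- 3" above assumed a class __dict__ always carries exactly three housekeeping entries; newer CPython grew that set (3.13 adds __firstlineno__ and __static_attributes__ for classes defined with a class statement), which skewed the count. A minimal standalone sketch of the filtered-key idea, independent of BitBake:

# Illustrative only: count "real" class keys by subtracting known
# housekeeping attributes instead of a hard-coded offset.
C = type("C", (), {"a": 1, "b": 2})
housekeeping = {"__dict__", "__weakref__", "__doc__", "__module__",
                "__qualname__", "__firstlineno__", "__static_attributes__"}
real_keys = set(C.__dict__) - housekeeping
print(len(real_keys))  # 2, regardless of interpreter version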
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
index 15013540c2..bf4c54d829 100644
--- a/bitbake/lib/bb/__init__.py
+++ b/bitbake/lib/bb/__init__.py
@@ -9,11 +9,11 @@
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
-__version__ = "2.9.0"
+__version__ = "2.15.1"
 
 import sys
-if sys.version_info < (3, 8, 0):
-    raise RuntimeError("Sorry, python 3.8.0 or later is required for this version of bitbake")
+if sys.version_info < (3, 9, 0):
+    raise RuntimeError("Sorry, python 3.9.0 or later is required for this version of bitbake")
 
 if sys.version_info < (3, 10, 0):
     # With python 3.8 and 3.9, we see errors of "libgcc_s.so.1 must be installed for pthread_cancel to work"
@@ -36,6 +36,7 @@ class BBHandledException(Exception):
 
 import os
 import logging
+from collections import namedtuple
 
 
 class NullHandler(logging.Handler):
@@ -103,26 +104,6 @@ class BBLoggerAdapter(logging.LoggerAdapter, BBLoggerMixin):
         self.setup_bblogger(logger.name)
         super().__init__(logger, *args, **kwargs)
 
-    if sys.version_info < (3, 6):
-        # These properties were added in Python 3.6. Add them in older versions
-        # for compatibility
-        @property
-        def manager(self):
-            return self.logger.manager
-
-        @manager.setter
-        def manager(self, value):
-            self.logger.manager = value
-
-        @property
-        def name(self):
-            return self.logger.name
-
-        def __repr__(self):
-            logger = self.logger
-            level = logger.getLevelName(logger.getEffectiveLevel())
-            return '<%s %s (%s)>' % (self.__class__.__name__, logger.name, level)
-
 logging.LoggerAdapter = BBLoggerAdapter
 
 logger = logging.getLogger("BitBake")
@@ -148,9 +129,25 @@ sys.modules['bb.fetch'] = sys.modules['bb.fetch2']
 
 # Messaging convenience functions
 def plain(*args):
+    """
+    Prints a message at "plain" level (higher level than a ``bb.note()``).
+
+    Arguments:
+
+    - ``args``: one or more strings to print.
+    """
     mainlogger.plain(''.join(args))
 
 def debug(lvl, *args):
+    """
+    Prints a debug message.
+
+    Arguments:
+
+    - ``lvl``: debug level. Higher value increases the debug level
+      (determined by ``bitbake -D``).
+    - ``args``: one or more strings to print.
+    """
     if isinstance(lvl, str):
         mainlogger.warning("Passed invalid debug level '%s' to bb.debug", lvl)
         args = (lvl,) + args
@@ -158,33 +155,81 @@ def debug(lvl, *args):
     mainlogger.bbdebug(lvl, ''.join(args))
 
 def note(*args):
+    """
+    Prints a message at "note" level.
+
+    Arguments:
+
+    - ``args``: one or more strings to print.
+    """
     mainlogger.info(''.join(args))
 
-#
-# A higher prioity note which will show on the console but isn't a warning
-#
-# Something is happening the user should be aware of but they probably did
-# something to make it happen
-#
 def verbnote(*args):
+    """
+    A higher priority note which will show on the console but isn't a warning.
+
+    Use in contexts when something is happening the user should be aware of but
+    they probably did something to make it happen.
+
+    Arguments:
+
+    - ``args``: one or more strings to print.
+    """
     mainlogger.verbnote(''.join(args))
 
 #
 # Warnings - things the user likely needs to pay attention to and fix
 #
 def warn(*args):
+    """
+    Prints a warning message.
+
+    Arguments:
+
+    - ``args``: one or more strings to print.
+    """
     mainlogger.warning(''.join(args))
 
 def warnonce(*args):
+    """
+    Prints a warning message like ``bb.warn()``, but only prints the message
+    once.
+
+    Arguments:
+
+    - ``args``: one or more strings to print.
+    """
     mainlogger.warnonce(''.join(args))
 
 def error(*args, **kwargs):
+    """
+    Prints an error message.
+
+    Arguments:
+
+    - ``args``: one or more strings to print.
+    """
     mainlogger.error(''.join(args), extra=kwargs)
 
 def erroronce(*args):
+    """
+    Prints an error message like ``bb.error()``, but only prints the message
+    once.
+
+    Arguments:
+
+    - ``args``: one or more strings to print.
+    """
     mainlogger.erroronce(''.join(args))
 
 def fatal(*args, **kwargs):
+    """
+    Prints an error message and stops the BitBake execution.
+
+    Arguments:
+
+    - ``args``: one or more strings to print.
+    """
     mainlogger.critical(''.join(args), extra=kwargs)
     raise BBHandledException()
 
@@ -213,7 +258,6 @@ def deprecated(func, name=None, advice=""):
 # For compatibility
 def deprecate_import(current, modulename, fromlist, renames = None):
     """Import objects from one module into another, wrapping them with a DeprecationWarning"""
-    import sys
 
     module = __import__(modulename, fromlist = fromlist)
     for position, objname in enumerate(fromlist):
@@ -227,3 +271,14 @@ def deprecate_import(current, modulename, fromlist, renames = None):
 
         setattr(sys.modules[current], newname, newobj)
 
+TaskData = namedtuple("TaskData", [
+    "pn",
+    "taskname",
+    "fn",
+    "deps",
+    "provides",
+    "taskhash",
+    "unihash",
+    "hashfn",
+    "taskhash_deps",
+])
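The new bb.TaskData namedtuple gives consumers of task information named fields instead of positional tuple indexing. A hypothetical construction, with invented values purely to show field access:

from collections import namedtuple

TaskData = namedtuple("TaskData", [
    "pn", "taskname", "fn", "deps", "provides",
    "taskhash", "unihash", "hashfn", "taskhash_deps",
])

# All values below are made up for illustration:
td = TaskData(pn="zlib", taskname="do_compile", fn="/path/to/zlib_1.3.bb",
              deps=["do_configure"], provides=["zlib"], taskhash="1f2e3d...",
              unihash="1f2e3d...", hashfn="example-hashfn", taskhash_deps=[])
print(td.pn, td.taskname)  # named access instead of td[0], td[1]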
diff --git a/bitbake/lib/bb/acl.py b/bitbake/lib/bb/acl.py
index 0f41b275cf..e9dbdb617f 100755
--- a/bitbake/lib/bb/acl.py
+++ b/bitbake/lib/bb/acl.py
@@ -195,8 +195,6 @@ class ACL(object):
 
 def main():
     import argparse
-    import pwd
-    import grp
     from pathlib import Path
 
     parser = argparse.ArgumentParser()
diff --git a/bitbake/lib/bb/asyncrpc/__init__.py b/bitbake/lib/bb/asyncrpc/__init__.py
index 639e1607f8..a4371643d7 100644
--- a/bitbake/lib/bb/asyncrpc/__init__.py
+++ b/bitbake/lib/bb/asyncrpc/__init__.py
@@ -5,7 +5,7 @@
 #
 
 
-from .client import AsyncClient, Client, ClientPool
+from .client import AsyncClient, Client
 from .serv import AsyncServer, AsyncServerConnection
 from .connection import DEFAULT_MAX_CHUNK
 from .exceptions import (
diff --git a/bitbake/lib/bb/asyncrpc/client.py b/bitbake/lib/bb/asyncrpc/client.py
index a350b4fb12..17b72033b9 100644
--- a/bitbake/lib/bb/asyncrpc/client.py
+++ b/bitbake/lib/bb/asyncrpc/client.py
@@ -24,6 +24,12 @@ ADDR_TYPE_UNIX = 0
 ADDR_TYPE_TCP = 1
 ADDR_TYPE_WS = 2
 
+WEBSOCKETS_MIN_VERSION = (9, 1)
+# Need websockets 10 with python 3.10+
+if sys.version_info >= (3, 10, 0):
+    WEBSOCKETS_MIN_VERSION = (10, 0)
+
+
 def parse_address(addr):
     if addr.startswith(UNIX_PREFIX):
         return (ADDR_TYPE_UNIX, (addr[len(UNIX_PREFIX) :],))
@@ -39,6 +45,7 @@ def parse_address(addr):
 
     return (ADDR_TYPE_TCP, (host, int(port)))
 
+
 class AsyncClient(object):
     def __init__(
         self,
@@ -86,8 +93,35 @@ class AsyncClient(object):
     async def connect_websocket(self, uri):
         import websockets
 
+        try:
+            version = tuple(
+                int(v)
+                for v in websockets.__version__.split(".")[
+                    0 : len(WEBSOCKETS_MIN_VERSION)
+                ]
+            )
+        except ValueError:
+            raise ImportError(
+                f"Unable to parse websockets version '{websockets.__version__}'"
+            )
+
+        if version < WEBSOCKETS_MIN_VERSION:
+            min_ver_str = ".".join(str(v) for v in WEBSOCKETS_MIN_VERSION)
+            raise ImportError(
+                f"Websockets version {websockets.__version__} is less than minimum required version {min_ver_str}"
+            )
+
         async def connect_sock():
-            websocket = await websockets.connect(uri, ping_interval=None)
+            try:
+                websocket = await websockets.connect(
+                    uri,
+                    ping_interval=None,
+                    open_timeout=self.timeout,
+                )
+            except asyncio.exceptions.TimeoutError:
+                raise ConnectionError("Timeout while connecting to websocket")
+            except (OSError, websockets.InvalidHandshake, websockets.InvalidURI) as exc:
+                raise ConnectionError(f"Could not connect to websocket: {exc}") from exc
             return WebsocketConnection(websocket, self.timeout)
 
         self._connect_sock = connect_sock
@@ -225,85 +259,9 @@ class Client(object):
     def close(self):
         if self.loop:
             self.loop.run_until_complete(self.client.close())
-            if sys.version_info >= (3, 6):
-                self.loop.run_until_complete(self.loop.shutdown_asyncgens())
-            self.loop.close()
-            self.loop = None
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, exc_type, exc_value, traceback):
-        self.close()
-        return False
-
-
-class ClientPool(object):
-    def __init__(self, max_clients):
-        self.avail_clients = []
-        self.num_clients = 0
-        self.max_clients = max_clients
-        self.loop = None
-        self.client_condition = None
-
-    @abc.abstractmethod
-    async def _new_client(self):
-        raise NotImplementedError("Must be implemented in derived class")
-
-    def close(self):
-        if self.client_condition:
-            self.client_condition = None
-
-        if self.loop:
-            self.loop.run_until_complete(self.__close_clients())
             self.loop.run_until_complete(self.loop.shutdown_asyncgens())
             self.loop.close()
             self.loop = None
-
-    def run_tasks(self, tasks):
-        if not self.loop:
-            self.loop = asyncio.new_event_loop()
-
-        thread = Thread(target=self.__thread_main, args=(tasks,))
-        thread.start()
-        thread.join()
-
-    @contextlib.asynccontextmanager
-    async def get_client(self):
-        async with self.client_condition:
-            if self.avail_clients:
-                client = self.avail_clients.pop()
-            elif self.num_clients < self.max_clients:
-                self.num_clients += 1
-                client = await self._new_client()
-            else:
-                while not self.avail_clients:
-                    await self.client_condition.wait()
-                client = self.avail_clients.pop()
-
-            try:
-                yield client
-            finally:
-                async with self.client_condition:
-                    self.avail_clients.append(client)
-                    self.client_condition.notify()
-
-    def __thread_main(self, tasks):
-        async def process_task(task):
-            async with self.get_client() as client:
-                await task(client)
-
-        asyncio.set_event_loop(self.loop)
-        if not self.client_condition:
-            self.client_condition = asyncio.Condition()
-        tasks = [process_task(t) for t in tasks]
-        self.loop.run_until_complete(asyncio.gather(*tasks))
-
-    async def __close_clients(self):
-        for c in self.avail_clients:
-            await c.close()
-        self.avail_clients = []
-        self.num_clients = 0
 
     def __enter__(self):
         return self
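The version gate added to connect_websocket relies on comparing integer tuples built from the dotted version string. A sketch of that idiom in isolation (meets_minimum is a hypothetical helper, not BitBake API):

def meets_minimum(version_str, minimum):
    # Keep as many components as the minimum specifies, compare as int tuples.
    version = tuple(int(v) for v in version_str.split(".")[:len(minimum)])
    return version >= minimum

assert meets_minimum("10.4", (10, 0))
assert meets_minimum("9.1", (9, 1))
assert not meets_minimum("9.0.2", (9, 1))  # (9, 0) < (9, 1)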
diff --git a/bitbake/lib/bb/asyncrpc/serv.py b/bitbake/lib/bb/asyncrpc/serv.py
index a66117acad..667217c5c1 100644
--- a/bitbake/lib/bb/asyncrpc/serv.py
+++ b/bitbake/lib/bb/asyncrpc/serv.py
@@ -138,14 +138,20 @@ class StreamServer(object):
 
 
 class TCPStreamServer(StreamServer):
-    def __init__(self, host, port, handler, logger):
+    def __init__(self, host, port, handler, logger, *, reuseport=False):
         super().__init__(handler, logger)
         self.host = host
         self.port = port
+        self.reuseport = reuseport
 
     def start(self, loop):
         self.server = loop.run_until_complete(
-            asyncio.start_server(self.handle_stream_client, self.host, self.port)
+            asyncio.start_server(
+                self.handle_stream_client,
+                self.host,
+                self.port,
+                reuse_port=self.reuseport,
+            )
         )
 
         for s in self.server.sockets:
@@ -209,11 +215,12 @@ class UnixStreamServer(StreamServer):
 
 
 class WebsocketsServer(object):
-    def __init__(self, host, port, handler, logger):
+    def __init__(self, host, port, handler, logger, *, reuseport=False):
         self.host = host
         self.port = port
         self.handler = handler
         self.logger = logger
+        self.reuseport = reuseport
 
     def start(self, loop):
         import websockets.server
@@ -224,6 +231,7 @@ class WebsocketsServer(object):
                 self.host,
                 self.port,
                 ping_interval=None,
+                reuse_port=self.reuseport,
             )
         )
 
@@ -262,14 +270,26 @@ class AsyncServer(object):
         self.loop = None
         self.run_tasks = []
 
-    def start_tcp_server(self, host, port):
-        self.server = TCPStreamServer(host, port, self._client_handler, self.logger)
+    def start_tcp_server(self, host, port, *, reuseport=False):
+        self.server = TCPStreamServer(
+            host,
+            port,
+            self._client_handler,
+            self.logger,
+            reuseport=reuseport,
+        )
 
     def start_unix_server(self, path):
         self.server = UnixStreamServer(path, self._client_handler, self.logger)
 
-    def start_websocket_server(self, host, port):
-        self.server = WebsocketsServer(host, port, self._client_handler, self.logger)
+    def start_websocket_server(self, host, port, reuseport=False):
+        self.server = WebsocketsServer(
+            host,
+            port,
+            self._client_handler,
+            self.logger,
+            reuseport=reuseport,
+        )
 
     async def _client_handler(self, socket):
         address = socket.address
@@ -368,8 +388,7 @@ class AsyncServer(object):
 
             self._serve_forever(tasks)
 
-            if sys.version_info >= (3, 6):
-                self.loop.run_until_complete(self.loop.shutdown_asyncgens())
+            self.loop.run_until_complete(self.loop.shutdown_asyncgens())
             self.loop.close()
 
         queue = multiprocessing.Queue()
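The reuseport keyword threaded through the server classes ultimately reaches asyncio's reuse_port argument, which sets SO_REUSEPORT so several processes can bind the same address. A standalone sketch (Linux; the port number is arbitrary, and asyncio raises ValueError where SO_REUSEPORT is unsupported):

import asyncio

async def main():
    async def handler(reader, writer):
        writer.close()
        await writer.wait_closed()

    # Two listeners on one address only succeed because of reuse_port=True.
    a = await asyncio.start_server(handler, "127.0.0.1", 8765, reuse_port=True)
    b = await asyncio.start_server(handler, "127.0.0.1", 8765, reuse_port=True)
    print("both bound to", a.sockets[0].getsockname())
    for srv in (a, b):
        srv.close()
        await srv.wait_closed()

asyncio.run(main())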
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
index 44d08f5c55..40839a81b5 100644
--- a/bitbake/lib/bb/build.py
+++ b/bitbake/lib/bb/build.py
@@ -197,6 +197,8 @@ def exec_func(func, d, dirs = None):
         for cdir in d.expand(cleandirs).split():
             bb.utils.remove(cdir, True)
             bb.utils.mkdirhier(cdir)
+            if cdir == oldcwd:
+                os.chdir(cdir)
 
     if flags and dirs is None:
         dirs = flags.get('dirs')
@@ -395,7 +397,7 @@ def create_progress_handler(func, progress, logfile, d):
         # Use specified regex
         return bb.progress.OutOfProgressHandler(d, regex=progress.split(':', 1)[1], outfile=logfile)
     elif progress.startswith("custom:"):
-        # Use a custom progress handler that was injected via OE_EXTRA_IMPORTS or __builtins__
+        # Use a custom progress handler that was injected via other means
         import functools
         from types import ModuleType
 
@@ -741,7 +743,7 @@ def _exec_task(fn, task, d, quieterr):
 
         if quieterr:
             if not handled:
-                logger.warning(repr(exc))
+                logger.warning(str(exc))
             event.fire(TaskFailedSilent(task, fn, logfn, localdata), localdata)
         else:
             errprinted = errchk.triggered
@@ -750,7 +752,7 @@ def _exec_task(fn, task, d, quieterr):
             if verboseStdoutLogging or handled:
                 errprinted = True
             if not handled:
-                logger.error(repr(exc))
+                logger.error(str(exc))
             event.fire(TaskFailed(task, fn, logfn, localdata, errprinted), localdata)
         return 1
 
@@ -930,9 +932,13 @@ def add_tasks(tasklist, d):
     # don't assume holding a reference
     d.setVar('_task_deps', task_deps)
 
+def ensure_task_prefix(name):
+    if name[:3] != "do_":
+        name = "do_" + name
+    return name
+
 def addtask(task, before, after, d):
-    if task[:3] != "do_":
-        task = "do_" + task
+    task = ensure_task_prefix(task)
 
     d.setVarFlag(task, "task", 1)
     bbtasks = d.getVar('__BBTASKS', False) or []
@@ -944,19 +950,20 @@ def addtask(task, before, after, d):
     if after is not None:
         # set up deps for function
         for entry in after.split():
+            entry = ensure_task_prefix(entry)
             if entry not in existing:
                 existing.append(entry)
         d.setVarFlag(task, "deps", existing)
     if before is not None:
         # set up things that depend on this func
         for entry in before.split():
+            entry = ensure_task_prefix(entry)
             existing = d.getVarFlag(entry, "deps", False) or []
             if task not in existing:
                 d.setVarFlag(entry, "deps", [task] + existing)
 
 def deltask(task, d):
-    if task[:3] != "do_":
-        task = "do_" + task
+    task = ensure_task_prefix(task)
 
     bbtasks = d.getVar('__BBTASKS', False) or []
     if task in bbtasks:
@@ -1021,3 +1028,9 @@ def tasksbetween(task_start, task_end, d):
         chain.pop()
     follow_chain(task_start, task_end)
     return outtasks
+
+def listtasks(d):
+    """
+    Return the list of tasks in the current recipe.
+    """
+    return tuple(d.getVar('__BBTASKS', False) or ())
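ensure_task_prefix simply normalizes task names, and applying it to the before/after lists means dependencies may now be declared without the do_ prefix too. The helper shown standalone, with the behaviour it guarantees:

def ensure_task_prefix(name):
    if name[:3] != "do_":
        name = "do_" + name
    return name

assert ensure_task_prefix("compile") == "do_compile"
assert ensure_task_prefix("do_compile") == "do_compile"
# So "addtask sign after unpack" and "addtask do_sign after do_unpack"
# now record identical dependency entries.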
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index 18d5574a31..2361c5684d 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -28,7 +28,7 @@ import shutil
 
 logger = logging.getLogger("BitBake.Cache")
 
-__cache_version__ = "155"
+__cache_version__ = "156"
 
 def getCacheFile(path, filename, mc, data_hash):
     mcspec = ''
@@ -395,7 +395,7 @@ class Cache(object):
         # It will be used later for deciding whether we
         # need extra cache file dump/load support
         self.mc = mc
-        self.logger = PrefixLoggerAdapter("Cache: %s: " % (mc if mc else "default"), logger)
+        self.logger = PrefixLoggerAdapter("Cache: %s: " % (mc if mc else ''), logger)
         self.caches_array = caches_array
         self.cachedir = self.data.getVar("CACHE")
         self.clean = set()
@@ -441,7 +441,7 @@ class Cache(object):
         else:
             symlink = os.path.join(self.cachedir, "bb_cache.dat")
 
-        if os.path.exists(symlink):
+        if os.path.exists(symlink) or os.path.islink(symlink):
             bb.utils.remove(symlink)
         try:
             os.symlink(os.path.basename(self.cachefile), symlink)
@@ -779,25 +779,6 @@ class MulticonfigCache(Mapping):
         for k in self.__caches:
             yield k
 
-def init(cooker):
-    """
-    The Objective: Cache the minimum amount of data possible yet get to the
-    stage of building packages (i.e. tryBuild) without reparsing any .bb files.
-
-    To do this, we intercept getVar calls and only cache the variables we see
-    being accessed. We rely on the cache getVar calls being made for all
-    variables bitbake might need to use to reach this stage. For each cached
-    file we need to track:
-
-    * Its mtime
-    * The mtimes of all its dependencies
-    * Whether it caused a parse.SkipRecipe exception
-
-    Files causing parsing errors are evicted from the cache.
-
-    """
-    return Cache(cooker.configuration.data, cooker.configuration.data_hash)
-
 
 class CacheData(object):
     """
@@ -866,6 +847,16 @@ class MultiProcessCache(object):
             data = [{}]
         return data
 
+    def clear_cache(self):
+        if not self.cachefile:
+            bb.fatal("Can't clear invalid cachefile")
+
+        self.cachedata = self.create_cachedata()
+        self.cachedata_extras = self.create_cachedata()
+        with bb.utils.fileslocked([self.cachefile + ".lock"]):
+            bb.utils.remove(self.cachefile)
+            bb.utils.remove(self.cachefile + "-*")
+
     def save_extras(self):
         if not self.cachefile:
             return
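The extra os.path.islink() test above matters because os.path.exists() follows symlinks: a dangling bb_cache.dat link reported False and was never removed before the new link was created. A small demonstration:

import os, tempfile

d = tempfile.mkdtemp()
link = os.path.join(d, "bb_cache.dat")
os.symlink(os.path.join(d, "no-such-target"), link)  # dangling on purpose
print(os.path.exists(link))   # False: the target is missing
print(os.path.islink(link))   # True: the stale link itself remains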
diff --git a/bitbake/lib/bb/checksum.py b/bitbake/lib/bb/checksum.py
index 557793d366..3fb39a303e 100644
--- a/bitbake/lib/bb/checksum.py
+++ b/bitbake/lib/bb/checksum.py
@@ -142,3 +142,28 @@ class FileChecksumCache(MultiProcessCache):
 
         checksums.sort(key=operator.itemgetter(1))
         return checksums
+
+class RevisionsCache(MultiProcessCache):
+    cache_file_name = "local_srcrevisions.dat"
+    CACHE_VERSION = 1
+
+    def __init__(self):
+        MultiProcessCache.__init__(self)
+
+    def get_revs(self):
+        return self.cachedata[0]
+
+    def get_rev(self, k):
+        if k in self.cachedata_extras[0]:
+            return self.cachedata_extras[0][k]
+        if k in self.cachedata[0]:
+            return self.cachedata[0][k]
+        return None
+
+    def set_rev(self, k, v):
+        self.cachedata[0][k] = v
+        self.cachedata_extras[0][k] = v
+
+    def merge_data(self, source, dest):
+        for h in source[0]:
+            dest[0][h] = source[0][h]
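RevisionsCache stores source revisions through the MultiProcessCache two-tier scheme: writes land both in the shared data and in a per-process "extras" overlay that is consulted first and merged back later (merge_data). A plain-dict sketch of just that lookup order, without the locking and serialization the real class inherits:

class TwoTierDict:
    def __init__(self):
        self.shared = {}   # stands in for cachedata[0]
        self.extras = {}   # stands in for cachedata_extras[0]

    def get_rev(self, k):
        if k in self.extras:
            return self.extras[k]
        return self.shared.get(k)

    def set_rev(self, k, v):
        self.shared[k] = v
        self.extras[k] = v

c = TwoTierDict()
c.set_rev("git://example.com/repo;branch=main", "0123abcd")  # made-up key/value
print(c.get_rev("git://example.com/repo;branch=main"))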
diff --git a/bitbake/lib/bb/codeparser.py b/bitbake/lib/bb/codeparser.py
index 2e8b7ced3c..4f70cf7fe7 100644
--- a/bitbake/lib/bb/codeparser.py
+++ b/bitbake/lib/bb/codeparser.py
@@ -69,12 +69,25 @@ def add_module_functions(fn, functions, namespace):
             name = "%s.%s" % (namespace, f)
             parser = PythonParser(name, logger)
             try:
-                parser.parse_python(None, filename=fn, lineno=1, fixedhash=fixedhash+f)
+                parser.parse_python(None, filename=fn, lineno=1, fixedhash=fixedhash+f, func=functions[f])
                 #bb.warn("Cached %s" % f)
             except KeyError:
-                lines, lineno = inspect.getsourcelines(functions[f])
+                try:
+                    targetfn = inspect.getsourcefile(functions[f])
+                except TypeError:
+                    # Builtin
+                    continue
+                if fn != targetfn:
+                    # Skip references to other modules outside this file
+                    #bb.warn("Skipping %s" % name)
+                    continue
+                try:
+                    lines, lineno = inspect.getsourcelines(functions[f])
+                except TypeError:
+                    # Builtin
+                    continue
                 src = "".join(lines)
-                parser.parse_python(src, filename=fn, lineno=lineno, fixedhash=fixedhash+f)
+                parser.parse_python(src, filename=fn, lineno=lineno, fixedhash=fixedhash+f, func=functions[f])
                 #bb.warn("Not cached %s" % f)
             execs = parser.execs.copy()
             # Expand internal module exec references
@@ -82,14 +95,17 @@ def add_module_functions(fn, functions, namespace):
             if e in functions:
                 execs.remove(e)
                 execs.add(namespace + "." + e)
-        modulecode_deps[name] = [parser.references.copy(), execs, parser.var_execs.copy(), parser.contains.copy()]
+        visitorcode = None
+        if hasattr(functions[f], 'visitorcode'):
+            visitorcode = getattr(functions[f], "visitorcode")
+        modulecode_deps[name] = [parser.references.copy(), execs, parser.var_execs.copy(), parser.contains.copy(), parser.extra, visitorcode]
         #bb.warn("%s: %s\nRefs:%s Execs: %s %s %s" % (name, fn, parser.references, parser.execs, parser.var_execs, parser.contains))
 
 def update_module_dependencies(d):
     for mod in modulecode_deps:
         excludes = set((d.getVarFlag(mod, "vardepsexclude") or "").split())
         if excludes:
-            modulecode_deps[mod] = [modulecode_deps[mod][0] - excludes, modulecode_deps[mod][1] - excludes, modulecode_deps[mod][2] - excludes, modulecode_deps[mod][3]]
+            modulecode_deps[mod] = [modulecode_deps[mod][0] - excludes, modulecode_deps[mod][1] - excludes, modulecode_deps[mod][2] - excludes, modulecode_deps[mod][3], modulecode_deps[mod][4], modulecode_deps[mod][5]]
 
 # A custom getstate/setstate using tuples is actually worth 15% cachesize by
 # avoiding duplication of the attribute names!
@@ -112,21 +128,22 @@ class SetCache(object):
 codecache = SetCache()
 
 class pythonCacheLine(object):
-    def __init__(self, refs, execs, contains):
+    def __init__(self, refs, execs, contains, extra):
         self.refs = codecache.internSet(refs)
         self.execs = codecache.internSet(execs)
         self.contains = {}
         for c in contains:
             self.contains[c] = codecache.internSet(contains[c])
+        self.extra = extra
 
     def __getstate__(self):
-        return (self.refs, self.execs, self.contains)
+        return (self.refs, self.execs, self.contains, self.extra)
 
     def __setstate__(self, state):
-        (refs, execs, contains) = state
-        self.__init__(refs, execs, contains)
+        (refs, execs, contains, extra) = state
+        self.__init__(refs, execs, contains, extra)
     def __hash__(self):
-        l = (hash(self.refs), hash(self.execs))
+        l = (hash(self.refs), hash(self.execs), hash(self.extra))
         for c in sorted(self.contains.keys()):
             l = l + (c, hash(self.contains[c]))
         return hash(l)
@@ -155,7 +172,7 @@ class CodeParserCache(MultiProcessCache):
     # so that an existing cache gets invalidated. Additionally you'll need
     # to increment __cache_version__ in cache.py in order to ensure that old
     # recipe caches don't trigger "Taskhash mismatch" errors.
-    CACHE_VERSION = 11
+    CACHE_VERSION = 14
 
     def __init__(self):
         MultiProcessCache.__init__(self)
@@ -169,8 +186,8 @@ class CodeParserCache(MultiProcessCache):
         self.pythoncachelines = {}
         self.shellcachelines = {}
 
-    def newPythonCacheLine(self, refs, execs, contains):
-        cacheline = pythonCacheLine(refs, execs, contains)
+    def newPythonCacheLine(self, refs, execs, contains, extra):
+        cacheline = pythonCacheLine(refs, execs, contains, extra)
         h = hash(cacheline)
         if h in self.pythoncachelines:
             return self.pythoncachelines[h]
@@ -255,7 +272,15 @@ class PythonParser():
 
     def visit_Call(self, node):
         name = self.called_node_name(node.func)
-        if name and (name.endswith(self.getvars) or name.endswith(self.getvarflags) or name in self.containsfuncs or name in self.containsanyfuncs):
+        if name and name in modulecode_deps and modulecode_deps[name][5]:
+            visitorcode = modulecode_deps[name][5]
+            contains, execs, warn = visitorcode(name, node.args)
+            for i in contains:
+                self.contains[i] = contains[i]
+            self.execs |= execs
+            if warn:
+                self.warn(node.func, warn)
+        elif name and (name.endswith(self.getvars) or name.endswith(self.getvarflags) or name in self.containsfuncs or name in self.containsanyfuncs):
             if isinstance(node.args[0], ast.Constant) and isinstance(node.args[0].value, str):
                 varname = node.args[0].value
                 if name in self.containsfuncs and isinstance(node.args[1], ast.Constant):
@@ -323,7 +348,7 @@ class PythonParser():
     # For the python module code it is expensive to have the function text so it is
     # uses a different fixedhash to cache against. We can take the hit on obtaining the
     # text if it isn't in the cache.
-    def parse_python(self, node, lineno=0, filename="<string>", fixedhash=None):
+    def parse_python(self, node, lineno=0, filename="<string>", fixedhash=None, func=None):
         if not fixedhash and (not node or not node.strip()):
             return
 
@@ -338,6 +363,7 @@ class PythonParser():
             self.contains = {}
             for i in codeparsercache.pythoncache[h].contains:
                 self.contains[i] = set(codeparsercache.pythoncache[h].contains[i])
+            self.extra = codeparsercache.pythoncache[h].extra
             return
 
         if h in codeparsercache.pythoncacheextras:
@@ -346,6 +372,7 @@ class PythonParser():
             self.contains = {}
             for i in codeparsercache.pythoncacheextras[h].contains:
                 self.contains[i] = set(codeparsercache.pythoncacheextras[h].contains[i])
+            self.extra = codeparsercache.pythoncacheextras[h].extra
             return
 
         if fixedhash and not node:
@@ -363,9 +390,16 @@ class PythonParser():
             if n.__class__.__name__ == "Call":
                 self.visit_Call(n)
 
+        if func is not None:
+            self.references |= getattr(func, "bb_vardeps", set())
+            self.references -= getattr(func, "bb_vardepsexclude", set())
+
         self.execs.update(self.var_execs)
+        self.extra = None
+        if fixedhash:
+            self.extra = bbhash(str(node))
 
-        codeparsercache.pythoncacheextras[h] = codeparsercache.newPythonCacheLine(self.references, self.execs, self.contains)
+        codeparsercache.pythoncacheextras[h] = codeparsercache.newPythonCacheLine(self.references, self.execs, self.contains, self.extra)
 
 class ShellParser():
     def __init__(self, name, log):
484 """ 518 """
485 519
486 words = list(words) 520 words = list(words)
487 for word in list(words): 521 for word in words:
488 wtree = pyshlex.make_wordtree(word[1]) 522 wtree = pyshlex.make_wordtree(word[1])
489 for part in wtree: 523 for part in wtree:
490 if not isinstance(part, list): 524 if not isinstance(part, list):
491 continue 525 continue
492 526
493 if part[0] in ('`', '$('): 527 candidates = [part]
494 command = pyshlex.wordtree_as_string(part[1:-1]) 528
495 self._parse_shell(command) 529 # If command is of type:
496 530 #
497 if word[0] in ("cmd_name", "cmd_word"): 531 # var="... $(cmd [...]) ..."
498 if word in words: 532 #
499 words.remove(word) 533 # Then iterate on what's between the quotes and if we find a
534 # list, make that what we check for below.
535 if len(part) >= 3 and part[0] == '"':
536 for p in part[1:-1]:
537 if isinstance(p, list):
538 candidates.append(p)
539
540 for candidate in candidates:
541 if len(candidate) >= 2:
542 if candidate[0] in ('`', '$('):
543 command = pyshlex.wordtree_as_string(candidate[1:-1])
544 self._parse_shell(command)
545
546 if word[0] in ("cmd_name", "cmd_word"):
547 if word in words:
548 words.remove(word)
500 549
501 usetoken = False 550 usetoken = False
502 for word in words: 551 for word in words:
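visit_Call above now first consults the per-function visitorcode hook and only falls back to the generic getVar/contains matching. A minimal sketch of the kind of AST walk that generic path performs, collecting constant first arguments of *.getVar(...) calls:

import ast

src = 'v = d.getVar("SRC_URI")\nw = e.data.getVar("PN")'
references = set()
for node in ast.walk(ast.parse(src)):
    if (isinstance(node, ast.Call)
            and isinstance(node.func, ast.Attribute)
            and node.func.attr == "getVar"
            and node.args
            and isinstance(node.args[0], ast.Constant)
            and isinstance(node.args[0].value, str)):
        references.add(node.args[0].value)
print(sorted(references))  # ['PN', 'SRC_URI']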
diff --git a/bitbake/lib/bb/command.py b/bitbake/lib/bb/command.py
index 1fcb9bf14c..59a979ee90 100644
--- a/bitbake/lib/bb/command.py
+++ b/bitbake/lib/bb/command.py
@@ -24,6 +24,7 @@ import io
 import bb.event
 import bb.cooker
 import bb.remotedata
+import bb.parse
 
 class DataStoreConnectionHandle(object):
     def __init__(self, dsindex=0):
@@ -108,7 +109,7 @@ class Command:
 
     def runAsyncCommand(self, _, process_server, halt):
         try:
-            if self.cooker.state in (bb.cooker.state.error, bb.cooker.state.shutdown, bb.cooker.state.forceshutdown):
+            if self.cooker.state in (bb.cooker.State.ERROR, bb.cooker.State.SHUTDOWN, bb.cooker.State.FORCE_SHUTDOWN):
                 # updateCache will trigger a shutdown of the parser
                 # and then raise BBHandledException triggering an exit
                 self.cooker.updateCache()
@@ -118,7 +119,7 @@ class Command:
                 (command, options) = cmd
                 commandmethod = getattr(CommandsAsync, command)
                 needcache = getattr( commandmethod, "needcache" )
-                if needcache and self.cooker.state != bb.cooker.state.running:
+                if needcache and self.cooker.state != bb.cooker.State.RUNNING:
                     self.cooker.updateCache()
                     return True
                 else:
@@ -142,14 +143,14 @@ class Command:
             return bb.server.process.idleFinish(traceback.format_exc())
 
     def finishAsyncCommand(self, msg=None, code=None):
+        self.cooker.finishcommand()
+        self.process_server.clear_async_cmd()
         if msg or msg == "":
             bb.event.fire(CommandFailed(msg), self.cooker.data)
         elif code:
             bb.event.fire(CommandExit(code), self.cooker.data)
         else:
             bb.event.fire(CommandCompleted(), self.cooker.data)
-        self.cooker.finishcommand()
-        self.process_server.clear_async_cmd()
 
     def reset(self):
         if self.remotedatastores:
@@ -310,7 +311,7 @@ class CommandsSync:
     def revalidateCaches(self, command, params):
         """Called by UI clients when metadata may have changed"""
         command.cooker.revalidateCaches()
-        parseConfiguration.needconfig = False
+        revalidateCaches.needconfig = False
 
     def getRecipes(self, command, params):
         try:
@@ -420,15 +421,30 @@ class CommandsSync:
         return command.cooker.recipecaches[mc].pkg_dp
     getDefaultPreference.readonly = True
 
+
     def getSkippedRecipes(self, command, params):
+        """
+        Get the map of skipped recipes for the specified multiconfig/mc name (`params[0]`).
+
+        Invoked by `bb.tinfoil.Tinfoil.get_skipped_recipes`
+
+        :param command: Internally used parameter.
+        :param params: Parameter array. params[0] is multiconfig/mc name. If not given, then default mc '' is assumed.
+        :return: Dict whose keys are virtualfns and values are `bb.cooker.SkippedPackage`
+        """
+        try:
+            mc = params[0]
+        except IndexError:
+            mc = ''
+
         # Return list sorted by reverse priority order
         import bb.cache
         def sortkey(x):
             vfn, _ = x
-            realfn, _, mc = bb.cache.virtualfn2realfn(vfn)
-            return (-command.cooker.collections[mc].calc_bbfile_priority(realfn)[0], vfn)
+            realfn, _, item_mc = bb.cache.virtualfn2realfn(vfn)
+            return -command.cooker.collections[item_mc].calc_bbfile_priority(realfn)[0], vfn
 
-        skipdict = OrderedDict(sorted(command.cooker.skiplist.items(), key=sortkey))
+        skipdict = OrderedDict(sorted(command.cooker.skiplist_by_mc[mc].items(), key=sortkey))
         return list(skipdict.items())
     getSkippedRecipes.readonly = True
 
@@ -582,6 +598,13 @@ class CommandsSync:
         return DataStoreConnectionHandle(idx)
     parseRecipeFile.readonly = True
 
+    def finalizeData(self, command, params):
+        newdata = command.cooker.data.createCopy()
+        bb.data.expandKeys(newdata)
+        bb.parse.ast.runAnonFuncs(newdata)
+        idx = command.remotedatastores.store(newdata)
+        return DataStoreConnectionHandle(idx)
+
 class CommandsAsync:
     """
     A class of asynchronous commands
diff --git a/bitbake/lib/bb/compress/lz4.py b/bitbake/lib/bb/compress/lz4.py
index 88b0989322..2a64681c86 100644
--- a/bitbake/lib/bb/compress/lz4.py
+++ b/bitbake/lib/bb/compress/lz4.py
@@ -13,7 +13,7 @@ def open(*args, **kwargs):
 
 class LZ4File(bb.compress._pipecompress.PipeFile):
     def get_compress(self):
-        return ["lz4c", "-z", "-c"]
+        return ["lz4", "-z", "-c"]
 
     def get_decompress(self):
-        return ["lz4c", "-d", "-c"]
+        return ["lz4", "-d", "-c"]
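lz4c is the legacy CLI name; current lz4 releases install the lz4 binary, which accepts the same flag spelling used here. A round-trip sketch of the pipe usage the PipeFile wrapper performs, assuming lz4 is on PATH:

import subprocess

data = b"hello bitbake\n" * 64
packed = subprocess.run(["lz4", "-z", "-c"], input=data,
                        capture_output=True, check=True).stdout
unpacked = subprocess.run(["lz4", "-d", "-c"], input=packed,
                          capture_output=True, check=True).stdout
assert unpacked == data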
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index c5bfef55d6..1810bcc604 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -8,7 +8,7 @@
 #
 # SPDX-License-Identifier: GPL-2.0-only
 #
-
+import enum
 import sys, os, glob, os.path, re, time
 import itertools
 import logging
@@ -17,7 +17,7 @@ import threading
 from io import StringIO, UnsupportedOperation
 from contextlib import closing
 from collections import defaultdict, namedtuple
-import bb, bb.exceptions, bb.command
+import bb, bb.command
 from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
 import queue
 import signal
@@ -48,16 +48,15 @@ class CollectionError(bb.BBHandledException):
     Exception raised when layer configuration is incorrect
     """
 
-class state:
-    initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))
 
-    @classmethod
-    def get_name(cls, code):
-        for name in dir(cls):
-            value = getattr(cls, name)
-            if type(value) == type(cls.initial) and value == code:
-                return name
-        raise ValueError("Invalid status code: %s" % code)
+class State(enum.Enum):
+    INITIAL = 0,
+    PARSING = 1,
+    RUNNING = 2,
+    SHUTDOWN = 3,
+    FORCE_SHUTDOWN = 4,
+    STOPPED = 5,
+    ERROR = 6
 
 
 class SkippedPackage:
@@ -134,7 +133,8 @@ class BBCooker:
         self.baseconfig_valid = False
         self.parsecache_valid = False
         self.eventlog = None
-        self.skiplist = {}
+        # The skiplists, one per multiconfig
+        self.skiplist_by_mc = defaultdict(dict)
         self.featureset = CookerFeatures()
         if featureSet:
             for f in featureSet:
@@ -180,7 +180,7 @@ class BBCooker:
             pass
 
         self.command = bb.command.Command(self, self.process_server)
-        self.state = state.initial
+        self.state = State.INITIAL
 
         self.parser = None
 
@@ -226,23 +226,22 @@ class BBCooker:
             bb.warn("Cooker received SIGTERM, shutting down...")
         elif signum == signal.SIGHUP:
             bb.warn("Cooker received SIGHUP, shutting down...")
-        self.state = state.forceshutdown
+        self.state = State.FORCE_SHUTDOWN
         bb.event._should_exit.set()
 
     def setFeatures(self, features):
         # we only accept a new feature set if we're in state initial, so we can reset without problems
-        if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
+        if not self.state in [State.INITIAL, State.SHUTDOWN, State.FORCE_SHUTDOWN, State.STOPPED, State.ERROR]:
             raise Exception("Illegal state for feature set change")
         original_featureset = list(self.featureset)
         for feature in features:
             self.featureset.setFeature(feature)
         bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
-        if (original_featureset != list(self.featureset)) and self.state != state.error and hasattr(self, "data"):
+        if (original_featureset != list(self.featureset)) and self.state != State.ERROR and hasattr(self, "data"):
             self.reset()
 
     def initConfigurationData(self):
-
-        self.state = state.initial
+        self.state = State.INITIAL
         self.caches_array = []
 
         sys.path = self.orig_syspath.copy()
@@ -281,7 +280,6 @@ class BBCooker:
         self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
         self.databuilder.parseBaseConfiguration()
         self.data = self.databuilder.data
-        self.data_hash = self.databuilder.data_hash
         self.extraconfigdata = {}
 
         eventlog = self.data.getVar("BB_DEFAULT_EVENTLOG")
@@ -315,13 +313,19 @@ class BBCooker:
         dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
         upstream = self.data.getVar("BB_HASHSERVE_UPSTREAM") or None
         if upstream:
-            import socket
             try:
-                sock = socket.create_connection(upstream.split(":"), 5)
-                sock.close()
-            except socket.error as e:
-                bb.warn("BB_HASHSERVE_UPSTREAM is not valid, unable to connect hash equivalence server at '%s': %s"
+                with hashserv.create_client(upstream) as client:
+                    client.ping()
+            except ImportError as e:
+                bb.fatal("""Unable to use hash equivalence server at '%s' due to missing or incorrect python module:
+%s
+Please install the needed module on the build host, or use an environment containing it (e.g a pip venv or OpenEmbedded's buildtools tarball).
+You can also remove the BB_HASHSERVE_UPSTREAM setting, but this may result in significantly longer build times as bitbake will be unable to reuse prebuilt sstate artefacts."""
+                         % (upstream, repr(e)))
+            except ConnectionError as e:
+                bb.warn("Unable to connect to hash equivalence server at '%s', please correct or remove BB_HASHSERVE_UPSTREAM:\n%s"
                          % (upstream, repr(e)))
+                upstream = None
 
         self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
         self.hashserv = hashserv.create_server(
@@ -370,6 +374,11 @@ class BBCooker:
         if not clean:
             bb.parse.BBHandler.cached_statements = {}
 
+        # If writes were made to any of the data stores, we need to recalculate the data
+        # store cache
+        if hasattr(self, "databuilder"):
+            self.databuilder.calc_datastore_hashes()
+
     def parseConfiguration(self):
         self.updateCacheSync()
 
@@ -612,8 +621,8 @@ class BBCooker:
         localdata = {}
 
         for mc in self.multiconfigs:
-            taskdata[mc] = bb.taskdata.TaskData(halt, skiplist=self.skiplist, allowincomplete=allowincomplete)
-            localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
+            taskdata[mc] = bb.taskdata.TaskData(halt, skiplist=self.skiplist_by_mc[mc], allowincomplete=allowincomplete)
+            localdata[mc] = bb.data.createCopy(self.databuilder.mcdata[mc])
             bb.data.expandKeys(localdata[mc])
 
         current = 0
@@ -680,14 +689,14 @@ class BBCooker:
         bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
         return taskdata, runlist
 
-    def prepareTreeData(self, pkgs_to_build, task):
+    def prepareTreeData(self, pkgs_to_build, task, halt=False):
         """
         Prepare a runqueue and taskdata object for iteration over pkgs_to_build
         """
 
         # We set halt to False here to prevent unbuildable targets raising
         # an exception when we're just generating data
-        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)
+        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, halt, allowincomplete=True)
 
         return runlist, taskdata
 
@@ -701,7 +710,7 @@ class BBCooker:
         if not task.startswith("do_"):
             task = "do_%s" % task
 
-        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
+        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task, halt=True)
         rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
         rq.rqdata.prepare()
         return self.buildDependTree(rq, taskdata)
@@ -896,10 +905,11 @@ class BBCooker:
 
         depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
 
-        with open('pn-buildlist', 'w') as f:
-            for pn in depgraph["pn"]:
-                f.write(pn + "\n")
-        logger.info("PN build list saved to 'pn-buildlist'")
+        pns = depgraph["pn"].keys()
+        if pns:
+            with open('pn-buildlist', 'w') as f:
+                f.write("%s\n" % "\n".join(sorted(pns)))
+            logger.info("PN build list saved to 'pn-buildlist'")
 
         # Remove old format output files to ensure no confusion with stale data
         try:
@@ -933,7 +943,7 @@ class BBCooker:
933 for mc in self.multiconfigs: 943 for mc in self.multiconfigs:
934 # First get list of recipes, including skipped 944 # First get list of recipes, including skipped
935 recipefns = list(self.recipecaches[mc].pkg_fn.keys()) 945 recipefns = list(self.recipecaches[mc].pkg_fn.keys())
936 recipefns.extend(self.skiplist.keys()) 946 recipefns.extend(self.skiplist_by_mc[mc].keys())
937 947
938 # Work out list of bbappends that have been applied 948 # Work out list of bbappends that have been applied
939 applied_appends = [] 949 applied_appends = []
@@ -952,13 +962,7 @@ class BBCooker:
952 '\n '.join(appends_without_recipes[mc]))) 962 '\n '.join(appends_without_recipes[mc])))
953 963
954 if msgs: 964 if msgs:
955 msg = "\n".join(msgs) 965 bb.fatal("\n".join(msgs))
956 warn_only = self.databuilder.mcdata[mc].getVar("BB_DANGLINGAPPENDS_WARNONLY", \
957 False) or "no"
958 if warn_only.lower() in ("1", "yes", "true"):
959 bb.warn(msg)
960 else:
961 bb.fatal(msg)
962 966
963 def handlePrefProviders(self): 967 def handlePrefProviders(self):
964 968
@@ -1338,7 +1342,7 @@ class BBCooker:
1338 self.buildSetVars() 1342 self.buildSetVars()
1339 self.reset_mtime_caches() 1343 self.reset_mtime_caches()
1340 1344
1341 bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array) 1345 bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.databuilder.data_hash, self.caches_array)
1342 1346
1343 layername = self.collections[mc].calc_bbfile_priority(fn)[2] 1347 layername = self.collections[mc].calc_bbfile_priority(fn)[2]
1344 infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn), layername) 1348 infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn), layername)
@@ -1399,11 +1403,11 @@ class BBCooker:
1399 1403
1400 msg = None 1404 msg = None
1401 interrupted = 0 1405 interrupted = 0
1402 if halt or self.state == state.forceshutdown: 1406 if halt or self.state == State.FORCE_SHUTDOWN:
1403 rq.finish_runqueue(True) 1407 rq.finish_runqueue(True)
1404 msg = "Forced shutdown" 1408 msg = "Forced shutdown"
1405 interrupted = 2 1409 interrupted = 2
1406 elif self.state == state.shutdown: 1410 elif self.state == State.SHUTDOWN:
1407 rq.finish_runqueue(False) 1411 rq.finish_runqueue(False)
1408 msg = "Stopped build" 1412 msg = "Stopped build"
1409 interrupted = 1 1413 interrupted = 1
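
The shutdown paths here and in the hunks below move from lowercase module-level state constants to members of a State enumeration (State.INITIAL, State.PARSING, State.RUNNING, State.SHUTDOWN, State.FORCE_SHUTDOWN, State.ERROR). The definition itself is outside these hunks; a minimal sketch of what the renamed references assume:

    import enum

    # Hypothetical reconstruction: only the member names are visible in
    # this diff; the numbering and base class are assumptions.
    class State(enum.Enum):
        INITIAL = 0
        PARSING = 1
        RUNNING = 2
        SHUTDOWN = 3
        FORCE_SHUTDOWN = 4
        ERROR = 5

Enum members are real attributes, so a typo like State.RUNING fails loudly with an AttributeError instead of silently comparing unequal the way a misspelled ad-hoc constant could.
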
@@ -1459,7 +1463,6 @@ class BBCooker:
1459 1463
1460 if t in task or getAllTaskSignatures: 1464 if t in task or getAllTaskSignatures:
1461 try: 1465 try:
1462 rq.rqdata.prepare_task_hash(tid)
1463 sig.append([pn, t, rq.rqdata.get_task_unihash(tid)]) 1466 sig.append([pn, t, rq.rqdata.get_task_unihash(tid)])
1464 except KeyError: 1467 except KeyError:
1465 sig.append(self.getTaskSignatures(target, [t])[0]) 1468 sig.append(self.getTaskSignatures(target, [t])[0])
@@ -1474,12 +1477,12 @@ class BBCooker:
1474 def buildTargetsIdle(server, rq, halt): 1477 def buildTargetsIdle(server, rq, halt):
1475 msg = None 1478 msg = None
1476 interrupted = 0 1479 interrupted = 0
1477 if halt or self.state == state.forceshutdown: 1480 if halt or self.state == State.FORCE_SHUTDOWN:
1478 bb.event._should_exit.set() 1481 bb.event._should_exit.set()
1479 rq.finish_runqueue(True) 1482 rq.finish_runqueue(True)
1480 msg = "Forced shutdown" 1483 msg = "Forced shutdown"
1481 interrupted = 2 1484 interrupted = 2
1482 elif self.state == state.shutdown: 1485 elif self.state == State.SHUTDOWN:
1483 rq.finish_runqueue(False) 1486 rq.finish_runqueue(False)
1484 msg = "Stopped build" 1487 msg = "Stopped build"
1485 interrupted = 1 1488 interrupted = 1
@@ -1574,7 +1577,7 @@ class BBCooker:
1574 1577
1575 1578
1576 def updateCacheSync(self): 1579 def updateCacheSync(self):
1577 if self.state == state.running: 1580 if self.state == State.RUNNING:
1578 return 1581 return
1579 1582
1580 if not self.baseconfig_valid: 1583 if not self.baseconfig_valid:
@@ -1584,19 +1587,19 @@ class BBCooker:
1584 1587
1585 # This is called for all async commands when self.state != running 1588 # This is called for all async commands when self.state != running
1586 def updateCache(self): 1589 def updateCache(self):
1587 if self.state == state.running: 1590 if self.state == State.RUNNING:
1588 return 1591 return
1589 1592
1590 if self.state in (state.shutdown, state.forceshutdown, state.error): 1593 if self.state in (State.SHUTDOWN, State.FORCE_SHUTDOWN, State.ERROR):
1591 if hasattr(self.parser, 'shutdown'): 1594 if hasattr(self.parser, 'shutdown'):
1592 self.parser.shutdown(clean=False) 1595 self.parser.shutdown(clean=False)
1593 self.parser.final_cleanup() 1596 self.parser.final_cleanup()
1594 raise bb.BBHandledException() 1597 raise bb.BBHandledException()
1595 1598
1596 if self.state != state.parsing: 1599 if self.state != State.PARSING:
1597 self.updateCacheSync() 1600 self.updateCacheSync()
1598 1601
1599 if self.state != state.parsing and not self.parsecache_valid: 1602 if self.state != State.PARSING and not self.parsecache_valid:
1600 bb.server.process.serverlog("Parsing started") 1603 bb.server.process.serverlog("Parsing started")
1601 self.parsewatched = {} 1604 self.parsewatched = {}
1602 1605
@@ -1630,9 +1633,10 @@ class BBCooker:
1630 self.parser = CookerParser(self, mcfilelist, total_masked) 1633 self.parser = CookerParser(self, mcfilelist, total_masked)
1631 self._parsecache_set(True) 1634 self._parsecache_set(True)
1632 1635
1633 self.state = state.parsing 1636 self.state = State.PARSING
1634 1637
1635 if not self.parser.parse_next(): 1638 if not self.parser.parse_next():
1639 bb.server.process.serverlog("Parsing completed")
1636 collectlog.debug("parsing complete") 1640 collectlog.debug("parsing complete")
1637 if self.parser.error: 1641 if self.parser.error:
1638 raise bb.BBHandledException() 1642 raise bb.BBHandledException()
@@ -1640,7 +1644,7 @@ class BBCooker:
1640 self.handlePrefProviders() 1644 self.handlePrefProviders()
1641 for mc in self.multiconfigs: 1645 for mc in self.multiconfigs:
1642 self.recipecaches[mc].bbfile_priority = self.collections[mc].collection_priorities(self.recipecaches[mc].pkg_fn, self.parser.mcfilelist[mc], self.data) 1646 self.recipecaches[mc].bbfile_priority = self.collections[mc].collection_priorities(self.recipecaches[mc].pkg_fn, self.parser.mcfilelist[mc], self.data)
1643 self.state = state.running 1647 self.state = State.RUNNING
1644 1648
1645 # Send an event listing all stamps reachable after parsing 1649 # Send an event listing all stamps reachable after parsing
1646 # which the metadata may use to clean up stale data 1650 # which the metadata may use to clean up stale data
@@ -1713,10 +1717,10 @@ class BBCooker:
1713 1717
1714 def shutdown(self, force=False): 1718 def shutdown(self, force=False):
1715 if force: 1719 if force:
1716 self.state = state.forceshutdown 1720 self.state = State.FORCE_SHUTDOWN
1717 bb.event._should_exit.set() 1721 bb.event._should_exit.set()
1718 else: 1722 else:
1719 self.state = state.shutdown 1723 self.state = State.SHUTDOWN
1720 1724
1721 if self.parser: 1725 if self.parser:
1722 self.parser.shutdown(clean=False) 1726 self.parser.shutdown(clean=False)
@@ -1726,7 +1730,7 @@ class BBCooker:
1726 if hasattr(self.parser, 'shutdown'): 1730 if hasattr(self.parser, 'shutdown'):
1727 self.parser.shutdown(clean=False) 1731 self.parser.shutdown(clean=False)
1728 self.parser.final_cleanup() 1732 self.parser.final_cleanup()
1729 self.state = state.initial 1733 self.state = State.INITIAL
1730 bb.event._should_exit.clear() 1734 bb.event._should_exit.clear()
1731 1735
1732 def reset(self): 1736 def reset(self):
@@ -1813,8 +1817,8 @@ class CookerCollectFiles(object):
1813 bb.event.fire(CookerExit(), eventdata) 1817 bb.event.fire(CookerExit(), eventdata)
1814 1818
1815 # We need to track where we look so that we can know when the cache is invalid. There 1819 # We need to track where we look so that we can know when the cache is invalid. There
1816 # is no nice way to do this, this is horrid. We intercept the os.listdir() 1820 # is no nice way to do this, this is horrid. We intercept the os.listdir() and os.scandir()
1817 # (or os.scandir() for python 3.6+) calls while we run glob(). 1821 # calls while we run glob().
1818 origlistdir = os.listdir 1822 origlistdir = os.listdir
1819 if hasattr(os, 'scandir'): 1823 if hasattr(os, 'scandir'):
1820 origscandir = os.scandir 1824 origscandir = os.scandir
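
The reworded comment reflects that os.scandir() no longer needs a version caveat on any Python BitBake supports, and glob() uses it internally, so both entry points get intercepted. A self-contained sketch of the monkey-patching pattern described above (names are illustrative, not BitBake's):

    import glob
    import os

    def glob_with_tracking(pattern, searched_dirs):
        # Record every directory glob() touches so a cached result can be
        # invalidated later if any of those directories changes.
        origlistdir = os.listdir
        origscandir = os.scandir

        def listdir(path="."):
            searched_dirs.add(path)
            return origlistdir(path)

        def scandir(path="."):
            searched_dirs.add(path)
            return origscandir(path)

        os.listdir = listdir
        os.scandir = scandir
        try:
            return glob.glob(pattern)
        finally:
            os.listdir = origlistdir
            os.scandir = origscandir
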
@@ -2098,7 +2102,6 @@ class Parser(multiprocessing.Process):
2098 except Exception as exc: 2102 except Exception as exc:
2099 tb = sys.exc_info()[2] 2103 tb = sys.exc_info()[2]
2100 exc.recipe = filename 2104 exc.recipe = filename
2101 exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
2102 return True, None, exc 2105 return True, None, exc
2103 # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown 2106 # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown
2104 # and for example a worker thread doesn't just exit on its own in response to 2107 # and for example a worker thread doesn't just exit on its own in response to
@@ -2113,7 +2116,7 @@ class CookerParser(object):
2113 self.mcfilelist = mcfilelist 2116 self.mcfilelist = mcfilelist
2114 self.cooker = cooker 2117 self.cooker = cooker
2115 self.cfgdata = cooker.data 2118 self.cfgdata = cooker.data
2116 self.cfghash = cooker.data_hash 2119 self.cfghash = cooker.databuilder.data_hash
2117 self.cfgbuilder = cooker.databuilder 2120 self.cfgbuilder = cooker.databuilder
2118 2121
2119 # Accounting statistics 2122 # Accounting statistics
@@ -2225,9 +2228,8 @@ class CookerParser(object):
2225 2228
2226 for process in self.processes: 2229 for process in self.processes:
2227 process.join() 2230 process.join()
2228 # Added in 3.7, cleans up zombies 2231 # clean up zombies
2229 if hasattr(process, "close"): 2232 process.close()
2230 process.close()
2231 2233
2232 bb.codeparser.parser_cache_save() 2234 bb.codeparser.parser_cache_save()
2233 bb.codeparser.parser_cache_savemerge() 2235 bb.codeparser.parser_cache_savemerge()
@@ -2237,12 +2239,13 @@ class CookerParser(object):
2237 profiles = [] 2239 profiles = []
2238 for i in self.process_names: 2240 for i in self.process_names:
2239 logfile = "profile-parse-%s.log" % i 2241 logfile = "profile-parse-%s.log" % i
2240 if os.path.exists(logfile): 2242 if os.path.exists(logfile) and os.path.getsize(logfile):
2241 profiles.append(logfile) 2243 profiles.append(logfile)
2242 2244
2243 pout = "profile-parse.log.processed" 2245 if profiles:
2244 bb.utils.process_profilelog(profiles, pout = pout) 2246 pout = "profile-parse.log.processed"
2245 print("Processed parsing statistics saved to %s" % (pout)) 2247 bb.utils.process_profilelog(profiles, pout = pout)
2248 print("Processed parsing statistics saved to %s" % (pout))
2246 2249
2247 def final_cleanup(self): 2250 def final_cleanup(self):
2248 if self.syncthread: 2251 if self.syncthread:
@@ -2299,8 +2302,12 @@ class CookerParser(object):
2299 return False 2302 return False
2300 except ParsingFailure as exc: 2303 except ParsingFailure as exc:
2301 self.error += 1 2304 self.error += 1
2302 logger.error('Unable to parse %s: %s' % 2305
2303 (exc.recipe, bb.exceptions.to_string(exc.realexception))) 2306 exc_desc = str(exc)
2307 if isinstance(exc, SystemExit) and not isinstance(exc.code, str):
2308 exc_desc = 'Exited with "%d"' % exc.code
2309
2310 logger.error('Unable to parse %s: %s' % (exc.recipe, exc_desc))
2304 self.shutdown(clean=False) 2311 self.shutdown(clean=False)
2305 return False 2312 return False
2306 except bb.parse.ParseError as exc: 2313 except bb.parse.ParseError as exc:
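
The new exc_desc logic inlines what the deleted bb.exceptions.to_string() helper (see the removal of bitbake/lib/bb/exceptions.py below) used to do: str() on a SystemExit yields only the bare exit code, so the message spells it out. For example:

    exc = SystemExit(2)
    str(exc)                       # '2' - not much use in a parse error log
    'Exited with "%d"' % exc.code  # 'Exited with "2"'
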
@@ -2309,20 +2316,33 @@ class CookerParser(object):
2309 self.shutdown(clean=False, eventmsg=str(exc)) 2316 self.shutdown(clean=False, eventmsg=str(exc))
2310 return False 2317 return False
2311 except bb.data_smart.ExpansionError as exc: 2318 except bb.data_smart.ExpansionError as exc:
2319 def skip_frames(f, fn_prefix):
2320 while f and f.tb_frame.f_code.co_filename.startswith(fn_prefix):
2321 f = f.tb_next
2322 return f
2323
2312 self.error += 1 2324 self.error += 1
2313 bbdir = os.path.dirname(__file__) + os.sep 2325 bbdir = os.path.dirname(__file__) + os.sep
2314 etype, value, _ = sys.exc_info() 2326 etype, value, tb = sys.exc_info()
2315 tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback)) 2327
2328 # Remove any frames where the code comes from bitbake. This
2329 # prevents deep (and pretty useless) backtraces for expansion error
2330 tb = skip_frames(tb, bbdir)
2331 cur = tb
2332 while cur:
2333 cur.tb_next = skip_frames(cur.tb_next, bbdir)
2334 cur = cur.tb_next
2335
2316 logger.error('ExpansionError during parsing %s', value.recipe, 2336 logger.error('ExpansionError during parsing %s', value.recipe,
2317 exc_info=(etype, value, tb)) 2337 exc_info=(etype, value, tb))
2318 self.shutdown(clean=False) 2338 self.shutdown(clean=False)
2319 return False 2339 return False
2320 except Exception as exc: 2340 except Exception as exc:
2321 self.error += 1 2341 self.error += 1
2322 etype, value, tb = sys.exc_info() 2342 _, value, _ = sys.exc_info()
2323 if hasattr(value, "recipe"): 2343 if hasattr(value, "recipe"):
2324 logger.error('Unable to parse %s' % value.recipe, 2344 logger.error('Unable to parse %s' % value.recipe,
2325 exc_info=(etype, value, exc.traceback)) 2345 exc_info=sys.exc_info())
2326 else: 2346 else:
2327 # Most likely, an exception occurred during raising an exception 2347 # Most likely, an exception occurred during raising an exception
2328 import traceback 2348 import traceback
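
Relinking tb_next in place works because traceback objects have had writable tb_next attributes since Python 3.7, which is what lets this code drop BitBake-internal frames without the removed bb.exceptions machinery. The same pruning idea as a standalone sketch:

    def prune_frames(tb, prefix):
        # Unlink traceback entries whose source file lives under 'prefix',
        # mirroring the skip_frames() logic added above.
        def skip(f):
            while f and f.tb_frame.f_code.co_filename.startswith(prefix):
                f = f.tb_next
            return f

        tb = skip(tb)
        cur = tb
        while cur:
            cur.tb_next = skip(cur.tb_next)
            cur = cur.tb_next
        return tb
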
@@ -2343,7 +2363,7 @@ class CookerParser(object):
2343 for virtualfn, info_array in result: 2363 for virtualfn, info_array in result:
2344 if info_array[0].skipped: 2364 if info_array[0].skipped:
2345 self.skipped += 1 2365 self.skipped += 1
2346 self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0]) 2366 self.cooker.skiplist_by_mc[mc][virtualfn] = SkippedPackage(info_array[0])
2347 self.bb_caches[mc].add_info(virtualfn, info_array, self.cooker.recipecaches[mc], 2367 self.bb_caches[mc].add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
2348 parsed=parsed, watcher = self.cooker.add_filewatch) 2368 parsed=parsed, watcher = self.cooker.add_filewatch)
2349 return True 2369 return True
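
Skipped recipes are now tracked per multiconfig rather than in one shared dictionary, matching the skiplist_by_mc[mc] lookups changed earlier in this file for buildTaskData() and the bbappend checks. The container's construction is not shown in these hunks; a sketch of the assumed shape:

    from collections import defaultdict

    # Assumed structure: one {virtualfn: SkippedPackage} mapping per
    # multiconfig name, with '' for the default configuration.
    skiplist_by_mc = defaultdict(dict)
    # used as in the hunks above:
    #   skiplist_by_mc[mc][virtualfn] = SkippedPackage(info_array[0])
    #   recipefns.extend(skiplist_by_mc[mc].keys())
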
diff --git a/bitbake/lib/bb/cookerdata.py b/bitbake/lib/bb/cookerdata.py
index 0649e40995..65c153a5bb 100644
--- a/bitbake/lib/bb/cookerdata.py
+++ b/bitbake/lib/bb/cookerdata.py
@@ -1,3 +1,4 @@
1
1# 2#
2# Copyright (C) 2003, 2004 Chris Larson 3# Copyright (C) 2003, 2004 Chris Larson
3# Copyright (C) 2003, 2004 Phil Blundell 4# Copyright (C) 2003, 2004 Phil Blundell
@@ -254,14 +255,21 @@ class CookerDataBuilder(object):
254 self.data = self.basedata 255 self.data = self.basedata
255 self.mcdata = {} 256 self.mcdata = {}
256 257
258 def calc_datastore_hashes(self):
259 data_hash = hashlib.sha256()
260 data_hash.update(self.data.get_hash().encode('utf-8'))
261 multiconfig = (self.data.getVar("BBMULTICONFIG") or "").split()
262 for config in multiconfig:
263 data_hash.update(self.mcdata[config].get_hash().encode('utf-8'))
264 self.data_hash = data_hash.hexdigest()
265
257 def parseBaseConfiguration(self, worker=False): 266 def parseBaseConfiguration(self, worker=False):
258 mcdata = {} 267 mcdata = {}
259 data_hash = hashlib.sha256()
260 try: 268 try:
261 self.data = self.parseConfigurationFiles(self.prefiles, self.postfiles) 269 self.data = self.parseConfigurationFiles(self.prefiles, self.postfiles)
262 270
263 if self.data.getVar("BB_WORKERCONTEXT", False) is None and not worker: 271 servercontext = self.data.getVar("BB_WORKERCONTEXT", False) is None and not worker
264 bb.fetch.fetcher_init(self.data) 272 bb.fetch.fetcher_init(self.data, servercontext)
265 bb.parse.init_parser(self.data) 273 bb.parse.init_parser(self.data)
266 274
267 bb.event.fire(bb.event.ConfigParsed(), self.data) 275 bb.event.fire(bb.event.ConfigParsed(), self.data)
@@ -279,7 +287,6 @@ class CookerDataBuilder(object):
279 bb.event.fire(bb.event.ConfigParsed(), self.data) 287 bb.event.fire(bb.event.ConfigParsed(), self.data)
280 288
281 bb.parse.init_parser(self.data) 289 bb.parse.init_parser(self.data)
282 data_hash.update(self.data.get_hash().encode('utf-8'))
283 mcdata[''] = self.data 290 mcdata[''] = self.data
284 291
285 multiconfig = (self.data.getVar("BBMULTICONFIG") or "").split() 292 multiconfig = (self.data.getVar("BBMULTICONFIG") or "").split()
@@ -289,11 +296,9 @@ class CookerDataBuilder(object):
289 parsed_mcdata = self.parseConfigurationFiles(self.prefiles, self.postfiles, config) 296 parsed_mcdata = self.parseConfigurationFiles(self.prefiles, self.postfiles, config)
290 bb.event.fire(bb.event.ConfigParsed(), parsed_mcdata) 297 bb.event.fire(bb.event.ConfigParsed(), parsed_mcdata)
291 mcdata[config] = parsed_mcdata 298 mcdata[config] = parsed_mcdata
292 data_hash.update(parsed_mcdata.get_hash().encode('utf-8'))
293 if multiconfig: 299 if multiconfig:
294 bb.event.fire(bb.event.MultiConfigParsed(mcdata), self.data) 300 bb.event.fire(bb.event.MultiConfigParsed(mcdata), self.data)
295 301
296 self.data_hash = data_hash.hexdigest()
297 except bb.data_smart.ExpansionError as e: 302 except bb.data_smart.ExpansionError as e:
298 logger.error(str(e)) 303 logger.error(str(e))
299 raise bb.BBHandledException() 304 raise bb.BBHandledException()
@@ -328,6 +333,7 @@ class CookerDataBuilder(object):
328 for mc in mcdata: 333 for mc in mcdata:
329 self.mcdata[mc] = bb.data.createCopy(mcdata[mc]) 334 self.mcdata[mc] = bb.data.createCopy(mcdata[mc])
330 self.data = self.mcdata[''] 335 self.data = self.mcdata['']
336 self.calc_datastore_hashes()
331 337
332 def reset(self): 338 def reset(self):
333 # We may not have run parseBaseConfiguration() yet 339 # We may not have run parseBaseConfiguration() yet
@@ -340,7 +346,7 @@ class CookerDataBuilder(object):
340 def _findLayerConf(self, data): 346 def _findLayerConf(self, data):
341 return findConfigFile("bblayers.conf", data) 347 return findConfigFile("bblayers.conf", data)
342 348
343 def parseConfigurationFiles(self, prefiles, postfiles, mc = "default"): 349 def parseConfigurationFiles(self, prefiles, postfiles, mc = ""):
344 data = bb.data.createCopy(self.basedata) 350 data = bb.data.createCopy(self.basedata)
345 data.setVar("BB_CURRENT_MC", mc) 351 data.setVar("BB_CURRENT_MC", mc)
346 352
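
calc_datastore_hashes() folds the base datastore and every multiconfig datastore into a single digest, so a write to any of them changes data_hash; the updateCacheSync() hook added in cooker.py recomputes it whenever the stores may have been written to. The aggregation reduces to:

    import hashlib

    def combined_hash(data, mcdata, multiconfigs):
        # Order matters for reproducibility: the base configuration first,
        # then each multiconfig named in BBMULTICONFIG.
        h = hashlib.sha256()
        h.update(data.get_hash().encode('utf-8'))
        for config in multiconfigs:
            h.update(mcdata[config].get_hash().encode('utf-8'))
        return h.hexdigest()

Moving the calculation out of parseBaseConfiguration() also means the hash covers the final post-copy datastores rather than the intermediate parse results.
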
diff --git a/bitbake/lib/bb/data.py b/bitbake/lib/bb/data.py
index 505f42950f..f672a84451 100644
--- a/bitbake/lib/bb/data.py
+++ b/bitbake/lib/bb/data.py
@@ -293,7 +293,7 @@ def build_dependencies(key, keys, mod_funcs, shelldeps, varflagsexcl, ignored_va
293 if key in mod_funcs: 293 if key in mod_funcs:
294 exclusions = set() 294 exclusions = set()
295 moddep = bb.codeparser.modulecode_deps[key] 295 moddep = bb.codeparser.modulecode_deps[key]
296 value = handle_contains("", moddep[3], exclusions, d) 296 value = handle_contains(moddep[4], moddep[3], exclusions, d)
297 return frozenset((moddep[0] | keys & moddep[1]) - ignored_vars), value 297 return frozenset((moddep[0] | keys & moddep[1]) - ignored_vars), value
298 298
299 if key[-1] == ']': 299 if key[-1] == ']':
diff --git a/bitbake/lib/bb/data_smart.py b/bitbake/lib/bb/data_smart.py
index 0128a5bb17..8e7dd98384 100644
--- a/bitbake/lib/bb/data_smart.py
+++ b/bitbake/lib/bb/data_smart.py
@@ -31,7 +31,7 @@ logger = logging.getLogger("BitBake.Data")
31 31
32__setvar_keyword__ = [":append", ":prepend", ":remove"] 32__setvar_keyword__ = [":append", ":prepend", ":remove"]
33__setvar_regexp__ = re.compile(r'(?P<base>.*?)(?P<keyword>:append|:prepend|:remove)(:(?P<add>[^A-Z]*))?$') 33__setvar_regexp__ = re.compile(r'(?P<base>.*?)(?P<keyword>:append|:prepend|:remove)(:(?P<add>[^A-Z]*))?$')
34__expand_var_regexp__ = re.compile(r"\${[a-zA-Z0-9\-_+./~:]+?}") 34__expand_var_regexp__ = re.compile(r"\${[a-zA-Z0-9\-_+./~:]+}")
35__expand_python_regexp__ = re.compile(r"\${@(?:{.*?}|.)+?}") 35__expand_python_regexp__ = re.compile(r"\${@(?:{.*?}|.)+?}")
36__whitespace_split__ = re.compile(r'(\s)') 36__whitespace_split__ = re.compile(r'(\s)')
37__override_regexp__ = re.compile(r'[a-z0-9]+') 37__override_regexp__ = re.compile(r'[a-z0-9]+')
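
The only change in this hunk, __expand_var_regexp__ losing its lazy quantifier, is behaviour-preserving: the character class cannot match '}', so a greedy '+' stops at exactly the same closing brace the non-greedy '+?' did, minus the extra backtracking on every variable expansion. A quick check:

    import re

    old = re.compile(r"\${[a-zA-Z0-9\-_+./~:]+?}")
    new = re.compile(r"\${[a-zA-Z0-9\-_+./~:]+}")

    s = "${A} and ${B_C.D} but not $E"
    assert old.findall(s) == new.findall(s) == ["${A}", "${B_C.D}"]
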
@@ -106,52 +106,52 @@ class VariableParse:
106 self.contains = {} 106 self.contains = {}
107 107
108 def var_sub(self, match): 108 def var_sub(self, match):
109 key = match.group()[2:-1] 109 key = match.group()[2:-1]
110 if self.varname and key: 110 if self.varname and key:
111 if self.varname == key: 111 if self.varname == key:
112 raise Exception("variable %s references itself!" % self.varname) 112 raise Exception("variable %s references itself!" % self.varname)
113 var = self.d.getVarFlag(key, "_content") 113 var = self.d.getVarFlag(key, "_content")
114 self.references.add(key) 114 self.references.add(key)
115 if var is not None: 115 if var is not None:
116 return var 116 return var
117 else: 117 else:
118 return match.group() 118 return match.group()
119 119
120 def python_sub(self, match): 120 def python_sub(self, match):
121 if isinstance(match, str): 121 if isinstance(match, str):
122 code = match 122 code = match
123 else: 123 else:
124 code = match.group()[3:-1] 124 code = match.group()[3:-1]
125 125
126 # Do not run code that contains one or more unexpanded variables 126 # Do not run code that contains one or more unexpanded variables
127 # instead return the code with the characters we removed put back 127 # instead return the code with the characters we removed put back
128 if __expand_var_regexp__.findall(code): 128 if __expand_var_regexp__.findall(code):
129 return "${@" + code + "}" 129 return "${@" + code + "}"
130 130
131 if self.varname: 131 if self.varname:
132 varname = 'Var <%s>' % self.varname 132 varname = 'Var <%s>' % self.varname
133 else: 133 else:
134 varname = '<expansion>' 134 varname = '<expansion>'
135 codeobj = compile(code.strip(), varname, "eval") 135 codeobj = compile(code.strip(), varname, "eval")
136 136
137 parser = bb.codeparser.PythonParser(self.varname, logger) 137 parser = bb.codeparser.PythonParser(self.varname, logger)
138 parser.parse_python(code) 138 parser.parse_python(code)
139 if self.varname: 139 if self.varname:
140 vardeps = self.d.getVarFlag(self.varname, "vardeps") 140 vardeps = self.d.getVarFlag(self.varname, "vardeps")
141 if vardeps is None: 141 if vardeps is None:
142 parser.log.flush()
143 else:
144 parser.log.flush() 142 parser.log.flush()
145 self.references |= parser.references 143 else:
146 self.execs |= parser.execs 144 parser.log.flush()
145 self.references |= parser.references
146 self.execs |= parser.execs
147 147
148 for k in parser.contains: 148 for k in parser.contains:
149 if k not in self.contains: 149 if k not in self.contains:
150 self.contains[k] = parser.contains[k].copy() 150 self.contains[k] = parser.contains[k].copy()
151 else: 151 else:
152 self.contains[k].update(parser.contains[k]) 152 self.contains[k].update(parser.contains[k])
153 value = utils.better_eval(codeobj, DataContext(self.d), {'d' : self.d}) 153 value = utils.better_eval(codeobj, DataContext(self.d), {'d' : self.d})
154 return str(value) 154 return str(value)
155 155
156class DataContext(dict): 156class DataContext(dict):
157 excluded = set([i for i in dir(builtins) if not i.startswith('_')] + ['oe']) 157 excluded = set([i for i in dir(builtins) if not i.startswith('_')] + ['oe'])
@@ -272,12 +272,9 @@ class VariableHistory(object):
272 return 272 return
273 if 'op' not in loginfo or not loginfo['op']: 273 if 'op' not in loginfo or not loginfo['op']:
274 loginfo['op'] = 'set' 274 loginfo['op'] = 'set'
275 if 'detail' in loginfo:
276 loginfo['detail'] = str(loginfo['detail'])
277 if 'variable' not in loginfo or 'file' not in loginfo: 275 if 'variable' not in loginfo or 'file' not in loginfo:
278 raise ValueError("record() missing variable or file.") 276 raise ValueError("record() missing variable or file.")
279 var = loginfo['variable'] 277 var = loginfo['variable']
280
281 if var not in self.variables: 278 if var not in self.variables:
282 self.variables[var] = [] 279 self.variables[var] = []
283 if not isinstance(self.variables[var], list): 280 if not isinstance(self.variables[var], list):
@@ -336,7 +333,8 @@ class VariableHistory(object):
336 flag = '[%s] ' % (event['flag']) 333 flag = '[%s] ' % (event['flag'])
337 else: 334 else:
338 flag = '' 335 flag = ''
339 o.write("# %s %s:%s%s\n# %s\"%s\"\n" % (event['op'], event['file'], event['line'], display_func, flag, re.sub('\n', '\n# ', event['detail']))) 336 o.write("# %s %s:%s%s\n# %s\"%s\"\n" % \
337 (event['op'], event['file'], event['line'], display_func, flag, re.sub('\n', '\n# ', str(event['detail']))))
340 if len(history) > 1: 338 if len(history) > 1:
341 o.write("# pre-expansion value:\n") 339 o.write("# pre-expansion value:\n")
342 o.write('# "%s"\n' % (commentVal)) 340 o.write('# "%s"\n' % (commentVal))
@@ -390,7 +388,7 @@ class VariableHistory(object):
390 if isset and event['op'] == 'set?': 388 if isset and event['op'] == 'set?':
391 continue 389 continue
392 isset = True 390 isset = True
393 items = d.expand(event['detail']).split() 391 items = d.expand(str(event['detail'])).split()
394 for item in items: 392 for item in items:
395 # This is a little crude but is belt-and-braces to avoid us 393 # This is a little crude but is belt-and-braces to avoid us
396 # having to handle every possible operation type specifically 394 # having to handle every possible operation type specifically
@@ -582,12 +580,10 @@ class DataSmart(MutableMapping):
582 else: 580 else:
583 loginfo['op'] = keyword 581 loginfo['op'] = keyword
584 self.varhistory.record(**loginfo) 582 self.varhistory.record(**loginfo)
585 # todo make sure keyword is not __doc__ or __module__
586 # pay the cookie monster 583 # pay the cookie monster
587 584
588 # more cookies for the cookie monster 585 # more cookies for the cookie monster
589 if ':' in var: 586 self._setvar_update_overrides(base, **loginfo)
590 self._setvar_update_overrides(base, **loginfo)
591 587
592 if base in self.overridevars: 588 if base in self.overridevars:
593 self._setvar_update_overridevars(var, value) 589 self._setvar_update_overridevars(var, value)
@@ -640,6 +636,7 @@ class DataSmart(MutableMapping):
640 nextnew.update(vardata.contains.keys()) 636 nextnew.update(vardata.contains.keys())
641 new = nextnew 637 new = nextnew
642 self.overrides = None 638 self.overrides = None
639 self.expand_cache = {}
643 640
644 def _setvar_update_overrides(self, var, **loginfo): 641 def _setvar_update_overrides(self, var, **loginfo):
645 # aka pay the cookie monster 642 # aka pay the cookie monster
@@ -829,6 +826,8 @@ class DataSmart(MutableMapping):
829 value = copy.copy(local_var[flag]) 826 value = copy.copy(local_var[flag])
830 elif flag == "_content" and "_defaultval" in local_var and not noweakdefault: 827 elif flag == "_content" and "_defaultval" in local_var and not noweakdefault:
831 value = copy.copy(local_var["_defaultval"]) 828 value = copy.copy(local_var["_defaultval"])
829 elif "_defaultval_flag_"+flag in local_var and not noweakdefault:
830 value = copy.copy(local_var["_defaultval_flag_"+flag])
832 831
833 832
834 if flag == "_content" and local_var is not None and ":append" in local_var and not parsing: 833 if flag == "_content" and local_var is not None and ":append" in local_var and not parsing:
@@ -920,6 +919,8 @@ class DataSmart(MutableMapping):
920 self.varhistory.record(**loginfo) 919 self.varhistory.record(**loginfo)
921 920
922 del self.dict[var][flag] 921 del self.dict[var][flag]
922 if ("_defaultval_flag_" + flag) in self.dict[var]:
923 del self.dict[var]["_defaultval_flag_" + flag]
923 924
924 def appendVarFlag(self, var, flag, value, **loginfo): 925 def appendVarFlag(self, var, flag, value, **loginfo):
925 loginfo['op'] = 'append' 926 loginfo['op'] = 'append'
@@ -954,17 +955,22 @@ class DataSmart(MutableMapping):
954 flags = {} 955 flags = {}
955 956
956 if local_var: 957 if local_var:
957 for i in local_var: 958 for i, val in local_var.items():
958 if i.startswith(("_", ":")) and not internalflags: 959 if i.startswith("_defaultval_flag_") and not internalflags:
960 i = i[len("_defaultval_flag_"):]
961 if i not in local_var:
962 flags[i] = val
963 elif i.startswith(("_", ":")) and not internalflags:
959 continue 964 continue
960 flags[i] = local_var[i] 965 else:
966 flags[i] = val
967
961 if expand and i in expand: 968 if expand and i in expand:
962 flags[i] = self.expand(flags[i], var + "[" + i + "]") 969 flags[i] = self.expand(flags[i], var + "[" + i + "]")
963 if len(flags) == 0: 970 if len(flags) == 0:
964 return None 971 return None
965 return flags 972 return flags
966 973
967
968 def delVarFlags(self, var, **loginfo): 974 def delVarFlags(self, var, **loginfo):
969 self.expand_cache = {} 975 self.expand_cache = {}
970 if not var in self.dict: 976 if not var in self.dict:
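
Together these three hunks wire "weak" flag defaults through the datastore: a value stored under the internal _defaultval_flag_<flag> key is returned by getVarFlag() only when no real flag is set, is deleted alongside the real flag in delVarFlag(), and is folded into getVarFlags() output without leaking the internal key name. The lookup precedence reduces to:

    def flag_value(local_var, flag):
        # Sketch of the precedence implemented above; the internal key
        # name is taken from the diff.
        if flag in local_var:
            return local_var[flag]          # an explicit flag always wins
        return local_var.get("_defaultval_flag_" + flag)  # else weak default

    assert flag_value({"doc": "real"}, "doc") == "real"
    assert flag_value({"_defaultval_flag_doc": "fallback"}, "doc") == "fallback"
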
@@ -1114,5 +1120,10 @@ class DataSmart(MutableMapping):
1114 value = d.getVar(i, False) or "" 1120 value = d.getVar(i, False) or ""
1115 data.update({i:value}) 1121 data.update({i:value})
1116 1122
1123 moddeps = bb.codeparser.modulecode_deps
1124 for dep in sorted(moddeps):
1125 # Ignore visitor code, sort sets
1126 data.update({'moddep[%s]' % dep : [sorted(moddeps[dep][0]), sorted(moddeps[dep][1]), sorted(moddeps[dep][2]), sorted(moddeps[dep][3]), moddeps[dep][4]]})
1127
1117 data_str = str([(k, data[k]) for k in sorted(data.keys())]) 1128 data_str = str([(k, data[k]) for k in sorted(data.keys())])
1118 return hashlib.sha256(data_str.encode("utf-8")).hexdigest() 1129 return hashlib.sha256(data_str.encode("utf-8")).hexdigest()
diff --git a/bitbake/lib/bb/event.py b/bitbake/lib/bb/event.py
index 4761c86880..b29f0a5568 100644
--- a/bitbake/lib/bb/event.py
+++ b/bitbake/lib/bb/event.py
@@ -19,7 +19,6 @@ import sys
19import threading 19import threading
20import traceback 20import traceback
21 21
22import bb.exceptions
23import bb.utils 22import bb.utils
24 23
25# This is the pid for which we should generate the event. This is set when 24# This is the pid for which we should generate the event. This is set when
@@ -195,7 +194,12 @@ def fire_ui_handlers(event, d):
195 ui_queue.append(event) 194 ui_queue.append(event)
196 return 195 return
197 196
198 with bb.utils.lock_timeout(_thread_lock): 197 with bb.utils.lock_timeout_nocheck(_thread_lock) as lock:
198 if not lock:
199 # If we can't get the lock, we may have been called recursively; queue the event and return
200 ui_queue.append(event)
201 return
202
199 errors = [] 203 errors = []
200 for h in _ui_handlers: 204 for h in _ui_handlers:
201 #print "Sending event %s" % event 205 #print "Sending event %s" % event
@@ -214,6 +218,9 @@ def fire_ui_handlers(event, d):
214 for h in errors: 218 for h in errors:
215 del _ui_handlers[h] 219 del _ui_handlers[h]
216 220
221 while ui_queue:
222 fire_ui_handlers(ui_queue.pop(), d)
223
217def fire(event, d): 224def fire(event, d):
218 """Fire off an Event""" 225 """Fire off an Event"""
219 226
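
Firing an event can itself fire further events (a UI handler that logs, for example), and re-taking _thread_lock from the same thread could wedge the server if it is a plain lock. The non-blocking variant queues the event instead, and whichever caller holds the lock drains ui_queue on the way out. lock_timeout_nocheck() is not shown in this diff; a plausible shape, assuming it mirrors bb.utils.lock_timeout but reports failure rather than raising:

    import threading
    from contextlib import contextmanager

    @contextmanager
    def lock_timeout_nocheck(lock, timeout=300):
        # Yield True if the lock was acquired and False otherwise, leaving
        # the caller to degrade gracefully instead of handling an exception.
        held = lock.acquire(timeout=timeout)
        try:
            yield held
        finally:
            if held:
                lock.release()
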
@@ -424,6 +431,16 @@ class RecipeEvent(Event):
424 self.fn = fn 431 self.fn = fn
425 Event.__init__(self) 432 Event.__init__(self)
426 433
434class RecipePreDeferredInherits(RecipeEvent):
435 """
436 Called before deferred inherits are processed so that code can, for example, snoop on class extensions
437 Limitations: it won't see inherits of inherited classes, and the data is unexpanded
438 """
439 def __init__(self, fn, inherits):
440 self.fn = fn
441 self.inherits = inherits
442 Event.__init__(self)
443
427class RecipePreFinalise(RecipeEvent): 444class RecipePreFinalise(RecipeEvent):
428 """ Recipe Parsing Complete but not yet finalised""" 445 """ Recipe Parsing Complete but not yet finalised"""
429 446
@@ -759,13 +776,7 @@ class LogHandler(logging.Handler):
759 776
760 def emit(self, record): 777 def emit(self, record):
761 if record.exc_info: 778 if record.exc_info:
762 etype, value, tb = record.exc_info 779 record.bb_exc_formatted = traceback.format_exception(*record.exc_info)
763 if hasattr(tb, 'tb_next'):
764 tb = list(bb.exceptions.extract_traceback(tb, context=3))
765 # Need to turn the value into something the logging system can pickle
766 record.bb_exc_info = (etype, value, tb)
767 record.bb_exc_formatted = bb.exceptions.format_exception(etype, value, tb, limit=5)
768 value = str(value)
769 record.exc_info = None 780 record.exc_info = None
770 fire(record, None) 781 fire(record, None)
771 782
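
With bb.exceptions gone, emit() leans on the standard library: traceback.format_exception() returns a plain list of strings, which, unlike a live traceback object, pickles cleanly for the trip across the server/UI boundary that the old code's comment worried about. For example:

    import sys
    import traceback

    try:
        1 / 0
    except ZeroDivisionError:
        formatted = traceback.format_exception(*sys.exc_info())
    # formatted == ['Traceback (most recent call last):\n', ...,
    #               'ZeroDivisionError: division by zero\n']
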
diff --git a/bitbake/lib/bb/exceptions.py b/bitbake/lib/bb/exceptions.py
deleted file mode 100644
index 801db9c82f..0000000000
--- a/bitbake/lib/bb/exceptions.py
+++ /dev/null
@@ -1,96 +0,0 @@
1#
2# Copyright BitBake Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import inspect
8import traceback
9import bb.namedtuple_with_abc
10from collections import namedtuple
11
12
13class TracebackEntry(namedtuple.abc):
14 """Pickleable representation of a traceback entry"""
15 _fields = 'filename lineno function args code_context index'
16 _header = ' File "{0.filename}", line {0.lineno}, in {0.function}{0.args}'
17
18 def format(self, formatter=None):
19 if not self.code_context:
20 return self._header.format(self) + '\n'
21
22 formatted = [self._header.format(self) + ':\n']
23
24 for lineindex, line in enumerate(self.code_context):
25 if formatter:
26 line = formatter(line)
27
28 if lineindex == self.index:
29 formatted.append(' >%s' % line)
30 else:
31 formatted.append(' %s' % line)
32 return formatted
33
34 def __str__(self):
35 return ''.join(self.format())
36
37def _get_frame_args(frame):
38 """Get the formatted arguments and class (if available) for a frame"""
39 arginfo = inspect.getargvalues(frame)
40
41 try:
42 if not arginfo.args:
43 return '', None
44 # There have been reports from the field of python 2.6 which doesn't
45 # return a namedtuple here but simply a tuple so fallback gracefully if
46 # args isn't present.
47 except AttributeError:
48 return '', None
49
50 firstarg = arginfo.args[0]
51 if firstarg == 'self':
52 self = arginfo.locals['self']
53 cls = self.__class__.__name__
54
55 arginfo.args.pop(0)
56 del arginfo.locals['self']
57 else:
58 cls = None
59
60 formatted = inspect.formatargvalues(*arginfo)
61 return formatted, cls
62
63def extract_traceback(tb, context=1):
64 frames = inspect.getinnerframes(tb, context)
65 for frame, filename, lineno, function, code_context, index in frames:
66 formatted_args, cls = _get_frame_args(frame)
67 if cls:
68 function = '%s.%s' % (cls, function)
69 yield TracebackEntry(filename, lineno, function, formatted_args,
70 code_context, index)
71
72def format_extracted(extracted, formatter=None, limit=None):
73 if limit:
74 extracted = extracted[-limit:]
75
76 formatted = []
77 for tracebackinfo in extracted:
78 formatted.extend(tracebackinfo.format(formatter))
79 return formatted
80
81
82def format_exception(etype, value, tb, context=1, limit=None, formatter=None):
83 formatted = ['Traceback (most recent call last):\n']
84
85 if hasattr(tb, 'tb_next'):
86 tb = extract_traceback(tb, context)
87
88 formatted.extend(format_extracted(tb, formatter, limit))
89 formatted.extend(traceback.format_exception_only(etype, value))
90 return formatted
91
92def to_string(exc):
93 if isinstance(exc, SystemExit):
94 if not isinstance(exc.code, str):
95 return 'Exited with "%d"' % exc.code
96 return str(exc)
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index 5bf2c4b8cf..0ad987c596 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -23,17 +23,18 @@ import collections
23import subprocess 23import subprocess
24import pickle 24import pickle
25import errno 25import errno
26import bb.persist_data, bb.utils 26import bb.utils
27import bb.checksum 27import bb.checksum
28import bb.process 28import bb.process
29import bb.event 29import bb.event
30 30
31__version__ = "2" 31__version__ = "2"
32_checksum_cache = bb.checksum.FileChecksumCache() 32_checksum_cache = bb.checksum.FileChecksumCache()
33_revisions_cache = bb.checksum.RevisionsCache()
33 34
34logger = logging.getLogger("BitBake.Fetcher") 35logger = logging.getLogger("BitBake.Fetcher")
35 36
36CHECKSUM_LIST = [ "md5", "sha256", "sha1", "sha384", "sha512" ] 37CHECKSUM_LIST = [ "goh1", "md5", "sha256", "sha1", "sha384", "sha512" ]
37SHOWN_CHECKSUM_LIST = ["sha256"] 38SHOWN_CHECKSUM_LIST = ["sha256"]
38 39
39class BBFetchException(Exception): 40class BBFetchException(Exception):
@@ -237,7 +238,7 @@ class URI(object):
237 # to RFC compliant URL format. E.g.: 238 # to RFC compliant URL format. E.g.:
238 # file://foo.diff -> file:foo.diff 239 # file://foo.diff -> file:foo.diff
239 if urlp.scheme in self._netloc_forbidden: 240 if urlp.scheme in self._netloc_forbidden:
240 uri = re.sub("(?<=:)//(?!/)", "", uri, 1) 241 uri = re.sub(r"(?<=:)//(?!/)", "", uri, count=1)
241 reparse = 1 242 reparse = 1
242 243
243 if reparse: 244 if reparse:
@@ -352,6 +353,14 @@ def decodeurl(url):
352 user, password, parameters). 353 user, password, parameters).
353 """ 354 """
354 355
356 uri = URI(url)
357 path = uri.path if uri.path else "/"
358 return uri.scheme, uri.hostport, path, uri.username, uri.password, uri.params
359
360def decodemirrorurl(url):
361 """Decodes a mirror URL into the tokens (scheme, network location, path,
362 user, password, parameters).
363 """
355 m = re.compile('(?P<type>[^:]*)://((?P<user>[^/;]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url) 364 m = re.compile('(?P<type>[^:]*)://((?P<user>[^/;]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
356 if not m: 365 if not m:
357 raise MalformedUrl(url) 366 raise MalformedUrl(url)
@@ -370,6 +379,9 @@ def decodeurl(url):
370 elif type.lower() == 'file': 379 elif type.lower() == 'file':
371 host = "" 380 host = ""
372 path = location 381 path = location
382 if user:
383 path = user + '@' + path
384 user = ""
373 else: 385 else:
374 host = location 386 host = location
375 path = "/" 387 path = "/"
@@ -402,32 +414,34 @@ def encodeurl(decoded):
402 414
403 if not type: 415 if not type:
404 raise MissingParameterError('type', "encoded from the data %s" % str(decoded)) 416 raise MissingParameterError('type', "encoded from the data %s" % str(decoded))
405 url = ['%s://' % type] 417 uri = URI()
418 uri.scheme = type
406 if user and type != "file": 419 if user and type != "file":
407 url.append("%s" % user) 420 uri.username = user
408 if pswd: 421 if pswd:
409 url.append(":%s" % pswd) 422 uri.password = pswd
410 url.append("@")
411 if host and type != "file": 423 if host and type != "file":
412 url.append("%s" % host) 424 uri.hostname = host
413 if path: 425 if path:
414 # Standardise path to ensure comparisons work 426 # Standardise path to ensure comparisons work
415 while '//' in path: 427 while '//' in path:
416 path = path.replace("//", "/") 428 path = path.replace("//", "/")
417 url.append("%s" % urllib.parse.quote(path)) 429 uri.path = path
430 if type == "file":
431 # Use old not IETF compliant style
432 uri.relative = False
418 if p: 433 if p:
419 for parm in p: 434 uri.params = p
420 url.append(";%s=%s" % (parm, p[parm]))
421 435
422 return "".join(url) 436 return str(uri)
423 437
424def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None): 438def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
425 if not ud.url or not uri_find or not uri_replace: 439 if not ud.url or not uri_find or not uri_replace:
426 logger.error("uri_replace: passed an undefined value, not replacing") 440 logger.error("uri_replace: passed an undefined value, not replacing")
427 return None 441 return None
428 uri_decoded = list(decodeurl(ud.url)) 442 uri_decoded = list(decodemirrorurl(ud.url))
429 uri_find_decoded = list(decodeurl(uri_find)) 443 uri_find_decoded = list(decodemirrorurl(uri_find))
430 uri_replace_decoded = list(decodeurl(uri_replace)) 444 uri_replace_decoded = list(decodemirrorurl(uri_replace))
431 logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded)) 445 logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
432 result_decoded = ['', '', '', '', '', {}] 446 result_decoded = ['', '', '', '', '', {}]
433 # 0 - type, 1 - host, 2 - path, 3 - user, 4- pswd, 5 - params 447 # 0 - type, 1 - host, 2 - path, 3 - user, 4- pswd, 5 - params
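
encodeurl() similarly delegates to the URI class rather than concatenating strings, with relative=False preserving the legacy non-IETF "file://<path>" spelling. A round-trip sketch using the tuple layout noted in the comment above (expected values, not verified output):

    decoded = ("git", "example.com", "/repo.git", "", "", {"branch": "main"})
    url = encodeurl(decoded)
    # expected: "git://example.com/repo.git;branch=main"
    # and decodeurl(url) should give back scheme/host/path unchanged
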
@@ -460,7 +474,7 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
460 for k in replacements: 474 for k in replacements:
461 uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k]) 475 uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k])
462 #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc])) 476 #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc]))
463 result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc], 1) 477 result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc], count=1)
464 if loc == 2: 478 if loc == 2:
465 # Handle path manipulations 479 # Handle path manipulations
466 basename = None 480 basename = None
@@ -493,18 +507,23 @@ methods = []
493urldata_cache = {} 507urldata_cache = {}
494saved_headrevs = {} 508saved_headrevs = {}
495 509
496def fetcher_init(d): 510def fetcher_init(d, servercontext=True):
497 """ 511 """
498 Called to initialize the fetchers once the configuration data is known. 512 Called to initialize the fetchers once the configuration data is known.
499 Calls before this must not hit the cache. 513 Calls before this must not hit the cache.
500 """ 514 """
501 515
502 revs = bb.persist_data.persist('BB_URI_HEADREVS', d) 516 _checksum_cache.init_cache(d.getVar("BB_CACHEDIR"))
517 _revisions_cache.init_cache(d.getVar("BB_CACHEDIR"))
518
519 if not servercontext:
520 return
521
503 try: 522 try:
504 # fetcher_init is called multiple times, so make sure we only save the 523 # fetcher_init is called multiple times, so make sure we only save the
505 # revs the first time it is called. 524 # revs the first time it is called.
506 if not bb.fetch2.saved_headrevs: 525 if not bb.fetch2.saved_headrevs:
507 bb.fetch2.saved_headrevs = dict(revs) 526 bb.fetch2.saved_headrevs = _revisions_cache.get_revs()
508 except: 527 except:
509 pass 528 pass
510 529
@@ -514,11 +533,10 @@ def fetcher_init(d):
514 logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy) 533 logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
515 elif srcrev_policy == "clear": 534 elif srcrev_policy == "clear":
516 logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy) 535 logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
517 revs.clear() 536 _revisions_cache.clear_cache()
518 else: 537 else:
519 raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy) 538 raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
520 539
521 _checksum_cache.init_cache(d.getVar("BB_CACHEDIR"))
522 540
523 for m in methods: 541 for m in methods:
524 if hasattr(m, "init"): 542 if hasattr(m, "init"):
@@ -526,9 +544,11 @@ def fetcher_init(d):
526 544
527def fetcher_parse_save(): 545def fetcher_parse_save():
528 _checksum_cache.save_extras() 546 _checksum_cache.save_extras()
547 _revisions_cache.save_extras()
529 548
530def fetcher_parse_done(): 549def fetcher_parse_done():
531 _checksum_cache.save_merge() 550 _checksum_cache.save_merge()
551 _revisions_cache.save_merge()
532 552
533def fetcher_compare_revisions(d): 553def fetcher_compare_revisions(d):
534 """ 554 """
@@ -536,7 +556,7 @@ def fetcher_compare_revisions(d):
536 when bitbake was started and return true if they have changed. 556 when bitbake was started and return true if they have changed.
537 """ 557 """
538 558
539 headrevs = dict(bb.persist_data.persist('BB_URI_HEADREVS', d)) 559 headrevs = _revisions_cache.get_revs()
540 return headrevs != bb.fetch2.saved_headrevs 560 return headrevs != bb.fetch2.saved_headrevs
541 561
542def mirror_from_string(data): 562def mirror_from_string(data):
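
The bb.persist_data "BB_URI_HEADREVS" store is replaced throughout this file by a RevisionsCache managed exactly like the existing FileChecksumCache: initialised from BB_CACHEDIR in fetcher_init(), flushed through save_extras()/save_merge() around parsing, cleared when the SRCREV cache policy says so, and queried via get_rev()/set_rev() (see latest_revision() further down). The lookup it serves is the usual cache-or-compute pattern:

    def cached_latest_revision(cache, key, compute):
        # Sketch of the pattern latest_revision() now uses: consult the
        # RevisionsCache first, fall back to a network query, memoise.
        rev = cache.get_rev(key)
        if rev is None:
            rev = compute()
            cache.set_rev(key, rev)
        return rev
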
@@ -786,8 +806,8 @@ def _get_srcrev(d, method_name='sortable_revision'):
786 return "", revs 806 return "", revs
787 807
788 808
789 if len(scms) == 1 and len(urldata[scms[0]].names) == 1: 809 if len(scms) == 1:
790 autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].names[0]) 810 autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].name)
791 revs.append(rev) 811 revs.append(rev)
792 if len(rev) > 10: 812 if len(rev) > 10:
793 rev = rev[:10] 813 rev = rev[:10]
@@ -808,13 +828,12 @@ def _get_srcrev(d, method_name='sortable_revision'):
808 seenautoinc = False 828 seenautoinc = False
809 for scm in scms: 829 for scm in scms:
810 ud = urldata[scm] 830 ud = urldata[scm]
811 for name in ud.names: 831 autoinc, rev = getattr(ud.method, method_name)(ud, d, ud.name)
812 autoinc, rev = getattr(ud.method, method_name)(ud, d, name) 832 revs.append(rev)
813 revs.append(rev) 833 seenautoinc = seenautoinc or autoinc
814 seenautoinc = seenautoinc or autoinc 834 if len(rev) > 10:
815 if len(rev) > 10: 835 rev = rev[:10]
816 rev = rev[:10] 836 name_to_rev[ud.name] = rev
817 name_to_rev[name] = rev
818 # Replace names by revisions in the SRCREV_FORMAT string. The approach used 837 # Replace names by revisions in the SRCREV_FORMAT string. The approach used
819 # here can handle names being prefixes of other names and names appearing 838 # here can handle names being prefixes of other names and names appearing
820 # as substrings in revisions (in which case the name should not be 839 # as substrings in revisions (in which case the name should not be
@@ -878,6 +897,7 @@ FETCH_EXPORT_VARS = ['HOME', 'PATH',
878 'AWS_SESSION_TOKEN', 897 'AWS_SESSION_TOKEN',
879 'GIT_CACHE_PATH', 898 'GIT_CACHE_PATH',
880 'REMOTE_CONTAINERS_IPC', 899 'REMOTE_CONTAINERS_IPC',
900 'GITHUB_TOKEN',
881 'SSL_CERT_DIR'] 901 'SSL_CERT_DIR']
882 902
883def get_fetcher_environment(d): 903def get_fetcher_environment(d):
@@ -1072,6 +1092,10 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
1072 # If that tarball is a local file:// we need to provide a symlink to it 1092 # If that tarball is a local file:// we need to provide a symlink to it
1073 dldir = ld.getVar("DL_DIR") 1093 dldir = ld.getVar("DL_DIR")
1074 1094
1095 if bb.utils.to_boolean(ld.getVar("BB_FETCH_PREMIRRORONLY")):
1096 ld = ld.createCopy()
1097 ld.setVar("BB_NO_NETWORK", "1")
1098
1075 if origud.mirrortarballs and os.path.basename(ud.localpath) in origud.mirrortarballs and os.path.basename(ud.localpath) != os.path.basename(origud.localpath): 1099 if origud.mirrortarballs and os.path.basename(ud.localpath) in origud.mirrortarballs and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
1076 # Create donestamp in old format to avoid triggering a re-download 1100 # Create donestamp in old format to avoid triggering a re-download
1077 if ud.donestamp: 1101 if ud.donestamp:
@@ -1093,7 +1117,10 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
1093 origud.method.build_mirror_data(origud, ld) 1117 origud.method.build_mirror_data(origud, ld)
1094 return origud.localpath 1118 return origud.localpath
1095 # Otherwise the result is a local file:// and we symlink to it 1119 # Otherwise the result is a local file:// and we symlink to it
1096 ensure_symlink(ud.localpath, origud.localpath) 1120 # This may also be a link to a shallow archive
1121 # When using shallow mode, add a symlink to the original fullshallow
1122 # path to ensure a valid symlink even in the `PREMIRRORS` case
1123 origud.method.update_mirror_links(ud, origud)
1097 update_stamp(origud, ld) 1124 update_stamp(origud, ld)
1098 return ud.localpath 1125 return ud.localpath
1099 1126
@@ -1127,25 +1154,6 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
1127 if ud.lockfile and ud.lockfile != origud.lockfile: 1154 if ud.lockfile and ud.lockfile != origud.lockfile:
1128 bb.utils.unlockfile(lf) 1155 bb.utils.unlockfile(lf)
1129 1156
1130
1131def ensure_symlink(target, link_name):
1132 if not os.path.exists(link_name):
1133 dirname = os.path.dirname(link_name)
1134 bb.utils.mkdirhier(dirname)
1135 if os.path.islink(link_name):
1136 # Broken symbolic link
1137 os.unlink(link_name)
1138
1139 # In case this is executing without any file locks held (as is
1140 # the case for file:// URLs), two tasks may end up here at the
1141 # same time, in which case we do not want the second task to
1142 # fail when the link has already been created by the first task.
1143 try:
1144 os.symlink(target, link_name)
1145 except FileExistsError:
1146 pass
1147
1148
1149def try_mirrors(fetch, d, origud, mirrors, check = False): 1157def try_mirrors(fetch, d, origud, mirrors, check = False):
1150 """ 1158 """
1151 Try to use a mirrored version of the sources. 1159 Try to use a mirrored version of the sources.
@@ -1174,7 +1182,7 @@ def trusted_network(d, url):
1174 if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")): 1182 if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")):
1175 return True 1183 return True
1176 1184
1177 pkgname = d.expand(d.getVar('PN', False)) 1185 pkgname = d.getVar('PN')
1178 trusted_hosts = None 1186 trusted_hosts = None
1179 if pkgname: 1187 if pkgname:
1180 trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False) 1188 trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False)
@@ -1227,20 +1235,17 @@ def srcrev_internal_helper(ud, d, name):
1227 if srcrev and srcrev != "INVALID": 1235 if srcrev and srcrev != "INVALID":
1228 break 1236 break
1229 1237
1230 if 'rev' in ud.parm and 'tag' in ud.parm: 1238 if 'rev' in ud.parm:
1231 raise FetchError("Please specify a ;rev= parameter or a ;tag= parameter in the url %s but not both." % (ud.url)) 1239 parmrev = ud.parm['rev']
1232
1233 if 'rev' in ud.parm or 'tag' in ud.parm:
1234 if 'rev' in ud.parm:
1235 parmrev = ud.parm['rev']
1236 else:
1237 parmrev = ud.parm['tag']
1238 if srcrev == "INVALID" or not srcrev: 1240 if srcrev == "INVALID" or not srcrev:
1239 return parmrev 1241 return parmrev
1240 if srcrev != parmrev: 1242 if srcrev != parmrev:
1241 raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please specify one valid value" % (srcrev, parmrev)) 1243 raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please specify one valid value" % (srcrev, parmrev))
1242 return parmrev 1244 return parmrev
1243 1245
1246 if 'tag' in ud.parm and (srcrev == "INVALID" or not srcrev):
1247 return ud.parm['tag']
1248
1244 if srcrev == "INVALID" or not srcrev: 1249 if srcrev == "INVALID" or not srcrev:
1245 raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url) 1250 raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url)
1246 if srcrev == "AUTOINC": 1251 if srcrev == "AUTOINC":
@@ -1263,7 +1268,7 @@ def get_checksum_file_list(d):
1263 found = False 1268 found = False
1264 paths = ud.method.localfile_searchpaths(ud, d) 1269 paths = ud.method.localfile_searchpaths(ud, d)
1265 for f in paths: 1270 for f in paths:
1266 pth = ud.decodedurl 1271 pth = ud.path
1267 if os.path.exists(f): 1272 if os.path.exists(f):
1268 found = True 1273 found = True
1269 filelist.append(f + ":" + str(os.path.exists(f))) 1274 filelist.append(f + ":" + str(os.path.exists(f)))
@@ -1308,23 +1313,28 @@ class FetchData(object):
1308 self.setup = False 1313 self.setup = False
1309 1314
1310 def configure_checksum(checksum_id): 1315 def configure_checksum(checksum_id):
1316 checksum_plain_name = "%ssum" % checksum_id
1311 if "name" in self.parm: 1317 if "name" in self.parm:
1312 checksum_name = "%s.%ssum" % (self.parm["name"], checksum_id) 1318 checksum_name = "%s.%ssum" % (self.parm["name"], checksum_id)
1313 else: 1319 else:
1314 checksum_name = "%ssum" % checksum_id 1320 checksum_name = checksum_plain_name
1315
1316 setattr(self, "%s_name" % checksum_id, checksum_name)
1317 1321
1318 if checksum_name in self.parm: 1322 if checksum_name in self.parm:
1319 checksum_expected = self.parm[checksum_name] 1323 checksum_expected = self.parm[checksum_name]
1320 elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3", "az", "crate", "gs"]: 1324 elif checksum_plain_name in self.parm:
1325 checksum_expected = self.parm[checksum_plain_name]
1326 checksum_name = checksum_plain_name
1327 elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3", "az", "crate", "gs", "gomod", "npm"]:
1321 checksum_expected = None 1328 checksum_expected = None
1322 else: 1329 else:
1323 checksum_expected = d.getVarFlag("SRC_URI", checksum_name) 1330 checksum_expected = d.getVarFlag("SRC_URI", checksum_name)
1324 1331
1332 setattr(self, "%s_name" % checksum_id, checksum_name)
1325 setattr(self, "%s_expected" % checksum_id, checksum_expected) 1333 setattr(self, "%s_expected" % checksum_id, checksum_expected)
1326 1334
1327 self.names = self.parm.get("name",'default').split(',') 1335 self.name = self.parm.get("name",'default')
1336 if "," in self.name:
1337 raise ParameterError("The fetcher no longer supports multiple name parameters in a single url", self.url)
1328 1338
1329 self.method = None 1339 self.method = None
1330 for m in methods: 1340 for m in methods:
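
configure_checksum() gains a fallback so that a plain "<algo>sum" parameter applies even when the URL also carries a name, while "goh1" (the Go module hash) joins CHECKSUM_LIST and the "gomod" and "npm" schemes join those that default to SRC_URI varflags. The resulting precedence, as a sketch:

    def expected_checksum(parm, checksum_id, varflag_lookup=None):
        # Precedence implemented above: named parameter, then the plain
        # parameter (the new fallback), then the SRC_URI varflag for
        # remote schemes, else no expected checksum.
        plain = "%ssum" % checksum_id                    # e.g. "sha256sum"
        named = "%s.%s" % (parm["name"], plain) if "name" in parm else plain
        if named in parm:                                # ;name=foo;foo.sha256sum=...
            return parm[named]
        if plain in parm:                                # ;sha256sum=...
            return parm[plain]
        if varflag_lookup:                               # remote schemes only
            return varflag_lookup(named)                 # d.getVarFlag("SRC_URI", named)
        return None

The single-name enforcement in the same hunk is related housekeeping: with one name per URL, checksum parameters and revisions (ud.name here, ud.revision in the next hunk) no longer need list handling.
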
@@ -1376,13 +1386,7 @@ class FetchData(object):
1376 self.lockfile = basepath + '.lock' 1386 self.lockfile = basepath + '.lock'
1377 1387
1378 def setup_revisions(self, d): 1388 def setup_revisions(self, d):
1379 self.revisions = {} 1389 self.revision = srcrev_internal_helper(self, d, self.name)
1380 for name in self.names:
1381 self.revisions[name] = srcrev_internal_helper(self, d, name)
1382
1383 # add compatibility code for non name specified case
1384 if len(self.names) == 1:
1385 self.revision = self.revisions[self.names[0]]
1386 1390
1387 def setup_localpath(self, d): 1391 def setup_localpath(self, d):
1388 if not self.localpath: 1392 if not self.localpath:
@@ -1510,7 +1514,7 @@ class FetchMethod(object):
1510 (file, urldata.parm.get('unpack'))) 1514 (file, urldata.parm.get('unpack')))
1511 1515
1512 base, ext = os.path.splitext(file) 1516 base, ext = os.path.splitext(file)
1513 if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz']: 1517 if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz', '.zst']:
1514 efile = os.path.join(rootdir, os.path.basename(base)) 1518 efile = os.path.join(rootdir, os.path.basename(base))
1515 else: 1519 else:
1516 efile = file 1520 efile = file
@@ -1569,11 +1573,11 @@ class FetchMethod(object):
1569 datafile = None 1573 datafile = None
1570 if output: 1574 if output:
1571 for line in output.decode().splitlines(): 1575 for line in output.decode().splitlines():
1572 if line.startswith('data.tar.'): 1576 if line.startswith('data.tar.') or line == 'data.tar':
1573 datafile = line 1577 datafile = line
1574 break 1578 break
1575 else: 1579 else:
1576 raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar.* file", urldata.url) 1580 raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar* file", urldata.url)
1577 else: 1581 else:
1578 raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url) 1582 raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url)
1579 cmd = 'ar x %s %s && %s -p -f %s && rm %s' % (file, datafile, tar_cmd, datafile, datafile) 1583 cmd = 'ar x %s %s && %s -p -f %s && rm %s' % (file, datafile, tar_cmd, datafile, datafile)
@@ -1606,7 +1610,7 @@ class FetchMethod(object):
1606 if urlpath.find("/") != -1: 1610 if urlpath.find("/") != -1:
1607 destdir = urlpath.rsplit("/", 1)[0] + '/' 1611 destdir = urlpath.rsplit("/", 1)[0] + '/'
1608 bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir)) 1612 bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir))
1609 cmd = 'cp -fpPRH "%s" "%s"' % (file, destdir) 1613 cmd = 'cp --force --preserve=timestamps --no-dereference --recursive -H "%s" "%s"' % (file, destdir)
1610 else: 1614 else:
1611 urldata.unpack_tracer.unpack("archive-extract", unpackdir) 1615 urldata.unpack_tracer.unpack("archive-extract", unpackdir)
1612 1616
@@ -1635,6 +1639,28 @@ class FetchMethod(object):
1635 """ 1639 """
1636 bb.utils.remove(urldata.localpath) 1640 bb.utils.remove(urldata.localpath)
1637 1641
1642 def ensure_symlink(self, target, link_name):
1643 if not os.path.exists(link_name):
1644 dirname = os.path.dirname(link_name)
1645 bb.utils.mkdirhier(dirname)
1646 if os.path.islink(link_name):
1647 # Broken symbolic link
1648 os.unlink(link_name)
1649
1650 # In case this is executing without any file locks held (as is
1651 # the case for file:// URLs), two tasks may end up here at the
1652 # same time, in which case we do not want the second task to
1653 # fail when the link has already been created by the first task.
1654 try:
1655 os.symlink(target, link_name)
1656 except FileExistsError:
1657 pass
1658
1659 def update_mirror_links(self, ud, origud):
1660 # For local file:// results, create a symlink to them
1661 # This may also be a link to a shallow archive
1662 self.ensure_symlink(ud.localpath, origud.localpath)
1663
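The ensure_symlink() helper added above tolerates a creation race: for file:// URLs no lock is held, so two tasks can try to create the same link at once. A minimal sketch of the same pattern, with symlink_quietly as a hypothetical name, usable wherever unsynchronized processes may create one symlink:

import os

def symlink_quietly(target, link_name):
    try:
        os.symlink(target, link_name)
    except FileExistsError:
        # Another process won the race; the link is already in place.
        pass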
1638 def try_premirror(self, urldata, d): 1664 def try_premirror(self, urldata, d):
1639 """ 1665 """
1640 Should premirrors be used? 1666 Should premirrors be used?
@@ -1662,13 +1688,13 @@ class FetchMethod(object):
1662 if not hasattr(self, "_latest_revision"): 1688 if not hasattr(self, "_latest_revision"):
1663 raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url) 1689 raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url)
1664 1690
1665 revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
1666 key = self.generate_revision_key(ud, d, name) 1691 key = self.generate_revision_key(ud, d, name)
1667 try: 1692
1668 return revs[key] 1693 rev = _revisions_cache.get_rev(key)
1669 except KeyError: 1694 if rev is None:
1670 revs[key] = rev = self._latest_revision(ud, d, name) 1695 rev = self._latest_revision(ud, d, name)
1671 return rev 1696 _revisions_cache.set_rev(key, rev)
1697 return rev
1672 1698
1673 def sortable_revision(self, ud, d, name): 1699 def sortable_revision(self, ud, d, name):
1674 latest_rev = self._build_revision(ud, d, name) 1700 latest_rev = self._build_revision(ud, d, name)
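The hunk above swaps the persist_data-backed lookup for a cache-or-compute flow. A sketch of that flow, with RevisionsCache as an illustrative in-memory stand-in for the real _revisions_cache object defined elsewhere in bb.fetch2:

class RevisionsCache:
    """Illustrative stand-in for the real revisions cache."""
    def __init__(self):
        self._revs = {}

    def get_rev(self, key):
        return self._revs.get(key)

    def set_rev(self, key, rev):
        self._revs[key] = rev

_revisions_cache = RevisionsCache()

def cached_latest_revision(key, compute_latest):
    # Consult the cache first; only compute (hit the network) on a miss.
    rev = _revisions_cache.get_rev(key)
    if rev is None:
        rev = compute_latest()
        _revisions_cache.set_rev(key, rev)
    return rev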
@@ -1806,7 +1832,7 @@ class Fetch(object):
1806 self.ud[url] = FetchData(url, self.d) 1832 self.ud[url] = FetchData(url, self.d)
1807 1833
1808 self.ud[url].setup_localpath(self.d) 1834 self.ud[url].setup_localpath(self.d)
1809 return self.d.expand(self.ud[url].localpath) 1835 return self.ud[url].localpath
1810 1836
1811 def localpaths(self): 1837 def localpaths(self):
1812 """ 1838 """
@@ -1859,25 +1885,28 @@ class Fetch(object):
1859 logger.debug(str(e)) 1885 logger.debug(str(e))
1860 done = False 1886 done = False
1861 1887
1888 d = self.d
1862 if premirroronly: 1889 if premirroronly:
1863 self.d.setVar("BB_NO_NETWORK", "1") 1890 # Only disable the network in a copy
1891 d = bb.data.createCopy(self.d)
1892 d.setVar("BB_NO_NETWORK", "1")
1864 1893
1865 firsterr = None 1894 firsterr = None
1866 verified_stamp = False 1895 verified_stamp = False
1867 if done: 1896 if done:
1868 verified_stamp = m.verify_donestamp(ud, self.d) 1897 verified_stamp = m.verify_donestamp(ud, d)
1869 if not done and (not verified_stamp or m.need_update(ud, self.d)): 1898 if not done and (not verified_stamp or m.need_update(ud, d)):
1870 try: 1899 try:
1871 if not trusted_network(self.d, ud.url): 1900 if not trusted_network(d, ud.url):
1872 raise UntrustedUrl(ud.url) 1901 raise UntrustedUrl(ud.url)
1873 logger.debug("Trying Upstream") 1902 logger.debug("Trying Upstream")
1874 m.download(ud, self.d) 1903 m.download(ud, d)
1875 if hasattr(m, "build_mirror_data"): 1904 if hasattr(m, "build_mirror_data"):
1876 m.build_mirror_data(ud, self.d) 1905 m.build_mirror_data(ud, d)
1877 done = True 1906 done = True
1878 # early checksum verify, so that if the checksum mismatched, 1907 # early checksum verify, so that if the checksum mismatched,
1879 # the fetcher still has a chance to fetch from a mirror 1908 # the fetcher still has a chance to fetch from a mirror
1880 m.update_donestamp(ud, self.d) 1909 m.update_donestamp(ud, d)
1881 1910
1882 except bb.fetch2.NetworkAccess: 1911 except bb.fetch2.NetworkAccess:
1883 raise 1912 raise
@@ -1896,17 +1925,17 @@ class Fetch(object):
1896 firsterr = e 1925 firsterr = e
1897 # Remove any incomplete fetch 1926 # Remove any incomplete fetch
1898 if not verified_stamp and m.cleanup_upon_failure(): 1927 if not verified_stamp and m.cleanup_upon_failure():
1899 m.clean(ud, self.d) 1928 m.clean(ud, d)
1900 logger.debug("Trying MIRRORS") 1929 logger.debug("Trying MIRRORS")
1901 mirrors = mirror_from_string(self.d.getVar('MIRRORS')) 1930 mirrors = mirror_from_string(d.getVar('MIRRORS'))
1902 done = m.try_mirrors(self, ud, self.d, mirrors) 1931 done = m.try_mirrors(self, ud, d, mirrors)
1903 1932
1904 if not done or not m.done(ud, self.d): 1933 if not done or not m.done(ud, d):
1905 if firsterr: 1934 if firsterr:
1906 logger.error(str(firsterr)) 1935 logger.error(str(firsterr))
1907 raise FetchError("Unable to fetch URL from any source.", u) 1936 raise FetchError("Unable to fetch URL from any source.", u)
1908 1937
1909 m.update_donestamp(ud, self.d) 1938 m.update_donestamp(ud, d)
1910 1939
1911 except IOError as e: 1940 except IOError as e:
1912 if e.errno in [errno.ESTALE]: 1941 if e.errno in [errno.ESTALE]:
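Rather than mutating the fetcher's own datastore, the premirror-only path above now sets BB_NO_NETWORK on a copy, so the restriction cannot leak into later downloads. A minimal sketch of the pattern, assuming d is a BitBake datastore and network_restricted is a hypothetical helper name:

import bb.data

def network_restricted(d, premirroronly):
    # Mutating a copy leaves the caller's datastore untouched, so a
    # later fetch from the same datastore can still reach the network.
    if not premirroronly:
        return d
    d_copy = bb.data.createCopy(d)
    d_copy.setVar("BB_NO_NETWORK", "1")
    return d_copy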
@@ -2088,6 +2117,7 @@ from . import npmsw
2088from . import az 2117from . import az
2089from . import crate 2118from . import crate
2090from . import gcp 2119from . import gcp
2120from . import gomod
2091 2121
2092methods.append(local.Local()) 2122methods.append(local.Local())
2093methods.append(wget.Wget()) 2123methods.append(wget.Wget())
@@ -2110,3 +2140,5 @@ methods.append(npmsw.NpmShrinkWrap())
2110methods.append(az.Az()) 2140methods.append(az.Az())
2111methods.append(crate.Crate()) 2141methods.append(crate.Crate())
2112methods.append(gcp.GCP()) 2142methods.append(gcp.GCP())
2143methods.append(gomod.GoMod())
2144methods.append(gomod.GoModGit())
diff --git a/bitbake/lib/bb/fetch2/az.py b/bitbake/lib/bb/fetch2/az.py
index 3ccc594c22..1d3664f213 100644
--- a/bitbake/lib/bb/fetch2/az.py
+++ b/bitbake/lib/bb/fetch2/az.py
@@ -36,6 +36,8 @@ class Az(Wget):
36 36
37 az_sas = d.getVar('AZ_SAS') 37 az_sas = d.getVar('AZ_SAS')
38 if az_sas and az_sas not in ud.url: 38 if az_sas and az_sas not in ud.url:
39 if not az_sas.startswith('?'):
40 raise FetchError("AZ_SAS must start with a '?' character to mark the start of the query parameters.")
39 ud.url += az_sas 41 ud.url += az_sas
40 42
41 return Wget.checkstatus(self, fetch, ud, d, try_again) 43 return Wget.checkstatus(self, fetch, ud, d, try_again)
@@ -62,15 +64,18 @@ class Az(Wget):
62 az_sas = d.getVar('AZ_SAS') 64 az_sas = d.getVar('AZ_SAS')
63 65
64 if az_sas: 66 if az_sas:
67 if not az_sas.startswith('?'):
68 raise FetchError("AZ_SAS must start with a '?' character to mark the start of the query parameters.")
65 azuri = '%s%s%s%s' % ('https://', ud.host, ud.path, az_sas) 69 azuri = '%s%s%s%s' % ('https://', ud.host, ud.path, az_sas)
66 else: 70 else:
67 azuri = '%s%s%s' % ('https://', ud.host, ud.path) 71 azuri = '%s%s%s' % ('https://', ud.host, ud.path)
68 72
73 dldir = d.getVar("DL_DIR")
69 if os.path.exists(ud.localpath): 74 if os.path.exists(ud.localpath):
70 # file exists, but we didn't complete it... trying again. 75 # file exists, but we didn't complete it... trying again.
71 fetchcmd += d.expand(" -c -P ${DL_DIR} '%s'" % azuri) 76 fetchcmd += " -c -P %s '%s'" % (dldir, azuri)
72 else: 77 else:
73 fetchcmd += d.expand(" -P ${DL_DIR} '%s'" % azuri) 78 fetchcmd += " -P %s '%s'" % (dldir, azuri)
74 79
75 try: 80 try:
76 self._runwget(ud, d, fetchcmd, False) 81 self._runwget(ud, d, fetchcmd, False)
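Both az.py hunks enforce the same invariant: the AZ_SAS token is appended verbatim to the URL, so it must begin with '?' to form a valid query string. A hypothetical helper (build_az_uri is not part of the patch) showing the check and the resulting URI:

def build_az_uri(host, path, az_sas=None):
    # The SAS token is appended as-is, so it must carry its own '?'.
    if az_sas:
        if not az_sas.startswith('?'):
            raise ValueError("AZ_SAS must start with a '?' character to "
                             "mark the start of the query parameters")
        return "https://%s%s%s" % (host, path, az_sas)
    return "https://%s%s" % (host, path)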
diff --git a/bitbake/lib/bb/fetch2/clearcase.py b/bitbake/lib/bb/fetch2/clearcase.py
index 1a9c863769..17500daf95 100644
--- a/bitbake/lib/bb/fetch2/clearcase.py
+++ b/bitbake/lib/bb/fetch2/clearcase.py
@@ -108,7 +108,7 @@ class ClearCase(FetchMethod):
108 ud.module.replace("/", "."), 108 ud.module.replace("/", "."),
109 ud.label.replace("/", ".")) 109 ud.label.replace("/", "."))
110 110
111 ud.viewname = "%s-view%s" % (ud.identifier, d.getVar("DATETIME", d, True)) 111 ud.viewname = "%s-view%s" % (ud.identifier, d.getVar("DATETIME"))
112 ud.csname = "%s-config-spec" % (ud.identifier) 112 ud.csname = "%s-config-spec" % (ud.identifier)
113 ud.ccasedir = os.path.join(d.getVar("DL_DIR"), ud.type) 113 ud.ccasedir = os.path.join(d.getVar("DL_DIR"), ud.type)
114 ud.viewdir = os.path.join(ud.ccasedir, ud.viewname) 114 ud.viewdir = os.path.join(ud.ccasedir, ud.viewname)
@@ -130,8 +130,6 @@ class ClearCase(FetchMethod):
130 self.debug("configspecfile = %s" % ud.configspecfile) 130 self.debug("configspecfile = %s" % ud.configspecfile)
131 self.debug("localfile = %s" % ud.localfile) 131 self.debug("localfile = %s" % ud.localfile)
132 132
133 ud.localfile = os.path.join(d.getVar("DL_DIR"), ud.localfile)
134
135 def _build_ccase_command(self, ud, command): 133 def _build_ccase_command(self, ud, command):
136 """ 134 """
137 Build up a commandline based on ud 135 Build up a commandline based on ud
@@ -196,7 +194,7 @@ class ClearCase(FetchMethod):
196 194
197 def need_update(self, ud, d): 195 def need_update(self, ud, d):
198 if ("LATEST" in ud.label) or (ud.customspec and "LATEST" in ud.customspec): 196 if ("LATEST" in ud.label) or (ud.customspec and "LATEST" in ud.customspec):
199 ud.identifier += "-%s" % d.getVar("DATETIME",d, True) 197 ud.identifier += "-%s" % d.getVar("DATETIME")
200 return True 198 return True
201 if os.path.exists(ud.localpath): 199 if os.path.exists(ud.localpath):
202 return False 200 return False
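The clearcase.py changes drop a long-stale three-argument getVar() call; on current datastores expansion is the default. A sketch of the modern form (view_name is an illustrative helper, not part of the patch):

def view_name(d, identifier):
    # getVar() expands by default; pass expand=False only when the raw,
    # unexpanded value is wanted.
    return "%s-view%s" % (identifier, d.getVar("DATETIME"))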
diff --git a/bitbake/lib/bb/fetch2/crate.py b/bitbake/lib/bb/fetch2/crate.py
index 01d49435c3..e611736f06 100644
--- a/bitbake/lib/bb/fetch2/crate.py
+++ b/bitbake/lib/bb/fetch2/crate.py
@@ -70,6 +70,7 @@ class Crate(Wget):
70 host = 'crates.io/api/v1/crates' 70 host = 'crates.io/api/v1/crates'
71 71
72 ud.url = "https://%s/%s/%s/download" % (host, name, version) 72 ud.url = "https://%s/%s/%s/download" % (host, name, version)
73 ud.versionsurl = "https://%s/%s/versions" % (host, name)
73 ud.parm['downloadfilename'] = "%s-%s.crate" % (name, version) 74 ud.parm['downloadfilename'] = "%s-%s.crate" % (name, version)
74 if 'name' not in ud.parm: 75 if 'name' not in ud.parm:
75 ud.parm['name'] = '%s-%s' % (name, version) 76 ud.parm['name'] = '%s-%s' % (name, version)
@@ -139,3 +140,11 @@ class Crate(Wget):
139 mdpath = os.path.join(bbpath, cratepath, mdfile) 140 mdpath = os.path.join(bbpath, cratepath, mdfile)
140 with open(mdpath, "w") as f: 141 with open(mdpath, "w") as f:
141 json.dump(metadata, f) 142 json.dump(metadata, f)
143
144 def latest_versionstring(self, ud, d):
145 from functools import cmp_to_key
146 json_data = json.loads(self._fetch_index(ud.versionsurl, ud, d))
147 versions = [(0, i["num"], "") for i in json_data["versions"]]
148 versions = sorted(versions, key=cmp_to_key(bb.utils.vercmp))
149
150 return (versions[-1][1], "")
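Crate.latest_versionstring() above sorts with bb.utils.vercmp, which compares (epoch, version, revision) triples, so each crates.io version string is wrapped in a triple first. The same idea in isolation (newest_version is an illustrative name):

from functools import cmp_to_key

import bb.utils

def newest_version(version_strings):
    # Wrap each bare version in an (epoch, version, revision) triple,
    # the shape bb.utils.vercmp() compares.
    triples = [(0, v, "") for v in version_strings]
    triples.sort(key=cmp_to_key(bb.utils.vercmp))
    return triples[-1][1]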
diff --git a/bitbake/lib/bb/fetch2/gcp.py b/bitbake/lib/bb/fetch2/gcp.py
index f40ce2eaa5..86546d40bf 100644
--- a/bitbake/lib/bb/fetch2/gcp.py
+++ b/bitbake/lib/bb/fetch2/gcp.py
@@ -46,8 +46,7 @@ class GCP(FetchMethod):
46 else: 46 else:
47 ud.basename = os.path.basename(ud.path) 47 ud.basename = os.path.basename(ud.path)
48 48
49 ud.localfile = d.expand(urllib.parse.unquote(ud.basename)) 49 ud.localfile = ud.basename
50 ud.basecmd = "gsutil stat"
51 50
52 def get_gcp_client(self): 51 def get_gcp_client(self):
53 from google.cloud import storage 52 from google.cloud import storage
@@ -58,17 +57,20 @@ class GCP(FetchMethod):
58 Fetch urls using the GCP API. 57 Fetch urls using the GCP API.
59 Assumes localpath was called first. 58 Assumes localpath was called first.
60 """ 59 """
60 from google.api_core.exceptions import NotFound
61 logger.debug2(f"Trying to download gs://{ud.host}{ud.path} to {ud.localpath}") 61 logger.debug2(f"Trying to download gs://{ud.host}{ud.path} to {ud.localpath}")
62 if self.gcp_client is None: 62 if self.gcp_client is None:
63 self.get_gcp_client() 63 self.get_gcp_client()
64 64
65 bb.fetch2.check_network_access(d, ud.basecmd, f"gs://{ud.host}{ud.path}") 65 bb.fetch2.check_network_access(d, "blob.download_to_filename", f"gs://{ud.host}{ud.path}")
66 runfetchcmd("%s %s" % (ud.basecmd, f"gs://{ud.host}{ud.path}"), d)
67 66
68 # Path sometimes has leading slash, so strip it 67 # Path sometimes has leading slash, so strip it
69 path = ud.path.lstrip("/") 68 path = ud.path.lstrip("/")
70 blob = self.gcp_client.bucket(ud.host).blob(path) 69 blob = self.gcp_client.bucket(ud.host).blob(path)
71 blob.download_to_filename(ud.localpath) 70 try:
71 blob.download_to_filename(ud.localpath)
72 except NotFound:
73 raise FetchError("The GCP API threw a NotFound exception")
72 74
73 # Additional sanity checks copied from the wget class (although there 75 # Additional sanity checks copied from the wget class (although there
74 # are no known issues which mean these are required, treat the GCP API 76 # are no known issues which mean these are required, treat the GCP API
@@ -90,8 +92,7 @@ class GCP(FetchMethod):
90 if self.gcp_client is None: 92 if self.gcp_client is None:
91 self.get_gcp_client() 93 self.get_gcp_client()
92 94
93 bb.fetch2.check_network_access(d, ud.basecmd, f"gs://{ud.host}{ud.path}") 95 bb.fetch2.check_network_access(d, "gcp_client.bucket(ud.host).blob(path).exists()", f"gs://{ud.host}{ud.path}")
94 runfetchcmd("%s %s" % (ud.basecmd, f"gs://{ud.host}{ud.path}"), d)
95 96
96 # Path sometimes has leading slash, so strip it 97 # Path sometimes has leading slash, so strip it
97 path = ud.path.lstrip("/") 98 path = ud.path.lstrip("/")
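The gcp.py hunks replace the gsutil round-trips with direct SDK calls and translate the SDK's NotFound into the fetcher's own error type. A condensed sketch of the download path under those assumptions (download_blob is a hypothetical helper):

import bb.fetch2
from google.api_core.exceptions import NotFound
from google.cloud import storage

def download_blob(bucket_name, path, localpath):
    client = storage.Client()
    # Bucket paths sometimes carry a leading slash; the API does not.
    blob = client.bucket(bucket_name).blob(path.lstrip("/"))
    try:
        blob.download_to_filename(localpath)
    except NotFound:
        # Surface a uniform fetcher error instead of the SDK exception.
        raise bb.fetch2.FetchError("The GCP API threw a NotFound exception")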
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py
index c7ff769fdf..14ec45a3f6 100644
--- a/bitbake/lib/bb/fetch2/git.py
+++ b/bitbake/lib/bb/fetch2/git.py
@@ -9,15 +9,6 @@ Supported SRC_URI options are:
9- branch 9- branch
10 The git branch to retrieve from. The default is "master" 10 The git branch to retrieve from. The default is "master"
11 11
12 This option also supports multiple branch fetching, with branches
13 separated by commas. In multiple branches case, the name option
14 must have the same number of names to match the branches, which is
15 used to specify the SRC_REV for the branch
16 e.g:
17 SRC_URI="git://some.host/somepath;branch=branchX,branchY;name=nameX,nameY"
18 SRCREV_nameX = "xxxxxxxxxxxxxxxxxxxx"
19 SRCREV_nameY = "YYYYYYYYYYYYYYYYYYYY"
20
21- tag 12- tag
22 The git tag to retrieve. The default is "master" 13 The git tag to retrieve. The default is "master"
23 14
@@ -81,6 +72,7 @@ import shlex
81import shutil 72import shutil
82import subprocess 73import subprocess
83import tempfile 74import tempfile
75import urllib
84import bb 76import bb
85import bb.progress 77import bb.progress
86from contextlib import contextmanager 78from contextlib import contextmanager
@@ -190,14 +182,11 @@ class Git(FetchMethod):
190 ud.bareclone = ud.parm.get("bareclone","0") == "1" 182 ud.bareclone = ud.parm.get("bareclone","0") == "1"
191 if ud.bareclone: 183 if ud.bareclone:
192 ud.nocheckout = 1 184 ud.nocheckout = 1
193 185
194 ud.unresolvedrev = {} 186 ud.unresolvedrev = ""
195 branches = ud.parm.get("branch", "").split(',') 187 ud.branch = ud.parm.get("branch", "")
196 if branches == [""] and not ud.nobranch: 188 if not ud.branch and not ud.nobranch:
197 bb.warn("URL: %s does not set any branch parameter. The future default branch used by tools and repositories is uncertain and we will therefore soon require this is set in all git urls." % ud.url) 189 raise bb.fetch2.ParameterError("The url must set a branch parameter or set nobranch=1.", ud.url)
198 branches = ["master"]
199 if len(branches) != len(ud.names):
200 raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url)
201 190
202 ud.noshared = d.getVar("BB_GIT_NOSHARED") == "1" 191 ud.noshared = d.getVar("BB_GIT_NOSHARED") == "1"
203 192
@@ -207,8 +196,11 @@ class Git(FetchMethod):
207 if ud.bareclone: 196 if ud.bareclone:
208 ud.cloneflags += " --mirror" 197 ud.cloneflags += " --mirror"
209 198
199 ud.shallow_skip_fast = False
210 ud.shallow = d.getVar("BB_GIT_SHALLOW") == "1" 200 ud.shallow = d.getVar("BB_GIT_SHALLOW") == "1"
211 ud.shallow_extra_refs = (d.getVar("BB_GIT_SHALLOW_EXTRA_REFS") or "").split() 201 ud.shallow_extra_refs = (d.getVar("BB_GIT_SHALLOW_EXTRA_REFS") or "").split()
202 if 'tag' in ud.parm:
203 ud.shallow_extra_refs.append("refs/tags/" + ud.parm['tag'])
212 204
213 depth_default = d.getVar("BB_GIT_SHALLOW_DEPTH") 205 depth_default = d.getVar("BB_GIT_SHALLOW_DEPTH")
214 if depth_default is not None: 206 if depth_default is not None:
@@ -225,32 +217,27 @@ class Git(FetchMethod):
225 217
226 revs_default = d.getVar("BB_GIT_SHALLOW_REVS") 218 revs_default = d.getVar("BB_GIT_SHALLOW_REVS")
227 ud.shallow_revs = [] 219 ud.shallow_revs = []
228 ud.branches = {} 220
229 for pos, name in enumerate(ud.names): 221 ud.unresolvedrev = ud.branch
230 branch = branches[pos] 222
231 ud.branches[name] = branch 223 shallow_depth = d.getVar("BB_GIT_SHALLOW_DEPTH_%s" % ud.name)
232 ud.unresolvedrev[name] = branch 224 if shallow_depth is not None:
233 225 try:
234 shallow_depth = d.getVar("BB_GIT_SHALLOW_DEPTH_%s" % name) 226 shallow_depth = int(shallow_depth or 0)
235 if shallow_depth is not None: 227 except ValueError:
236 try: 228 raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (ud.name, shallow_depth))
237 shallow_depth = int(shallow_depth or 0) 229 else:
238 except ValueError: 230 if shallow_depth < 0:
239 raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (name, shallow_depth)) 231 raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (ud.name, shallow_depth))
240 else: 232 ud.shallow_depths[ud.name] = shallow_depth
241 if shallow_depth < 0: 233
242 raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (name, shallow_depth)) 234 revs = d.getVar("BB_GIT_SHALLOW_REVS_%s" % ud.name)
243 ud.shallow_depths[name] = shallow_depth 235 if revs is not None:
244 236 ud.shallow_revs.extend(revs.split())
245 revs = d.getVar("BB_GIT_SHALLOW_REVS_%s" % name) 237 elif revs_default is not None:
246 if revs is not None: 238 ud.shallow_revs.extend(revs_default.split())
247 ud.shallow_revs.extend(revs.split()) 239
248 elif revs_default is not None: 240 if ud.shallow and not ud.shallow_revs and ud.shallow_depths[ud.name] == 0:
249 ud.shallow_revs.extend(revs_default.split())
250
251 if (ud.shallow and
252 not ud.shallow_revs and
253 all(ud.shallow_depths[n] == 0 for n in ud.names)):
254 # Shallow disabled for this URL 241 # Shallow disabled for this URL
255 ud.shallow = False 242 ud.shallow = False
256 243
@@ -259,10 +246,9 @@ class Git(FetchMethod):
259 # rev of this repository. This will get resolved into a revision 246 # rev of this repository. This will get resolved into a revision
260 # later. If an actual revision happens to have also been provided 247 # later. If an actual revision happens to have also been provided
261 # then this setting will be overridden. 248 # then this setting will be overridden.
262 for name in ud.names: 249 ud.unresolvedrev = 'HEAD'
263 ud.unresolvedrev[name] = 'HEAD'
264 250
265 ud.basecmd = d.getVar("FETCHCMD_git") or "git -c gc.autoDetach=false -c core.pager=cat -c safe.bareRepository=all" 251 ud.basecmd = d.getVar("FETCHCMD_git") or "git -c gc.autoDetach=false -c core.pager=cat -c safe.bareRepository=all -c clone.defaultRemoteName=origin"
266 252
267 write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0" 253 write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0"
268 ud.write_tarballs = write_tarballs != "0" or ud.rebaseable 254 ud.write_tarballs = write_tarballs != "0" or ud.rebaseable
@@ -270,12 +256,11 @@ class Git(FetchMethod):
270 256
271 ud.setup_revisions(d) 257 ud.setup_revisions(d)
272 258
273 for name in ud.names: 259 # Ensure any revision that doesn't look like a SHA-1 is translated into one
274 # Ensure any revision that doesn't look like a SHA-1 is translated into one 260 if not sha1_re.match(ud.revision or ''):
275 if not sha1_re.match(ud.revisions[name] or ''): 261 if ud.revision:
276 if ud.revisions[name]: 262 ud.unresolvedrev = ud.revision
277 ud.unresolvedrev[name] = ud.revisions[name] 263 ud.revision = self.latest_revision(ud, d, ud.name)
278 ud.revisions[name] = self.latest_revision(ud, d, name)
279 264
280 gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.').replace(' ','_').replace('(', '_').replace(')', '_')) 265 gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.').replace(' ','_').replace('(', '_').replace(')', '_'))
281 if gitsrcname.startswith('.'): 266 if gitsrcname.startswith('.'):
@@ -286,8 +271,7 @@ class Git(FetchMethod):
286 # upstream repo in the future, the mirror will remain intact and still 271 # upstream repo in the future, the mirror will remain intact and still
287 # contain the revision 272 # contain the revision
288 if ud.rebaseable: 273 if ud.rebaseable:
289 for name in ud.names: 274 gitsrcname = gitsrcname + '_' + ud.revision
290 gitsrcname = gitsrcname + '_' + ud.revisions[name]
291 275
292 dl_dir = d.getVar("DL_DIR") 276 dl_dir = d.getVar("DL_DIR")
293 gitdir = d.getVar("GITDIR") or (dl_dir + "/git2") 277 gitdir = d.getVar("GITDIR") or (dl_dir + "/git2")
@@ -305,15 +289,14 @@ class Git(FetchMethod):
305 if ud.shallow_revs: 289 if ud.shallow_revs:
306 tarballname = "%s_%s" % (tarballname, "_".join(sorted(ud.shallow_revs))) 290 tarballname = "%s_%s" % (tarballname, "_".join(sorted(ud.shallow_revs)))
307 291
308 for name, revision in sorted(ud.revisions.items()): 292 tarballname = "%s_%s" % (tarballname, ud.revision[:7])
309 tarballname = "%s_%s" % (tarballname, ud.revisions[name][:7]) 293 depth = ud.shallow_depths[ud.name]
310 depth = ud.shallow_depths[name] 294 if depth:
311 if depth: 295 tarballname = "%s-%s" % (tarballname, depth)
312 tarballname = "%s-%s" % (tarballname, depth)
313 296
314 shallow_refs = [] 297 shallow_refs = []
315 if not ud.nobranch: 298 if not ud.nobranch:
316 shallow_refs.extend(ud.branches.values()) 299 shallow_refs.append(ud.branch)
317 if ud.shallow_extra_refs: 300 if ud.shallow_extra_refs:
318 shallow_refs.extend(r.replace('refs/heads/', '').replace('*', 'ALL') for r in ud.shallow_extra_refs) 301 shallow_refs.extend(r.replace('refs/heads/', '').replace('*', 'ALL') for r in ud.shallow_extra_refs)
319 if shallow_refs: 302 if shallow_refs:
@@ -338,18 +321,19 @@ class Git(FetchMethod):
338 return True 321 return True
339 if ud.shallow and ud.write_shallow_tarballs and self.clonedir_need_shallow_revs(ud, d): 322 if ud.shallow and ud.write_shallow_tarballs and self.clonedir_need_shallow_revs(ud, d):
340 return True 323 return True
341 for name in ud.names: 324 if not self._contains_ref(ud, d, ud.name, ud.clonedir):
342 if not self._contains_ref(ud, d, name, ud.clonedir): 325 return True
343 return True
344 return False 326 return False
345 327
346 def lfs_need_update(self, ud, d): 328 def lfs_need_update(self, ud, d):
329 if not self._need_lfs(ud):
330 return False
331
347 if self.clonedir_need_update(ud, d): 332 if self.clonedir_need_update(ud, d):
348 return True 333 return True
349 334
350 for name in ud.names: 335 if not self._lfs_objects_downloaded(ud, d, ud.clonedir):
351 if not self._lfs_objects_downloaded(ud, d, name, ud.clonedir): 336 return True
352 return True
353 return False 337 return False
354 338
355 def clonedir_need_shallow_revs(self, ud, d): 339 def clonedir_need_shallow_revs(self, ud, d):
@@ -366,6 +350,13 @@ class Git(FetchMethod):
366 def tarball_need_update(self, ud): 350 def tarball_need_update(self, ud):
367 return ud.write_tarballs and not os.path.exists(ud.fullmirror) 351 return ud.write_tarballs and not os.path.exists(ud.fullmirror)
368 352
353 def update_mirror_links(self, ud, origud):
354 super().update_mirror_links(ud, origud)
355 # When using shallow mode, add a symlink to the original fullshallow
356 # path to ensure a valid symlink even in the `PREMIRRORS` case
357 if ud.shallow and not os.path.exists(origud.fullshallow):
358 self.ensure_symlink(ud.localpath, origud.fullshallow)
359
369 def try_premirror(self, ud, d): 360 def try_premirror(self, ud, d):
370 # If we don't do this, updating an existing checkout with only premirrors 361 # If we don't do this, updating an existing checkout with only premirrors
371 # is not possible 362 # is not possible
@@ -446,6 +437,24 @@ class Git(FetchMethod):
446 if ud.proto.lower() != 'file': 437 if ud.proto.lower() != 'file':
447 bb.fetch2.check_network_access(d, clone_cmd, ud.url) 438 bb.fetch2.check_network_access(d, clone_cmd, ud.url)
448 progresshandler = GitProgressHandler(d) 439 progresshandler = GitProgressHandler(d)
440
441 # Try creating a fast initial shallow clone
442 # Enabling ud.shallow_skip_fast will skip this
443 # If the Git error "Server does not allow request for unadvertised object"
444 # occurs, shallow_skip_fast is enabled automatically.
445 # This may happen if the Git server does not allow the request
446 # or if the Git client has issues with this functionality.
447 if ud.shallow and not ud.shallow_skip_fast:
448 try:
449 self.clone_shallow_with_tarball(ud, d)
450 # When the shallow clone has succeeded, use the shallow tarball
451 ud.localpath = ud.fullshallow
452 return
453 except:
454 logger.warning("Creating fast initial shallow clone failed, trying an initial regular clone now.")
455
456 # When the fast initial shallow clone is skipped or has failed:
457 # Try again with an initial regular clone
449 runfetchcmd(clone_cmd, d, log=progresshandler) 458 runfetchcmd(clone_cmd, d, log=progresshandler)
450 459
451 # Update the checkout if needed 460 # Update the checkout if needed
@@ -473,9 +482,8 @@ class Git(FetchMethod):
473 if exc.errno != errno.ENOENT: 482 if exc.errno != errno.ENOENT:
474 raise 483 raise
475 484
476 for name in ud.names: 485 if not self._contains_ref(ud, d, ud.name, ud.clonedir):
477 if not self._contains_ref(ud, d, name, ud.clonedir): 486 raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revision, ud.branch))
478 raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revisions[name], ud.branches[name]))
479 487
480 if ud.shallow and ud.write_shallow_tarballs: 488 if ud.shallow and ud.write_shallow_tarballs:
481 missing_rev = self.clonedir_need_shallow_revs(ud, d) 489 missing_rev = self.clonedir_need_shallow_revs(ud, d)
@@ -483,128 +491,168 @@ class Git(FetchMethod):
483 raise bb.fetch2.FetchError("Unable to find revision %s even from upstream" % missing_rev) 491 raise bb.fetch2.FetchError("Unable to find revision %s even from upstream" % missing_rev)
484 492
485 if self.lfs_need_update(ud, d): 493 if self.lfs_need_update(ud, d):
486 # Unpack temporary working copy, use it to run 'git checkout' to force pre-fetching 494 self.lfs_fetch(ud, d, ud.clonedir, ud.revision)
487 # of all LFS blobs needed at the srcrev.
488 #
489 # It would be nice to just do this inline here by running 'git-lfs fetch'
490 # on the bare clonedir, but that operation requires a working copy on some
491 # releases of Git LFS.
492 with tempfile.TemporaryDirectory(dir=d.getVar('DL_DIR')) as tmpdir:
493 # Do the checkout. This implicitly involves a Git LFS fetch.
494 Git.unpack(self, ud, tmpdir, d)
495
496 # Scoop up a copy of any stuff that Git LFS downloaded. Merge them into
497 # the bare clonedir.
498 #
499 # As this procedure is invoked repeatedly on incremental fetches as
500 # a recipe's SRCREV is bumped throughout its lifetime, this will
501 # result in a gradual accumulation of LFS blobs in <ud.clonedir>/lfs
502 # corresponding to all the blobs reachable from the different revs
503 # fetched across time.
504 #
505 # Only do this if the unpack resulted in a .git/lfs directory being
506 # created; this only happens if at least one blob needed to be
507 # downloaded.
508 if os.path.exists(os.path.join(ud.destdir, ".git", "lfs")):
509 runfetchcmd("tar -cf - lfs | tar -xf - -C %s" % ud.clonedir, d, workdir="%s/.git" % ud.destdir)
510
511 def build_mirror_data(self, ud, d):
512 495
513 # Create as a temp file and move atomically into position to avoid races 496 def lfs_fetch(self, ud, d, clonedir, revision, fetchall=False, progresshandler=None):
514 @contextmanager 497 """Helper method for fetching Git LFS data"""
515 def create_atomic(filename): 498 try:
516 fd, tfile = tempfile.mkstemp(dir=os.path.dirname(filename)) 499 if self._need_lfs(ud) and self._contains_lfs(ud, d, clonedir) and len(revision):
517 try: 500 self._ensure_git_lfs(d, ud)
518 yield tfile 501
519 umask = os.umask(0o666) 502 # Using worktree with the revision because .lfsconfig may exist
520 os.umask(umask) 503 worktree_add_cmd = "%s worktree add wt %s" % (ud.basecmd, revision)
521 os.chmod(tfile, (0o666 & ~umask)) 504 runfetchcmd(worktree_add_cmd, d, log=progresshandler, workdir=clonedir)
522 os.rename(tfile, filename) 505 lfs_fetch_cmd = "%s lfs fetch %s" % (ud.basecmd, "--all" if fetchall else "")
523 finally: 506 runfetchcmd(lfs_fetch_cmd, d, log=progresshandler, workdir=(clonedir + "/wt"))
524 os.close(fd) 507 worktree_rem_cmd = "%s worktree remove -f wt" % ud.basecmd
508 runfetchcmd(worktree_rem_cmd, d, log=progresshandler, workdir=clonedir)
509 except:
510 logger.warning("Fetching LFS did not succeed.")
511
512 @contextmanager
513 def create_atomic(self, filename):
514 """Create as a temp file and move atomically into position to avoid races"""
515 fd, tfile = tempfile.mkstemp(dir=os.path.dirname(filename))
516 try:
517 yield tfile
518 umask = os.umask(0o666)
519 os.umask(umask)
520 os.chmod(tfile, (0o666 & ~umask))
521 os.rename(tfile, filename)
522 finally:
523 os.close(fd)
525 524
525 def build_mirror_data(self, ud, d):
526 if ud.shallow and ud.write_shallow_tarballs: 526 if ud.shallow and ud.write_shallow_tarballs:
527 if not os.path.exists(ud.fullshallow): 527 if not os.path.exists(ud.fullshallow):
528 if os.path.islink(ud.fullshallow): 528 if os.path.islink(ud.fullshallow):
529 os.unlink(ud.fullshallow) 529 os.unlink(ud.fullshallow)
530 tempdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR')) 530 self.clone_shallow_with_tarball(ud, d)
531 shallowclone = os.path.join(tempdir, 'git')
532 try:
533 self.clone_shallow_local(ud, shallowclone, d)
534
535 logger.info("Creating tarball of git repository")
536 with create_atomic(ud.fullshallow) as tfile:
537 runfetchcmd("tar -czf %s ." % tfile, d, workdir=shallowclone)
538 runfetchcmd("touch %s.done" % ud.fullshallow, d)
539 finally:
540 bb.utils.remove(tempdir, recurse=True)
541 elif ud.write_tarballs and not os.path.exists(ud.fullmirror): 531 elif ud.write_tarballs and not os.path.exists(ud.fullmirror):
542 if os.path.islink(ud.fullmirror): 532 if os.path.islink(ud.fullmirror):
543 os.unlink(ud.fullmirror) 533 os.unlink(ud.fullmirror)
544 534
545 logger.info("Creating tarball of git repository") 535 logger.info("Creating tarball of git repository")
546 with create_atomic(ud.fullmirror) as tfile: 536 with self.create_atomic(ud.fullmirror) as tfile:
547 mtime = runfetchcmd("{} log --all -1 --format=%cD".format(ud.basecmd), d, 537 mtime = runfetchcmd("{} log --all -1 --format=%cD".format(ud.basecmd), d,
548 quiet=True, workdir=ud.clonedir) 538 quiet=True, workdir=ud.clonedir)
549 runfetchcmd("tar -czf %s --owner oe:0 --group oe:0 --mtime \"%s\" ." 539 runfetchcmd("tar -czf %s --owner oe:0 --group oe:0 --mtime \"%s\" ."
550 % (tfile, mtime), d, workdir=ud.clonedir) 540 % (tfile, mtime), d, workdir=ud.clonedir)
551 runfetchcmd("touch %s.done" % ud.fullmirror, d) 541 runfetchcmd("touch %s.done" % ud.fullmirror, d)
552 542
543 def clone_shallow_with_tarball(self, ud, d):
544 ret = False
545 tempdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR'))
546 shallowclone = os.path.join(tempdir, 'git')
547 try:
548 try:
549 self.clone_shallow_local(ud, shallowclone, d)
550 except:
551 logger.warning("Fast shallow clone failed, trying again with fast mode skipped.")
552 bb.utils.remove(tempdir, recurse=True)
553 os.mkdir(tempdir)
554 ud.shallow_skip_fast = True
555 self.clone_shallow_local(ud, shallowclone, d)
556 logger.info("Creating tarball of git repository")
557 with self.create_atomic(ud.fullshallow) as tfile:
558 runfetchcmd("tar -czf %s ." % tfile, d, workdir=shallowclone)
559 runfetchcmd("touch %s.done" % ud.fullshallow, d)
560 ret = True
561 finally:
562 bb.utils.remove(tempdir, recurse=True)
563
564 return ret
565
553 def clone_shallow_local(self, ud, dest, d): 566 def clone_shallow_local(self, ud, dest, d):
554 """Clone the repo and make it shallow. 567 """
568 Shallow fetch from ud.clonedir (${DL_DIR}/git2/<gitrepo> by default):
569 - For BB_GIT_SHALLOW_DEPTH: git fetch --depth <depth> rev
570 - For BB_GIT_SHALLOW_REVS: git fetch --shallow-exclude=<revs> rev
571 """
555 572
556 The upstream url of the new clone isn't set at this time, as it'll be 573 progresshandler = GitProgressHandler(d)
557 set correctly when unpacked.""" 574 repourl = self._get_repo_url(ud)
558 runfetchcmd("%s clone %s %s %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, dest), d) 575 bb.utils.mkdirhier(dest)
576 init_cmd = "%s init -q" % ud.basecmd
577 if ud.bareclone:
578 init_cmd += " --bare"
579 runfetchcmd(init_cmd, d, workdir=dest)
580 # Use repourl when creating a fast initial shallow clone
581 # Prefer already existing full bare clones if available
582 if not ud.shallow_skip_fast and not os.path.exists(ud.clonedir):
583 remote = shlex.quote(repourl)
584 else:
585 remote = ud.clonedir
586 runfetchcmd("%s remote add origin %s" % (ud.basecmd, remote), d, workdir=dest)
559 587
560 to_parse, shallow_branches = [], [] 588 # Check the histories which should be excluded
561 for name in ud.names: 589 shallow_exclude = ''
562 revision = ud.revisions[name] 590 for revision in ud.shallow_revs:
563 depth = ud.shallow_depths[name] 591 shallow_exclude += " --shallow-exclude=%s" % revision
564 if depth:
565 to_parse.append('%s~%d^{}' % (revision, depth - 1))
566 592
567 # For nobranch, we need a ref, otherwise the commits will be 593 revision = ud.revision
568 # removed, and for non-nobranch, we truncate the branch to our 594 depth = ud.shallow_depths[ud.name]
569 # srcrev, to avoid keeping unnecessary history beyond that.
570 branch = ud.branches[name]
571 if ud.nobranch:
572 ref = "refs/shallow/%s" % name
573 elif ud.bareclone:
574 ref = "refs/heads/%s" % branch
575 else:
576 ref = "refs/remotes/origin/%s" % branch
577 595
578 shallow_branches.append(ref) 596 # The --depth and --shallow-exclude can't be used together
579 runfetchcmd("%s update-ref %s %s" % (ud.basecmd, ref, revision), d, workdir=dest) 597 if depth and shallow_exclude:
598 raise bb.fetch2.FetchError("BB_GIT_SHALLOW_REVS is set, but BB_GIT_SHALLOW_DEPTH is not 0.")
599
600 # For nobranch, we need a ref, otherwise the commits will be
601 # removed, and for non-nobranch, we truncate the branch to our
602 # srcrev, to avoid keeping unnecessary history beyond that.
603 branch = ud.branch
604 if ud.nobranch:
605 ref = "refs/shallow/%s" % ud.name
606 elif ud.bareclone:
607 ref = "refs/heads/%s" % branch
608 else:
609 ref = "refs/remotes/origin/%s" % branch
610
611 fetch_cmd = "%s fetch origin %s" % (ud.basecmd, revision)
612 if depth:
613 fetch_cmd += " --depth %s" % depth
614
615 if shallow_exclude:
616 fetch_cmd += shallow_exclude
580 617
581 # Map srcrev+depths to revisions 618 # Advertise the revision for older git versions such as 2.25.1:
582 parsed_depths = runfetchcmd("%s rev-parse %s" % (ud.basecmd, " ".join(to_parse)), d, workdir=dest) 619 # error: Server does not allow request for unadvertised object.
620 # The ud.clonedir is a local temporary dir that will be removed when
621 # the fetch is done, so we can do anything to it.
622 adv_cmd = 'git branch -f advertise-%s %s' % (revision, revision)
623 if ud.shallow_skip_fast:
624 runfetchcmd(adv_cmd, d, workdir=ud.clonedir)
583 625
584 # Resolve specified revisions 626 runfetchcmd(fetch_cmd, d, workdir=dest)
585 parsed_revs = runfetchcmd("%s rev-parse %s" % (ud.basecmd, " ".join('"%s^{}"' % r for r in ud.shallow_revs)), d, workdir=dest) 627 runfetchcmd("%s update-ref %s %s" % (ud.basecmd, ref, revision), d, workdir=dest)
586 shallow_revisions = parsed_depths.splitlines() + parsed_revs.splitlines() 628 # Fetch Git LFS data
629 self.lfs_fetch(ud, d, dest, ud.revision)
587 630
588 # Apply extra ref wildcards 631 # Apply extra ref wildcards
589 all_refs = runfetchcmd('%s for-each-ref "--format=%%(refname)"' % ud.basecmd, 632 all_refs_remote = runfetchcmd("%s ls-remote origin 'refs/*'" % ud.basecmd, \
590 d, workdir=dest).splitlines() 633 d, workdir=dest).splitlines()
634 all_refs = []
635 for line in all_refs_remote:
636 all_refs.append(line.split()[-1])
637 extra_refs = []
591 for r in ud.shallow_extra_refs: 638 for r in ud.shallow_extra_refs:
592 if not ud.bareclone: 639 if not ud.bareclone:
593 r = r.replace('refs/heads/', 'refs/remotes/origin/') 640 r = r.replace('refs/heads/', 'refs/remotes/origin/')
594 641
595 if '*' in r: 642 if '*' in r:
596 matches = filter(lambda a: fnmatch.fnmatchcase(a, r), all_refs) 643 matches = filter(lambda a: fnmatch.fnmatchcase(a, r), all_refs)
597 shallow_branches.extend(matches) 644 extra_refs.extend(matches)
598 else: 645 else:
599 shallow_branches.append(r) 646 extra_refs.append(r)
600 647
601 # Make the repository shallow 648 for ref in extra_refs:
602 shallow_cmd = [self.make_shallow_path, '-s'] 649 ref_fetch = ref.replace('refs/heads/', '').replace('refs/remotes/origin/', '').replace('refs/tags/', '')
603 for b in shallow_branches: 650 runfetchcmd("%s fetch origin --depth 1 %s" % (ud.basecmd, ref_fetch), d, workdir=dest)
604 shallow_cmd.append('-r') 651 revision = runfetchcmd("%s rev-parse FETCH_HEAD" % ud.basecmd, d, workdir=dest)
605 shallow_cmd.append(b) 652 runfetchcmd("%s update-ref %s %s" % (ud.basecmd, ref, revision), d, workdir=dest)
606 shallow_cmd.extend(shallow_revisions) 653
607 runfetchcmd(subprocess.list2cmdline(shallow_cmd), d, workdir=dest) 654 # The url is local ud.clonedir, set it to upstream one
655 runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=dest)
608 656
609 def unpack(self, ud, destdir, d): 657 def unpack(self, ud, destdir, d):
610 """ unpack the downloaded src to destdir""" 658 """ unpack the downloaded src to destdir"""
@@ -612,7 +660,7 @@ class Git(FetchMethod):
612 subdir = ud.parm.get("subdir") 660 subdir = ud.parm.get("subdir")
613 subpath = ud.parm.get("subpath") 661 subpath = ud.parm.get("subpath")
614 readpathspec = "" 662 readpathspec = ""
615 def_destsuffix = "git/" 663 def_destsuffix = (d.getVar("BB_GIT_DEFAULT_DESTSUFFIX") or "git") + "/"
616 664
617 if subpath: 665 if subpath:
618 readpathspec = ":%s" % subpath 666 readpathspec = ":%s" % subpath
@@ -664,30 +712,43 @@ class Git(FetchMethod):
664 if not source_found: 712 if not source_found:
665 raise bb.fetch2.UnpackError("No up to date source found: " + "; ".join(source_error), ud.url) 713 raise bb.fetch2.UnpackError("No up to date source found: " + "; ".join(source_error), ud.url)
666 714
715 # If there is a tag parameter in the url and we also have a fixed srcrev, check the tag
716 # matches the revision
717 if 'tag' in ud.parm and sha1_re.match(ud.revision):
718 output = runfetchcmd("%s rev-list -n 1 %s" % (ud.basecmd, ud.parm['tag']), d, workdir=destdir)
719 output = output.strip()
720 if output != ud.revision:
721 # It is possible ud.revision is the revision on an annotated tag which won't match the output of rev-list
722 # If it resolves to the same thing there isn't a problem.
723 output2 = runfetchcmd("%s rev-list -n 1 %s" % (ud.basecmd, ud.revision), d, workdir=destdir)
724 output2 = output2.strip()
725 if output != output2:
726 raise bb.fetch2.FetchError("The revision the git tag '%s' resolved to didn't match the SRCREV in use (%s vs %s)" % (ud.parm['tag'], output, ud.revision), ud.url)
727
667 repourl = self._get_repo_url(ud) 728 repourl = self._get_repo_url(ud)
668 runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=destdir) 729 runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=destdir)
669 730
670 if self._contains_lfs(ud, d, destdir): 731 if self._contains_lfs(ud, d, destdir):
671 if need_lfs and not self._find_git_lfs(d): 732 if not need_lfs:
672 raise bb.fetch2.FetchError("Repository %s has LFS content, install git-lfs on host to download (or set lfs=0 to ignore it)" % (repourl))
673 elif not need_lfs:
674 bb.note("Repository %s has LFS content but it is not being fetched" % (repourl)) 733 bb.note("Repository %s has LFS content but it is not being fetched" % (repourl))
675 else: 734 else:
735 self._ensure_git_lfs(d, ud)
736
676 runfetchcmd("%s lfs install --local" % ud.basecmd, d, workdir=destdir) 737 runfetchcmd("%s lfs install --local" % ud.basecmd, d, workdir=destdir)
677 738
678 if not ud.nocheckout: 739 if not ud.nocheckout:
679 if subpath: 740 if subpath:
680 runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d, 741 runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revision, readpathspec), d,
681 workdir=destdir) 742 workdir=destdir)
682 runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d, workdir=destdir) 743 runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d, workdir=destdir)
683 elif not ud.nobranch: 744 elif not ud.nobranch:
684 branchname = ud.branches[ud.names[0]] 745 branchname = ud.branch
685 runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \ 746 runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \
686 ud.revisions[ud.names[0]]), d, workdir=destdir) 747 ud.revision), d, workdir=destdir)
687 runfetchcmd("%s branch %s --set-upstream-to origin/%s" % (ud.basecmd, branchname, \ 748 runfetchcmd("%s branch %s --set-upstream-to origin/%s" % (ud.basecmd, branchname, \
688 branchname), d, workdir=destdir) 749 branchname), d, workdir=destdir)
689 else: 750 else:
690 runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=destdir) 751 runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revision), d, workdir=destdir)
691 752
692 return True 753 return True
693 754
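The new tag check above copes with annotated tags: the tag object's own hash differs from the commit it points at, so both the tag and the configured SRCREV are peeled with git rev-list -n 1 before comparing. The check in isolation, sketched with plain subprocess in place of runfetchcmd (tag_matches_revision is a hypothetical helper):

import subprocess

def tag_matches_revision(tag, revision, repo):
    def peel(ref):
        # rev-list -n 1 resolves either a commit or an annotated tag
        # down to the underlying commit hash.
        return subprocess.check_output(
            ["git", "rev-list", "-n", "1", ref],
            cwd=repo, text=True).strip()
    return peel(tag) == peel(revision)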
@@ -701,8 +762,13 @@ class Git(FetchMethod):
701 clonedir = os.path.realpath(ud.localpath) 762 clonedir = os.path.realpath(ud.localpath)
702 to_remove.append(clonedir) 763 to_remove.append(clonedir)
703 764
765 # Remove shallow mirror tarball
766 if ud.shallow:
767 to_remove.append(ud.fullshallow)
768 to_remove.append(ud.fullshallow + ".done")
769
704 for r in to_remove: 770 for r in to_remove:
705 if os.path.exists(r): 771 if os.path.exists(r) or os.path.islink(r):
706 bb.note('Removing %s' % r) 772 bb.note('Removing %s' % r)
707 bb.utils.remove(r, True) 773 bb.utils.remove(r, True)
708 774
@@ -713,10 +779,10 @@ class Git(FetchMethod):
713 cmd = "" 779 cmd = ""
714 if ud.nobranch: 780 if ud.nobranch:
715 cmd = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % ( 781 cmd = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (
716 ud.basecmd, ud.revisions[name]) 782 ud.basecmd, ud.revision)
717 else: 783 else:
718 cmd = "%s branch --contains %s --list %s 2> /dev/null | wc -l" % ( 784 cmd = "%s branch --contains %s --list %s 2> /dev/null | wc -l" % (
719 ud.basecmd, ud.revisions[name], ud.branches[name]) 785 ud.basecmd, ud.revision, ud.branch)
720 try: 786 try:
721 output = runfetchcmd(cmd, d, quiet=True, workdir=wd) 787 output = runfetchcmd(cmd, d, quiet=True, workdir=wd)
722 except bb.fetch2.FetchError: 788 except bb.fetch2.FetchError:
@@ -725,19 +791,21 @@ class Git(FetchMethod):
725 raise bb.fetch2.FetchError("The command '%s' gave output with more than 1 line unexpectedly, output: '%s'" % (cmd, output)) 791 raise bb.fetch2.FetchError("The command '%s' gave output with more than 1 line unexpectedly, output: '%s'" % (cmd, output))
726 return output.split()[0] != "0" 792 return output.split()[0] != "0"
727 793
728 def _lfs_objects_downloaded(self, ud, d, name, wd): 794 def _lfs_objects_downloaded(self, ud, d, wd):
729 """ 795 """
730 Verifies whether the LFS objects for requested revisions have already been downloaded 796 Verifies whether the LFS objects for requested revisions have already been downloaded
731 """ 797 """
732 # Bail out early if this repository doesn't use LFS 798 # Bail out early if this repository doesn't use LFS
733 if not self._need_lfs(ud) or not self._contains_lfs(ud, d, wd): 799 if not self._contains_lfs(ud, d, wd):
734 return True 800 return True
735 801
802 self._ensure_git_lfs(d, ud)
803
736 # The Git LFS specification specifies ([1]) the LFS folder layout so it should be safe to check for file 804 # The Git LFS specification specifies ([1]) the LFS folder layout so it should be safe to check for file
737 # existence. 805 # existence.
738 # [1] https://github.com/git-lfs/git-lfs/blob/main/docs/spec.md#intercepting-git 806 # [1] https://github.com/git-lfs/git-lfs/blob/main/docs/spec.md#intercepting-git
739 cmd = "%s lfs ls-files -l %s" \ 807 cmd = "%s lfs ls-files -l %s" \
740 % (ud.basecmd, ud.revisions[name]) 808 % (ud.basecmd, ud.revision)
741 output = runfetchcmd(cmd, d, quiet=True, workdir=wd).rstrip() 809 output = runfetchcmd(cmd, d, quiet=True, workdir=wd).rstrip()
742 # Do not do any further matching if no objects are managed by LFS 810 # Do not do any further matching if no objects are managed by LFS
743 if not output: 811 if not output:
@@ -761,18 +829,8 @@ class Git(FetchMethod):
761 """ 829 """
762 Check if the repository has 'lfs' (large file) content 830 Check if the repository has 'lfs' (large file) content
763 """ 831 """
764
765 if ud.nobranch:
766 # If no branch is specified, use the current git commit
767 refname = self._build_revision(ud, d, ud.names[0])
768 elif wd == ud.clonedir:
769 # The bare clonedir doesn't use the remote names; it has the branch immediately.
770 refname = ud.branches[ud.names[0]]
771 else:
772 refname = "origin/%s" % ud.branches[ud.names[0]]
773
774 cmd = "%s grep lfs %s:.gitattributes | wc -l" % ( 832 cmd = "%s grep lfs %s:.gitattributes | wc -l" % (
775 ud.basecmd, refname) 833 ud.basecmd, ud.revision)
776 834
777 try: 835 try:
778 output = runfetchcmd(cmd, d, quiet=True, workdir=wd) 836 output = runfetchcmd(cmd, d, quiet=True, workdir=wd)
@@ -782,12 +840,14 @@ class Git(FetchMethod):
782 pass 840 pass
783 return False 841 return False
784 842
785 def _find_git_lfs(self, d): 843 def _ensure_git_lfs(self, d, ud):
786 """ 844 """
787 Return True if git-lfs can be found, False otherwise. 845 Ensures that git-lfs is available, raising a FetchError if it isn't.
788 """ 846 """
789 import shutil 847 if shutil.which("git-lfs", path=d.getVar('PATH')) is None:
790 return shutil.which("git-lfs", path=d.getVar('PATH')) is not None 848 raise bb.fetch2.FetchError(
849 "Repository %s has LFS content, install git-lfs on host to download (or set lfs=0 "
850 "to ignore it)" % self._get_repo_url(ud))
791 851
792 def _get_repo_url(self, ud): 852 def _get_repo_url(self, ud):
793 """ 853 """
@@ -795,21 +855,21 @@ class Git(FetchMethod):
795 """ 855 """
796 # Note that we do not support passwords directly in the git urls. There are several 856 # Note that we do not support passwords directly in the git urls. There are several
797 # reasons. SRC_URI can be written out to things like buildhistory and people don't 857 # reasons. SRC_URI can be written out to things like buildhistory and people don't
798 # want to leak passwords like that. It's also all too easy to share metadata without 858 # want to leak passwords like that. It's also all too easy to share metadata without
799 # removing the password. ssh keys, ~/.netrc and ~/.ssh/config files can be used as 859 # removing the password. ssh keys, ~/.netrc and ~/.ssh/config files can be used as
800 # alternatives so we will not take patches adding password support here. 860 # alternatives so we will not take patches adding password support here.
801 if ud.user: 861 if ud.user:
802 username = ud.user + '@' 862 username = ud.user + '@'
803 else: 863 else:
804 username = "" 864 username = ""
805 return "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path) 865 return "%s://%s%s%s" % (ud.proto, username, ud.host, urllib.parse.quote(ud.path))
806 866
807 def _revision_key(self, ud, d, name): 867 def _revision_key(self, ud, d, name):
808 """ 868 """
809 Return a unique key for the url 869 Return a unique key for the url
810 """ 870 """
811 # Collapse adjacent slashes 871 # Collapse adjacent slashes
812 return "git:" + ud.host + slash_re.sub(".", ud.path) + ud.unresolvedrev[name] 872 return "git:" + ud.host + slash_re.sub(".", ud.path) + ud.unresolvedrev
813 873
814 def _lsremote(self, ud, d, search): 874 def _lsremote(self, ud, d, search):
815 """ 875 """
@@ -842,26 +902,26 @@ class Git(FetchMethod):
842 Compute the HEAD revision for the url 902 Compute the HEAD revision for the url
843 """ 903 """
844 if not d.getVar("__BBSRCREV_SEEN"): 904 if not d.getVar("__BBSRCREV_SEEN"):
845 raise bb.fetch2.FetchError("Recipe uses a floating tag/branch '%s' for repo '%s' without a fixed SRCREV yet doesn't call bb.fetch2.get_srcrev() (use SRCPV in PV for OE)." % (ud.unresolvedrev[name], ud.host+ud.path)) 905 raise bb.fetch2.FetchError("Recipe uses a floating tag/branch '%s' for repo '%s' without a fixed SRCREV yet doesn't call bb.fetch2.get_srcrev() (use SRCPV in PV for OE)." % (ud.unresolvedrev, ud.host+ud.path))
846 906
847 # Ensure we mark as not cached 907 # Ensure we mark as not cached
848 bb.fetch2.mark_recipe_nocache(d) 908 bb.fetch2.mark_recipe_nocache(d)
849 909
850 output = self._lsremote(ud, d, "") 910 output = self._lsremote(ud, d, "")
851 # Tags of the form ^{} may not work, need to fallback to other form 911 # Tags of the form ^{} may not work, need to fallback to other form
852 if ud.unresolvedrev[name][:5] == "refs/" or ud.usehead: 912 if ud.unresolvedrev[:5] == "refs/" or ud.usehead:
853 head = ud.unresolvedrev[name] 913 head = ud.unresolvedrev
854 tag = ud.unresolvedrev[name] 914 tag = ud.unresolvedrev
855 else: 915 else:
856 head = "refs/heads/%s" % ud.unresolvedrev[name] 916 head = "refs/heads/%s" % ud.unresolvedrev
857 tag = "refs/tags/%s" % ud.unresolvedrev[name] 917 tag = "refs/tags/%s" % ud.unresolvedrev
858 for s in [head, tag + "^{}", tag]: 918 for s in [head, tag + "^{}", tag]:
859 for l in output.strip().split('\n'): 919 for l in output.strip().split('\n'):
860 sha1, ref = l.split() 920 sha1, ref = l.split()
861 if s == ref: 921 if s == ref:
862 return sha1 922 return sha1
863 raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output for %s" % \ 923 raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output for %s" % \
864 (ud.unresolvedrev[name], ud.host+ud.path)) 924 (ud.unresolvedrev, ud.host+ud.path))
865 925
866 def latest_versionstring(self, ud, d): 926 def latest_versionstring(self, ud, d):
867 """ 927 """
@@ -912,23 +972,22 @@ class Git(FetchMethod):
912 return pupver 972 return pupver
913 973
914 def _build_revision(self, ud, d, name): 974 def _build_revision(self, ud, d, name):
915 return ud.revisions[name] 975 return ud.revision
916 976
917 def gitpkgv_revision(self, ud, d, name): 977 def gitpkgv_revision(self, ud, d, name):
918 """ 978 """
919 Return a sortable revision number by counting commits in the history 979 Return a sortable revision number by counting commits in the history
920 Based on gitpkgv.bbclass in meta-openembedded 980 Based on gitpkgv.bbclass in meta-openembedded
921 """ 981 """
922 rev = self._build_revision(ud, d, name) 982 rev = ud.revision
923 localpath = ud.localpath 983 localpath = ud.localpath
924 rev_file = os.path.join(localpath, "oe-gitpkgv_" + rev) 984 rev_file = os.path.join(localpath, "oe-gitpkgv_" + rev)
925 if not os.path.exists(localpath): 985 if not os.path.exists(localpath):
926 commits = None 986 commits = None
927 else: 987 else:
928 if not os.path.exists(rev_file) or not os.path.getsize(rev_file): 988 if not os.path.exists(rev_file) or not os.path.getsize(rev_file):
929 from pipes import quote
930 commits = bb.fetch2.runfetchcmd( 989 commits = bb.fetch2.runfetchcmd(
931 "git rev-list %s -- | wc -l" % quote(rev), 990 "git rev-list %s -- | wc -l" % shlex.quote(rev),
932 d, quiet=True).strip().lstrip('0') 991 d, quiet=True).strip().lstrip('0')
933 if commits: 992 if commits:
934 open(rev_file, "w").write("%d\n" % int(commits)) 993 open(rev_file, "w").write("%d\n" % int(commits))
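gitpkgv_revision() derives a sortable number by counting the commits reachable from the revision; the hunk also swaps the removed pipes.quote import for shlex.quote (the pipes module was removed in Python 3.13). The counting step, sketched with plain subprocess (commit_count is an illustrative helper):

import shlex
import subprocess

def commit_count(rev, repo):
    # Number of commits reachable from rev: monotonic, hence sortable.
    out = subprocess.check_output(
        "git rev-list %s -- | wc -l" % shlex.quote(rev),
        shell=True, cwd=repo, text=True)
    return int(out.strip() or 0)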
diff --git a/bitbake/lib/bb/fetch2/gitsm.py b/bitbake/lib/bb/fetch2/gitsm.py
index f7f3af7212..5869e1b99b 100644
--- a/bitbake/lib/bb/fetch2/gitsm.py
+++ b/bitbake/lib/bb/fetch2/gitsm.py
@@ -62,36 +62,35 @@ class GitSM(Git):
62 return modules 62 return modules
63 63
64 # Collect the defined submodules, and their attributes 64 # Collect the defined submodules, and their attributes
65 for name in ud.names: 65 try:
66 gitmodules = runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revision), d, quiet=True, workdir=workdir)
67 except:
68 # No submodules to update
69 gitmodules = ""
70
71 for m, md in parse_gitmodules(gitmodules).items():
66 try: 72 try:
67 gitmodules = runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True, workdir=workdir) 73 module_hash = runfetchcmd("%s ls-tree -z -d %s %s" % (ud.basecmd, ud.revision, md['path']), d, quiet=True, workdir=workdir)
68 except: 74 except:
69 # No submodules to update 75 # If the command fails, we don't have a valid file to check. If it doesn't
76 # fail -- it still might be a failure, see next check...
77 module_hash = ""
78
79 if not module_hash:
80 logger.debug("submodule %s is defined, but is not initialized in the repository. Skipping", m)
70 continue 81 continue
71 82
72 for m, md in parse_gitmodules(gitmodules).items(): 83 submodules.append(m)
73 try: 84 paths[m] = md['path']
74 module_hash = runfetchcmd("%s ls-tree -z -d %s %s" % (ud.basecmd, ud.revisions[name], md['path']), d, quiet=True, workdir=workdir) 85 revision[m] = ud.revision
75 except: 86 uris[m] = md['url']
76 # If the command fails, we don't have a valid file to check. If it doesn't 87 subrevision[m] = module_hash.split()[2]
77 # fail -- it still might be a failure, see next check... 88
78 module_hash = "" 89 # Convert relative to absolute uri based on parent uri
79 90 if uris[m].startswith('..') or uris[m].startswith('./'):
80 if not module_hash: 91 newud = copy.copy(ud)
81 logger.debug("submodule %s is defined, but is not initialized in the repository. Skipping", m) 92 newud.path = os.path.normpath(os.path.join(newud.path, uris[m]))
82 continue 93 uris[m] = Git._get_repo_url(self, newud)
83
84 submodules.append(m)
85 paths[m] = md['path']
86 revision[m] = ud.revisions[name]
87 uris[m] = md['url']
88 subrevision[m] = module_hash.split()[2]
89
90 # Convert relative to absolute uri based on parent uri
91 if uris[m].startswith('..') or uris[m].startswith('./'):
92 newud = copy.copy(ud)
93 newud.path = os.path.normpath(os.path.join(newud.path, uris[m]))
94 uris[m] = Git._get_repo_url(self, newud)
95 94
96 for module in submodules: 95 for module in submodules:
97 # Translate the module url into a SRC_URI 96 # Translate the module url into a SRC_URI
@@ -123,7 +122,7 @@ class GitSM(Git):
123 url += ";name=%s" % module 122 url += ";name=%s" % module
124 url += ";subpath=%s" % module 123 url += ";subpath=%s" % module
125 url += ";nobranch=1" 124 url += ";nobranch=1"
126 url += ";lfs=%s" % self._need_lfs(ud) 125 url += ";lfs=%s" % ("1" if self._need_lfs(ud) else "0")
127 # Note that adding "user=" here to give credentials to the 126 # Note that adding "user=" here to give credentials to the
128 # submodule is not supported. Since using SRC_URI to give git:// 127 # submodule is not supported. Since using SRC_URI to give git://
129 # URL a password is not supported, one has to use one of the 128 # URL a password is not supported, one has to use one of the
@@ -147,6 +146,22 @@ class GitSM(Git):
147 146
148 return submodules != [] 147 return submodules != []
149 148
149 def call_process_submodules(self, ud, d, extra_check, subfunc):
150 # If we're using a shallow mirror tarball it needs to be
151 # unpacked temporarily so that we can examine the .gitmodules file
152 # Unpack even when ud.clonedir is not available,
153 # which may occur during a fast shallow clone
154 unpack = extra_check or not os.path.exists(ud.clonedir)
155 if ud.shallow and os.path.exists(ud.fullshallow) and unpack:
156 tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
157 try:
158 runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=tmpdir)
159 self.process_submodules(ud, tmpdir, subfunc, d)
160 finally:
161 shutil.rmtree(tmpdir)
162 else:
163 self.process_submodules(ud, ud.clonedir, subfunc, d)
164
150 def need_update(self, ud, d): 165 def need_update(self, ud, d):
151 if Git.need_update(self, ud, d): 166 if Git.need_update(self, ud, d):
152 return True 167 return True
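call_process_submodules() above centralizes a pattern the old code repeated: when only the shallow mirror tarball exists, unpack it into a throwaway directory just long enough to read .gitmodules, then delete it. The pattern on its own (with_unpacked_tarball is a hypothetical helper):

import shutil
import subprocess
import tempfile

def with_unpacked_tarball(tarball, dl_dir, examine):
    # Unpack beside DL_DIR, hand the tree to the callback, always clean up.
    tmpdir = tempfile.mkdtemp(dir=dl_dir)
    try:
        subprocess.check_call(["tar", "-xzf", tarball], cwd=tmpdir)
        return examine(tmpdir)
    finally:
        shutil.rmtree(tmpdir)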
@@ -164,15 +179,7 @@ class GitSM(Git):
164 logger.error('gitsm: submodule update check failed: %s %s' % (type(e).__name__, str(e))) 179 logger.error('gitsm: submodule update check failed: %s %s' % (type(e).__name__, str(e)))
165 need_update_result = True 180 need_update_result = True
166 181
167 # If we're using a shallow mirror tarball it needs to be unpacked 182 self.call_process_submodules(ud, d, not os.path.exists(ud.clonedir), need_update_submodule)
168 # temporarily so that we can examine the .gitmodules file
169 if ud.shallow and os.path.exists(ud.fullshallow) and not os.path.exists(ud.clonedir):
170 tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
171 runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=tmpdir)
172 self.process_submodules(ud, tmpdir, need_update_submodule, d)
173 shutil.rmtree(tmpdir)
174 else:
175 self.process_submodules(ud, ud.clonedir, need_update_submodule, d)
176 183
177 if need_update_list: 184 if need_update_list:
178 logger.debug('gitsm: Submodules requiring update: %s' % (' '.join(need_update_list))) 185 logger.debug('gitsm: Submodules requiring update: %s' % (' '.join(need_update_list)))
@@ -195,16 +202,7 @@ class GitSM(Git):
195 raise 202 raise
196 203
197 Git.download(self, ud, d) 204 Git.download(self, ud, d)
198 205 self.call_process_submodules(ud, d, self.need_update(ud, d), download_submodule)
199 # If we're using a shallow mirror tarball it needs to be unpacked
200 # temporarily so that we can examine the .gitmodules file
201 if ud.shallow and os.path.exists(ud.fullshallow) and self.need_update(ud, d):
202 tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
203 runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=tmpdir)
204 self.process_submodules(ud, tmpdir, download_submodule, d)
205 shutil.rmtree(tmpdir)
206 else:
207 self.process_submodules(ud, ud.clonedir, download_submodule, d)
208 206
209 def unpack(self, ud, destdir, d): 207 def unpack(self, ud, destdir, d):
210 def unpack_submodules(ud, url, module, modpath, workdir, d): 208 def unpack_submodules(ud, url, module, modpath, workdir, d):
@@ -247,15 +245,27 @@ class GitSM(Git):
247 ret = self.process_submodules(ud, ud.destdir, unpack_submodules, d) 245 ret = self.process_submodules(ud, ud.destdir, unpack_submodules, d)
248 246
249 if not ud.bareclone and ret: 247 if not ud.bareclone and ret:
250 # All submodules should already be downloaded and configured in the tree. This simply 248 cmdprefix = ""
251 # sets up the configuration and checks out the files. The main project config should 249 # Avoid LFS smudging (replacing the LFS pointers with the actual content) when LFS shouldn't be used but git-lfs is installed.
252 # remain unmodified, and no download from the internet should occur. As such, lfs smudge 250 if not self._need_lfs(ud):
253 # should also be skipped as these files were already smudged in the fetch stage if lfs 251 cmdprefix = "GIT_LFS_SKIP_SMUDGE=1 "
254 # was enabled. 252 runfetchcmd("%s%s submodule update --recursive --no-fetch" % (cmdprefix, ud.basecmd), d, quiet=True, workdir=ud.destdir)
255 runfetchcmd("GIT_LFS_SKIP_SMUDGE=1 %s submodule update --recursive --no-fetch" % (ud.basecmd), d, quiet=True, workdir=ud.destdir) 253 def clean(self, ud, d):
254 def clean_submodule(ud, url, module, modpath, workdir, d):
255 url += ";bareclone=1;nobranch=1"
256 try:
257 newfetch = Fetch([url], d, cache=False)
258 newfetch.clean()
259 except Exception as e:
260 logger.warning('gitsm: submodule clean failed: %s %s' % (type(e).__name__, str(e)))
261
262 self.call_process_submodules(ud, d, True, clean_submodule)
263
264 # Clean top git dir
265 Git.clean(self, ud, d)
256 266
257 def implicit_urldata(self, ud, d): 267 def implicit_urldata(self, ud, d):
258 import shutil, subprocess, tempfile 268 import subprocess
259 269
260 urldata = [] 270 urldata = []
261 def add_submodule(ud, url, module, modpath, workdir, d): 271 def add_submodule(ud, url, module, modpath, workdir, d):
@@ -263,14 +273,6 @@ class GitSM(Git):
263 newfetch = Fetch([url], d, cache=False) 273 newfetch = Fetch([url], d, cache=False)
264 urldata.extend(newfetch.expanded_urldata()) 274 urldata.extend(newfetch.expanded_urldata())
265 275
266 # If we're using a shallow mirror tarball it needs to be unpacked 276 self.call_process_submodules(ud, d, ud.method.need_update(ud, d), add_submodule)
267 # temporarily so that we can examine the .gitmodules file
268 if ud.shallow and os.path.exists(ud.fullshallow) and ud.method.need_update(ud, d):
269 tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
270 subprocess.check_call("tar -xzf %s" % ud.fullshallow, cwd=tmpdir, shell=True)
271 self.process_submodules(ud, tmpdir, add_submodule, d)
272 shutil.rmtree(tmpdir)
273 else:
274 self.process_submodules(ud, ud.clonedir, add_submodule, d)
275 277
276 return urldata 278 return urldata
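The call_process_submodules() helper introduced above consolidates the shallow-mirror-tarball handling that need_update(), download(), clean() and implicit_urldata() previously each open-coded. A minimal sketch of the callback contract it expects, assuming the (ud, url, module, modpath, workdir, d) signature used by the callbacks in this file (the callback name below is invented for illustration):

    # Hypothetical per-submodule callback; the real callers pass
    # need_update_submodule, download_submodule, clean_submodule or add_submodule.
    def log_submodule(ud, url, module, modpath, workdir, d):
        print("submodule %s -> %s (tree path %s)" % (module, url, modpath))

    # With extra_check=True (or when ud.clonedir is missing), a shallow mirror
    # tarball, if present, is unpacked into a temporary directory first so the
    # .gitmodules file can be examined there:
    #   self.call_process_submodules(ud, d, True, log_submodule)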
diff --git a/bitbake/lib/bb/fetch2/gomod.py b/bitbake/lib/bb/fetch2/gomod.py
new file mode 100644
index 0000000000..53c1d8d115
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/gomod.py
@@ -0,0 +1,273 @@
1"""
2BitBake 'Fetch' implementation for Go modules
3
4The gomod/gomodgit fetchers are used to download Go modules to the module cache
5from a module proxy or directly from a version control repository.
6
7Example SRC_URI:
8
9SRC_URI += "gomod://golang.org/x/net;version=v0.9.0;sha256sum=..."
10SRC_URI += "gomodgit://golang.org/x/net;version=v0.9.0;repo=go.googlesource.com/net;srcrev=..."
11
12Required SRC_URI parameters:
13
14- version
15 The version of the module.
16
17Optional SRC_URI parameters:
18
19- mod
20 Fetch and unpack the go.mod file only instead of the complete module.
21 The go command may need to download go.mod files for many different modules
22 when computing the build list, and go.mod files are much smaller than
23 module zip files.
24 The default is "0"; set mod=1 to fetch only the go.mod file.
25
26- sha256sum
27 The checksum of the module zip file, or the go.mod file in case of fetching
28 only the go.mod file. Alternatively, set the SRC_URI variable flag for
29 "module@version.sha256sum".
30
31- protocol
32 The method used when fetching directly from a version control repository.
33 The default is "https" for git.
34
35- repo
36 The URL when fetching directly from a version control repository. Required
37 when the URL is different from the module path.
38
39- srcrev
40 The revision identifier used when fetching directly from a version control
41 repository. Alternatively, set the SRCREV variable for "module@version".
42
43- subdir
44 The module subdirectory when fetching directly from a version control
45 repository. Required when the module is not located in the root of the
46 repository.
47
48Related variables:
49
50- GO_MOD_PROXY
51 The module proxy used by the fetcher.
52
53- GO_MOD_CACHE_DIR
54 The directory where the module cache is located.
55 This must match the exported GOMODCACHE variable for the go command to find
56 the downloaded modules.
57
58See the Go modules reference, https://go.dev/ref/mod, for more information
59about the module cache, module proxies and version control systems.
60"""
61
62import hashlib
63import os
64import re
65import shutil
66import subprocess
67import zipfile
68
69import bb
70from bb.fetch2 import FetchError
71from bb.fetch2 import MissingParameterError
72from bb.fetch2 import runfetchcmd
73from bb.fetch2 import subprocess_setup
74from bb.fetch2.git import Git
75from bb.fetch2.wget import Wget
76
77
78def escape(path):
79 """Escape capital letters using exclamation points."""
80 return re.sub(r'([A-Z])', lambda m: '!' + m.group(1).lower(), path)
81
82
83class GoMod(Wget):
84 """Class to fetch Go modules from a Go module proxy via wget"""
85
86 def supports(self, ud, d):
87 """Check to see if a given URL is for this fetcher."""
88 return ud.type == 'gomod'
89
90 def urldata_init(self, ud, d):
91 """Set up to download the module from the module proxy.
92
93 Set up to download the module zip file to the module cache directory
94 and unpack the go.mod file (unless downloading only the go.mod file):
95
96 cache/download/<module>/@v/<version>.zip: The module zip file.
97 cache/download/<module>/@v/<version>.mod: The go.mod file.
98 """
99
100 proxy = d.getVar('GO_MOD_PROXY') or 'proxy.golang.org'
101 moddir = d.getVar('GO_MOD_CACHE_DIR') or 'pkg/mod'
102
103 if 'version' not in ud.parm:
104 raise MissingParameterError('version', ud.url)
105
106 module = ud.host
107 if ud.path != '/':
108 module += ud.path
109 ud.parm['module'] = module
110 version = ud.parm['version']
111
112 # Set URL and filename for wget download
113 if ud.parm.get('mod', '0') == '1':
114 ext = '.mod'
115 else:
116 ext = '.zip'
117 path = escape(f"{module}/@v/{version}{ext}")
118 ud.url = bb.fetch2.encodeurl(
119 ('https', proxy, '/' + path, None, None, None))
120 ud.parm['downloadfilename'] = f"{module.replace('/', '.')}@{version}{ext}"
121
122 # Set name for checksum verification
123 ud.parm['name'] = f"{module}@{version}"
124
125 # Set path for unpack
126 ud.parm['unpackpath'] = os.path.join(moddir, 'cache/download', path)
127
128 super().urldata_init(ud, d)
129
130 def unpack(self, ud, rootdir, d):
131 """Unpack the module in the module cache."""
132
133 # Unpack the module zip file or go.mod file
134 unpackpath = os.path.join(rootdir, ud.parm['unpackpath'])
135 unpackdir = os.path.dirname(unpackpath)
136 bb.utils.mkdirhier(unpackdir)
137 ud.unpack_tracer.unpack("file-copy", unpackdir)
138 cmd = f"cp {ud.localpath} {unpackpath}"
139 path = d.getVar('PATH')
140 if path:
141 cmd = f"PATH={path} {cmd}"
142 name = os.path.basename(unpackpath)
143 bb.note(f"Unpacking {name} to {unpackdir}/")
144 subprocess.check_call(cmd, shell=True, preexec_fn=subprocess_setup)
145
146 if name.endswith('.zip'):
147 # Unpack the go.mod file from the zip file
148 module = ud.parm['module']
149 name = name.rsplit('.', 1)[0] + '.mod'
150 bb.note(f"Unpacking {name} to {unpackdir}/")
151 with zipfile.ZipFile(ud.localpath) as zf:
152 with open(os.path.join(unpackdir, name), mode='wb') as mf:
153 try:
154 f = module + '@' + ud.parm['version'] + '/go.mod'
155 shutil.copyfileobj(zf.open(f), mf)
156 except KeyError:
157 # If the module does not have a go.mod file, synthesize
158 # one containing only a module statement.
159 mf.write(f'module {module}\n'.encode())
160
161
162class GoModGit(Git):
163 """Class to fetch Go modules directly from a git repository"""
164
165 def supports(self, ud, d):
166 """Check to see if a given URL is for this fetcher."""
167 return ud.type == 'gomodgit'
168
169 def urldata_init(self, ud, d):
170 """Set up to download the module from the git repository.
171
172 Set up to download the git repository to the module cache directory and
173 unpack the module zip file and the go.mod file:
174
175 cache/vcs/<hash>: The bare git repository.
176 cache/download/<module>/@v/<version>.zip: The module zip file.
177 cache/download/<module>/@v/<version>.mod: The go.mod file.
178 """
179
180 moddir = d.getVar('GO_MOD_CACHE_DIR') or 'pkg/mod'
181
182 if 'version' not in ud.parm:
183 raise MissingParameterError('version', ud.url)
184
185 module = ud.host
186 if ud.path != '/':
187 module += ud.path
188 ud.parm['module'] = module
189
190 # Set host, path and srcrev for git download
191 if 'repo' in ud.parm:
192 repo = ud.parm['repo']
193 idx = repo.find('/')
194 if idx != -1:
195 ud.host = repo[:idx]
196 ud.path = repo[idx:]
197 else:
198 ud.host = repo
199 ud.path = ''
200 if 'protocol' not in ud.parm:
201 ud.parm['protocol'] = 'https'
202 ud.name = f"{module}@{ud.parm['version']}"
203 srcrev = d.getVar('SRCREV_' + ud.name)
204 if srcrev:
205 if 'srcrev' not in ud.parm:
206 ud.parm['srcrev'] = srcrev
207 else:
208 if 'srcrev' in ud.parm:
209 d.setVar('SRCREV_' + ud.name, ud.parm['srcrev'])
210 if 'branch' not in ud.parm:
211 ud.parm['nobranch'] = '1'
212
213 # Set subpath, subdir and bareclone for git unpack
214 if 'subdir' in ud.parm:
215 ud.parm['subpath'] = ud.parm['subdir']
216 key = f"git3:{ud.parm['protocol']}://{ud.host}{ud.path}".encode()
217 ud.parm['key'] = key
218 ud.parm['subdir'] = os.path.join(moddir, 'cache/vcs',
219 hashlib.sha256(key).hexdigest())
220 ud.parm['bareclone'] = '1'
221
222 super().urldata_init(ud, d)
223
224 def unpack(self, ud, rootdir, d):
225 """Unpack the module in the module cache."""
226
227 # Unpack the bare git repository
228 super().unpack(ud, rootdir, d)
229
230 moddir = d.getVar('GO_MOD_CACHE_DIR') or 'pkg/mod'
231
232 # Create the info file
233 module = ud.parm['module']
234 repodir = os.path.join(rootdir, ud.parm['subdir'])
235 with open(repodir + '.info', 'wb') as f:
236 f.write(ud.parm['key'])
237
238 # Unpack the go.mod file from the repository
239 unpackdir = os.path.join(rootdir, moddir, 'cache/download',
240 escape(module), '@v')
241 bb.utils.mkdirhier(unpackdir)
242 srcrev = ud.parm['srcrev']
243 version = ud.parm['version']
244 escaped_version = escape(version)
245 cmd = f"git ls-tree -r --name-only '{srcrev}'"
246 if 'subpath' in ud.parm:
247 cmd += f" '{ud.parm['subpath']}'"
248 files = runfetchcmd(cmd, d, workdir=repodir).split()
249 name = escaped_version + '.mod'
250 bb.note(f"Unpacking {name} to {unpackdir}/")
251 with open(os.path.join(unpackdir, name), mode='wb') as mf:
252 f = 'go.mod'
253 if 'subpath' in ud.parm:
254 f = os.path.join(ud.parm['subpath'], f)
255 if f in files:
256 cmd = ['git', 'cat-file', 'blob', srcrev + ':' + f]
257 subprocess.check_call(cmd, stdout=mf, cwd=repodir,
258 preexec_fn=subprocess_setup)
259 else:
260 # If the module does not have a go.mod file, synthesize one
261 # containing only a module statement.
262 mf.write(f'module {module}\n'.encode())
263
264 # Synthesize the module zip file from the repository
265 name = escaped_version + '.zip'
266 bb.note(f"Unpacking {name} to {unpackdir}/")
267 with zipfile.ZipFile(os.path.join(unpackdir, name), mode='w') as zf:
268 prefix = module + '@' + version + '/'
269 for f in files:
270 cmd = ['git', 'cat-file', 'blob', srcrev + ':' + f]
271 data = subprocess.check_output(cmd, cwd=repodir,
272 preexec_fn=subprocess_setup)
273 zf.writestr(prefix + f, data)
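To make the escape() encoding above concrete, here is a self-contained restatement (not an import of the new fetcher); the module path is only an example. Capital letters are case-encoded with '!' so cache paths remain unambiguous on case-insensitive filesystems:

    import re

    def escape(path):
        """Escape capital letters using exclamation points, as gomod.py does."""
        return re.sub(r'([A-Z])', lambda m: '!' + m.group(1).lower(), path)

    print(escape("github.com/BurntSushi/toml/@v/v1.3.2.zip"))
    # -> github.com/!burnt!sushi/toml/@v/v1.3.2.zip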
diff --git a/bitbake/lib/bb/fetch2/local.py b/bitbake/lib/bb/fetch2/local.py
index 7d7668110e..fda56a564e 100644
--- a/bitbake/lib/bb/fetch2/local.py
+++ b/bitbake/lib/bb/fetch2/local.py
@@ -29,11 +29,10 @@ class Local(FetchMethod):
29 29
30 def urldata_init(self, ud, d): 30 def urldata_init(self, ud, d):
31 # We don't set localfile as for this fetcher the file is already local! 31 # We don't set localfile as for this fetcher the file is already local!
32 ud.decodedurl = urllib.parse.unquote(ud.url.split("://")[1].split(";")[0]) 32 ud.basename = os.path.basename(ud.path)
33 ud.basename = os.path.basename(ud.decodedurl) 33 ud.basepath = ud.path
34 ud.basepath = ud.decodedurl
35 ud.needdonestamp = False 34 ud.needdonestamp = False
36 if "*" in ud.decodedurl: 35 if "*" in ud.path:
37 raise bb.fetch2.ParameterError("file:// urls using globbing are no longer supported. Please place the files in a directory and reference that instead.", ud.url) 36 raise bb.fetch2.ParameterError("file:// urls using globbing are no longer supported. Please place the files in a directory and reference that instead.", ud.url)
38 return 37 return
39 38
@@ -48,7 +47,7 @@ class Local(FetchMethod):
48 Return the local filename of a given url assuming a successful fetch. 47 Return the local filename of a given url assuming a successful fetch.
49 """ 48 """
50 searched = [] 49 searched = []
51 path = urldata.decodedurl 50 path = urldata.path
52 newpath = path 51 newpath = path
53 if path[0] == "/": 52 if path[0] == "/":
54 logger.debug2("Using absolute %s" % (path)) 53 logger.debug2("Using absolute %s" % (path))
diff --git a/bitbake/lib/bb/fetch2/npm.py b/bitbake/lib/bb/fetch2/npm.py
index 15f3f19bc8..e469d66768 100644
--- a/bitbake/lib/bb/fetch2/npm.py
+++ b/bitbake/lib/bb/fetch2/npm.py
@@ -42,11 +42,12 @@ from bb.utils import is_semver
42 42
43def npm_package(package): 43def npm_package(package):
44 """Convert the npm package name to remove unsupported character""" 44 """Convert the npm package name to remove unsupported character"""
45 # Scoped package names (with the @) use the same naming convention 45 # For scoped package names ('@user/package') the '/' is replaced by a '-'.
46 # as the 'npm pack' command. 46 # This is similar to what 'npm pack' does, but 'npm pack' also strips the
47 # leading '@', which can lead to ambiguous package names.
47 name = re.sub("/", "-", package) 48 name = re.sub("/", "-", package)
48 name = name.lower() 49 name = name.lower()
49 name = re.sub(r"[^\-a-z0-9]", "", name) 50 name = re.sub(r"[^\-a-z0-9@]", "", name)
50 name = name.strip("-") 51 name = name.strip("-")
51 return name 52 return name
52 53
@@ -90,6 +91,12 @@ class NpmEnvironment(object):
90 self.d = d 91 self.d = d
91 92
92 self.user_config = tempfile.NamedTemporaryFile(mode="w", buffering=1) 93 self.user_config = tempfile.NamedTemporaryFile(mode="w", buffering=1)
94
95 hn = self._home_npmrc(d)
96 if hn is not None:
97 with open(hn, 'r') as hnf:
98 self.user_config.write(hnf.read())
99
93 for key, value in configs: 100 for key, value in configs:
94 self.user_config.write("%s=%s\n" % (key, value)) 101 self.user_config.write("%s=%s\n" % (key, value))
95 102
@@ -102,6 +109,15 @@ class NpmEnvironment(object):
102 if self.user_config: 109 if self.user_config:
103 self.user_config.close() 110 self.user_config.close()
104 111
112 def _home_npmrc(self, d):
113 """Function to return user's HOME .npmrc file (or None if it doesn't exist)"""
114 home_npmrc_file = os.path.join(os.environ.get("HOME"), ".npmrc")
115 if d.getVar("BB_USE_HOME_NPMRC") == "1" and os.path.exists(home_npmrc_file):
116 bb.warn(f"BB_USE_HOME_NPMRC flag set and valid .npmrc detected - "\
117 f"npm fetcher will use {home_npmrc_file}")
118 return home_npmrc_file
119 return None
120
105 def run(self, cmd, args=None, configs=None, workdir=None): 121 def run(self, cmd, args=None, configs=None, workdir=None):
106 """Run npm command in a controlled environment""" 122 """Run npm command in a controlled environment"""
107 with tempfile.TemporaryDirectory() as tmpdir: 123 with tempfile.TemporaryDirectory() as tmpdir:
@@ -165,7 +181,7 @@ class Npm(FetchMethod):
165 # Using the 'downloadfilename' parameter as local filename 181 # Using the 'downloadfilename' parameter as local filename
166 # or the npm package name. 182 # or the npm package name.
167 if "downloadfilename" in ud.parm: 183 if "downloadfilename" in ud.parm:
168 ud.localfile = npm_localfile(d.expand(ud.parm["downloadfilename"])) 184 ud.localfile = npm_localfile(ud.parm["downloadfilename"])
169 else: 185 else:
170 ud.localfile = npm_localfile(ud.package, ud.version) 186 ud.localfile = npm_localfile(ud.package, ud.version)
171 187
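A self-contained restatement of the revised npm_package() above, to show why '@' is now kept in the allowed character set: with the old regex a scoped name could collapse onto an unscoped one.

    import re

    def npm_package(package):
        # '/' becomes '-', the name is lowercased, and '@' is now preserved.
        name = re.sub("/", "-", package)
        name = name.lower()
        name = re.sub(r"[^\-a-z0-9@]", "", name)
        return name.strip("-")

    print(npm_package("@Types/Node"))  # '@types-node'; the old regex gave 'types-node'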
diff --git a/bitbake/lib/bb/fetch2/npmsw.py b/bitbake/lib/bb/fetch2/npmsw.py
index ff5f8dc755..2f9599ee9e 100644
--- a/bitbake/lib/bb/fetch2/npmsw.py
+++ b/bitbake/lib/bb/fetch2/npmsw.py
@@ -37,38 +37,26 @@ def foreach_dependencies(shrinkwrap, callback=None, dev=False):
37 """ 37 """
38 Run a callback for each dependency of a shrinkwrap file. 38 Run a callback for each dependency of a shrinkwrap file.
39 The callback uses the format: 39 The callback uses the format:
40 callback(name, params, deptree) 40 callback(name, data, location)
41 with: 41 with:
42 name = the package name (string) 42 name = the package name (string)
43 params = the package parameters (dictionary) 43 data = the package data (dictionary)
44 destdir = the destination of the package (string) 44 location = the location of the package (string)
45 """ 45 """
46 # For handling old-style dependency entries in shrinkwrap files 46 packages = shrinkwrap.get("packages")
47 def _walk_deps(deps, deptree): 47 if not packages:
48 for name in deps: 48 raise FetchError("Invalid shrinkwrap file format")
49 subtree = [*deptree, name] 49
50 _walk_deps(deps[name].get("dependencies", {}), subtree) 50 for location, data in packages.items():
51 if callback is not None: 51 # Skip empty main and local link target packages
52 if deps[name].get("dev", False) and not dev: 52 if not location.startswith('node_modules/'):
53 continue 53 continue
54 elif deps[name].get("bundled", False): 54 elif not dev and data.get("dev", False):
55 continue 55 continue
56 destsubdirs = [os.path.join("node_modules", dep) for dep in subtree] 56 elif data.get("inBundle", False):
57 destsuffix = os.path.join(*destsubdirs) 57 continue
58 callback(name, deps[name], destsuffix) 58 name = location.split('node_modules/')[-1]
59 59 callback(name, data, location)
60 # packages entry means new style shrinkwrap file, else use dependencies
61 packages = shrinkwrap.get("packages", None)
62 if packages is not None:
63 for package in packages:
64 if package != "":
65 name = package.split('node_modules/')[-1]
66 package_infos = packages.get(package, {})
67 if dev == False and package_infos.get("dev", False):
68 continue
69 callback(name, package_infos, package)
70 else:
71 _walk_deps(shrinkwrap.get("dependencies", {}), [])
72 60
73class NpmShrinkWrap(FetchMethod): 61class NpmShrinkWrap(FetchMethod):
74 """Class to fetch all package from a shrinkwrap file""" 62 """Class to fetch all package from a shrinkwrap file"""
@@ -95,12 +83,18 @@ class NpmShrinkWrap(FetchMethod):
95 extrapaths = [] 83 extrapaths = []
96 unpack = True 84 unpack = True
97 85
98 integrity = params.get("integrity", None) 86 integrity = params.get("integrity")
99 resolved = params.get("resolved", None) 87 resolved = params.get("resolved")
100 version = params.get("version", None) 88 version = params.get("version")
89 link = params.get("link", False)
90
91 # Handle link sources
92 if link:
93 localpath = resolved
94 unpack = False
101 95
102 # Handle registry sources 96 # Handle registry sources
103 if is_semver(version) and integrity: 97 elif version and is_semver(version) and integrity:
104 # Handle duplicate dependencies without url 98 # Handle duplicate dependencies without url
105 if not resolved: 99 if not resolved:
106 return 100 return
@@ -128,10 +122,10 @@ class NpmShrinkWrap(FetchMethod):
128 extrapaths.append(resolvefile) 122 extrapaths.append(resolvefile)
129 123
130 # Handle http tarball sources 124 # Handle http tarball sources
131 elif version.startswith("http") and integrity: 125 elif resolved.startswith("http") and integrity:
132 localfile = npm_localfile(os.path.basename(version)) 126 localfile = npm_localfile(os.path.basename(resolved))
133 127
134 uri = URI(version) 128 uri = URI(resolved)
135 uri.params["downloadfilename"] = localfile 129 uri.params["downloadfilename"] = localfile
136 130
137 checksum_name, checksum_expected = npm_integrity(integrity) 131 checksum_name, checksum_expected = npm_integrity(integrity)
@@ -141,28 +135,12 @@ class NpmShrinkWrap(FetchMethod):
141 135
142 localpath = os.path.join(d.getVar("DL_DIR"), localfile) 136 localpath = os.path.join(d.getVar("DL_DIR"), localfile)
143 137
144 # Handle local tarball and link sources 138 # Handle local tarball sources
145 elif version.startswith("file"): 139 elif resolved.startswith("file"):
146 localpath = version[5:] 140 localpath = resolved[5:]
147 if not version.endswith(".tgz"):
148 unpack = False
149 141
150 # Handle git sources 142 # Handle git sources
151 elif version.startswith(("git", "bitbucket","gist")) or ( 143 elif resolved.startswith("git"):
152 not version.endswith((".tgz", ".tar", ".tar.gz"))
153 and not version.startswith((".", "@", "/"))
154 and "/" in version
155 ):
156 if version.startswith("github:"):
157 version = "git+https://github.com/" + version[len("github:"):]
158 elif version.startswith("gist:"):
159 version = "git+https://gist.github.com/" + version[len("gist:"):]
160 elif version.startswith("bitbucket:"):
161 version = "git+https://bitbucket.org/" + version[len("bitbucket:"):]
162 elif version.startswith("gitlab:"):
163 version = "git+https://gitlab.com/" + version[len("gitlab:"):]
164 elif not version.startswith(("git+","git:")):
165 version = "git+https://github.com/" + version
166 regex = re.compile(r""" 144 regex = re.compile(r"""
167 ^ 145 ^
168 git\+ 146 git\+
@@ -174,16 +152,16 @@ class NpmShrinkWrap(FetchMethod):
174 $ 152 $
175 """, re.VERBOSE) 153 """, re.VERBOSE)
176 154
177 match = regex.match(version) 155 match = regex.match(resolved)
178
179 if not match: 156 if not match:
180 raise ParameterError("Invalid git url: %s" % version, ud.url) 157 raise ParameterError("Invalid git url: %s" % resolved, ud.url)
181 158
182 groups = match.groupdict() 159 groups = match.groupdict()
183 160
184 uri = URI("git://" + str(groups["url"])) 161 uri = URI("git://" + str(groups["url"]))
185 uri.params["protocol"] = str(groups["protocol"]) 162 uri.params["protocol"] = str(groups["protocol"])
186 uri.params["rev"] = str(groups["rev"]) 163 uri.params["rev"] = str(groups["rev"])
164 uri.params["nobranch"] = "1"
187 uri.params["destsuffix"] = destsuffix 165 uri.params["destsuffix"] = destsuffix
188 166
189 url = str(uri) 167 url = str(uri)
@@ -268,7 +246,7 @@ class NpmShrinkWrap(FetchMethod):
268 246
269 def unpack(self, ud, rootdir, d): 247 def unpack(self, ud, rootdir, d):
270 """Unpack the downloaded dependencies""" 248 """Unpack the downloaded dependencies"""
271 destdir = d.getVar("S") 249 destdir = rootdir
272 destsuffix = ud.parm.get("destsuffix") 250 destsuffix = ud.parm.get("destsuffix")
273 if destsuffix: 251 if destsuffix:
274 destdir = os.path.join(rootdir, destsuffix) 252 destdir = os.path.join(rootdir, destsuffix)
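A minimal standalone sketch of the new 'packages'-based traversal above, with an invented package-lock-style dictionary (the real function also raises FetchError when the 'packages' key is missing):

    shrinkwrap = {
        "packages": {
            "": {"name": "example-app"},                      # main package: skipped
            "node_modules/foo": {"version": "1.0.0"},
            "node_modules/foo/node_modules/bar": {"dev": True},
        }
    }

    def foreach_dependencies(shrinkwrap, callback, dev=False):
        for location, data in shrinkwrap["packages"].items():
            if not location.startswith("node_modules/"):
                continue                 # skip main and local link target packages
            if not dev and data.get("dev", False):
                continue                 # skip dev dependencies unless requested
            if data.get("inBundle", False):
                continue                 # skip bundled dependencies
            callback(location.split("node_modules/")[-1], data, location)

    foreach_dependencies(shrinkwrap, lambda name, data, loc: print(name, loc))
    # -> foo node_modules/foo   ('bar' is filtered out as a dev dependency)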
diff --git a/bitbake/lib/bb/fetch2/s3.py b/bitbake/lib/bb/fetch2/s3.py
index 6b8ffd5359..22c0538139 100644
--- a/bitbake/lib/bb/fetch2/s3.py
+++ b/bitbake/lib/bb/fetch2/s3.py
@@ -77,7 +77,7 @@ class S3(FetchMethod):
77 else: 77 else:
78 ud.basename = os.path.basename(ud.path) 78 ud.basename = os.path.basename(ud.path)
79 79
80 ud.localfile = d.expand(urllib.parse.unquote(ud.basename)) 80 ud.localfile = ud.basename
81 81
82 ud.basecmd = d.getVar("FETCHCMD_s3") or "/usr/bin/env aws s3" 82 ud.basecmd = d.getVar("FETCHCMD_s3") or "/usr/bin/env aws s3"
83 83
diff --git a/bitbake/lib/bb/fetch2/sftp.py b/bitbake/lib/bb/fetch2/sftp.py
index 7884cce949..bee71a0d0d 100644
--- a/bitbake/lib/bb/fetch2/sftp.py
+++ b/bitbake/lib/bb/fetch2/sftp.py
@@ -77,7 +77,7 @@ class SFTP(FetchMethod):
77 else: 77 else:
78 ud.basename = os.path.basename(ud.path) 78 ud.basename = os.path.basename(ud.path)
79 79
80 ud.localfile = d.expand(urllib.parse.unquote(ud.basename)) 80 ud.localfile = ud.basename
81 81
82 def download(self, ud, d): 82 def download(self, ud, d):
83 """Fetch urls""" 83 """Fetch urls"""
diff --git a/bitbake/lib/bb/fetch2/ssh.py b/bitbake/lib/bb/fetch2/ssh.py
index 0cbb2a6f25..2a0f2cb44b 100644
--- a/bitbake/lib/bb/fetch2/ssh.py
+++ b/bitbake/lib/bb/fetch2/ssh.py
@@ -73,8 +73,7 @@ class SSH(FetchMethod):
73 path = m.group('path') 73 path = m.group('path')
74 path = urllib.parse.unquote(path) 74 path = urllib.parse.unquote(path)
75 host = m.group('host') 75 host = m.group('host')
76 urldata.localpath = os.path.join(d.getVar('DL_DIR'), 76 urldata.localfile = os.path.basename(os.path.normpath(path))
77 os.path.basename(os.path.normpath(path)))
78 77
79 def download(self, urldata, d): 78 def download(self, urldata, d):
80 dldir = d.getVar('DL_DIR') 79 dldir = d.getVar('DL_DIR')
diff --git a/bitbake/lib/bb/fetch2/svn.py b/bitbake/lib/bb/fetch2/svn.py
index d40e4d2909..0852108e7d 100644
--- a/bitbake/lib/bb/fetch2/svn.py
+++ b/bitbake/lib/bb/fetch2/svn.py
@@ -210,3 +210,6 @@ class Svn(FetchMethod):
210 210
211 def _build_revision(self, ud, d): 211 def _build_revision(self, ud, d):
212 return ud.revision 212 return ud.revision
213
214 def supports_checksum(self, urldata):
215 return False
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py
index fbfa6938ac..7e43d3bc97 100644
--- a/bitbake/lib/bb/fetch2/wget.py
+++ b/bitbake/lib/bb/fetch2/wget.py
@@ -53,11 +53,6 @@ class WgetProgressHandler(bb.progress.LineFilterProgressHandler):
53class Wget(FetchMethod): 53class Wget(FetchMethod):
54 """Class to fetch urls via 'wget'""" 54 """Class to fetch urls via 'wget'"""
55 55
56 # CDNs like CloudFlare may do a 'browser integrity test' which can fail
57 # with the standard wget/urllib User-Agent, so pretend to be a modern
58 # browser.
59 user_agent = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:84.0) Gecko/20100101 Firefox/84.0"
60
61 def check_certs(self, d): 56 def check_certs(self, d):
62 """ 57 """
63 Should certificates be checked? 58 Should certificates be checked?
@@ -83,11 +78,11 @@ class Wget(FetchMethod):
83 else: 78 else:
84 ud.basename = os.path.basename(ud.path) 79 ud.basename = os.path.basename(ud.path)
85 80
86 ud.localfile = d.expand(urllib.parse.unquote(ud.basename)) 81 ud.localfile = ud.basename
87 if not ud.localfile: 82 if not ud.localfile:
88 ud.localfile = d.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", ".")) 83 ud.localfile = ud.host + ud.path.replace("/", ".")
89 84
90 self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 30" 85 self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget --tries=2 --timeout=100"
91 86
92 if ud.type == 'ftp' or ud.type == 'ftps': 87 if ud.type == 'ftp' or ud.type == 'ftps':
93 self.basecmd += " --passive-ftp" 88 self.basecmd += " --passive-ftp"
@@ -101,16 +96,17 @@ class Wget(FetchMethod):
101 96
102 logger.debug2("Fetching %s using command '%s'" % (ud.url, command)) 97 logger.debug2("Fetching %s using command '%s'" % (ud.url, command))
103 bb.fetch2.check_network_access(d, command, ud.url) 98 bb.fetch2.check_network_access(d, command, ud.url)
104 runfetchcmd(command + ' --progress=dot -v', d, quiet, log=progresshandler, workdir=workdir) 99 runfetchcmd(command + ' --progress=dot --verbose', d, quiet, log=progresshandler, workdir=workdir)
105 100
106 def download(self, ud, d): 101 def download(self, ud, d):
107 """Fetch urls""" 102 """Fetch urls"""
108 103
109 fetchcmd = self.basecmd 104 fetchcmd = self.basecmd
110 105
111 localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile) + ".tmp" 106 dldir = os.path.realpath(d.getVar("DL_DIR"))
107 localpath = os.path.join(dldir, ud.localfile) + ".tmp"
112 bb.utils.mkdirhier(os.path.dirname(localpath)) 108 bb.utils.mkdirhier(os.path.dirname(localpath))
113 fetchcmd += " -O %s" % shlex.quote(localpath) 109 fetchcmd += " --output-document=%s" % shlex.quote(localpath)
114 110
115 if ud.user and ud.pswd: 111 if ud.user and ud.pswd:
116 fetchcmd += " --auth-no-challenge" 112 fetchcmd += " --auth-no-challenge"
@@ -126,14 +122,18 @@ class Wget(FetchMethod):
126 fetchcmd += " --user=%s --password=%s" % (ud.user, ud.pswd) 122 fetchcmd += " --user=%s --password=%s" % (ud.user, ud.pswd)
127 123
128 uri = ud.url.split(";")[0] 124 uri = ud.url.split(";")[0]
129 if os.path.exists(ud.localpath): 125 fetchcmd += " --continue --directory-prefix=%s '%s'" % (dldir, uri)
130 # file exists, but we didn't complete it.. trying again..
131 fetchcmd += d.expand(" -c -P ${DL_DIR} '%s'" % uri)
132 else:
133 fetchcmd += d.expand(" -P ${DL_DIR} '%s'" % uri)
134
135 self._runwget(ud, d, fetchcmd, False) 126 self._runwget(ud, d, fetchcmd, False)
136 127
128 # Sanity check since wget can pretend it succeeded when it didn't
129 # Also, this used to happen if sourceforge sent us to the mirror page
130 if not os.path.exists(localpath):
131 raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, localpath), uri)
132
133 if os.path.getsize(localpath) == 0:
134 os.remove(localpath)
135 raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (uri), uri)
136
137 # Try and verify any checksum now, meaning if it isn't correct, we don't remove the 137 # Try and verify any checksum now, meaning if it isn't correct, we don't remove the
138 # original file, which might be a race (imagine two recipes referencing the same 138 # original file, which might be a race (imagine two recipes referencing the same
139 # source, one with an incorrect checksum) 139 # source, one with an incorrect checksum)
@@ -143,15 +143,6 @@ class Wget(FetchMethod):
143 # Our lock prevents multiple writers but mirroring code may grab incomplete files 143 # Our lock prevents multiple writers but mirroring code may grab incomplete files
144 os.rename(localpath, localpath[:-4]) 144 os.rename(localpath, localpath[:-4])
145 145
146 # Sanity check since wget can pretend it succeeded when it didn't
147 # Also, this used to happen if sourceforge sent us to the mirror page
148 if not os.path.exists(ud.localpath):
149 raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)
150
151 if os.path.getsize(ud.localpath) == 0:
152 os.remove(ud.localpath)
153 raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (uri), uri)
154
155 return True 146 return True
156 147
157 def checkstatus(self, fetch, ud, d, try_again=True): 148 def checkstatus(self, fetch, ud, d, try_again=True):
@@ -243,7 +234,12 @@ class Wget(FetchMethod):
243 fetch.connection_cache.remove_connection(h.host, h.port) 234 fetch.connection_cache.remove_connection(h.host, h.port)
244 raise urllib.error.URLError(err) 235 raise urllib.error.URLError(err)
245 else: 236 else:
246 r = h.getresponse() 237 try:
238 r = h.getresponse()
239 except TimeoutError as e:
240 if fetch.connection_cache:
241 fetch.connection_cache.remove_connection(h.host, h.port)
242 raise TimeoutError(e)
247 243
248 # Pick apart the HTTPResponse object to get the addinfourl 244 # Pick apart the HTTPResponse object to get the addinfourl
249 # object initialized properly. 245 # object initialized properly.
@@ -304,13 +300,45 @@ class Wget(FetchMethod):
304 300
305 class FixedHTTPRedirectHandler(urllib.request.HTTPRedirectHandler): 301 class FixedHTTPRedirectHandler(urllib.request.HTTPRedirectHandler):
306 """ 302 """
307 urllib2.HTTPRedirectHandler resets the method to GET on redirect, 303 urllib2.HTTPRedirectHandler before 3.13 has two flaws:
308 when we want to follow redirects using the original method. 304
305 It resets the method to GET on redirect when we want to follow
306 redirects using the original method (typically HEAD). This was fixed
307 in 759e8e7.
308
309 It also doesn't handle 308 (Permanent Redirect). This was fixed in
310 c379bc5.
311
312 Until we depend on Python 3.13 onwards, copy the redirect_request
313 method to fix these issues.
309 """ 314 """
310 def redirect_request(self, req, fp, code, msg, headers, newurl): 315 def redirect_request(self, req, fp, code, msg, headers, newurl):
311 newreq = urllib.request.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl) 316 m = req.get_method()
312 newreq.get_method = req.get_method 317 if (not (code in (301, 302, 303, 307, 308) and m in ("GET", "HEAD")
313 return newreq 318 or code in (301, 302, 303) and m == "POST")):
319 raise urllib.error.HTTPError(req.full_url, code, msg, headers, fp)
320
321 # Strictly (according to RFC 2616), 301 or 302 in response to
322 # a POST MUST NOT cause a redirection without confirmation
323 # from the user (of urllib.request, in this case). In practice,
324 # essentially all clients do redirect in this case, so we do
325 # the same.
326
327 # Be conciliant with URIs containing a space. This is mainly
328 # redundant with the more complete encoding done in http_error_302(),
329 # but it is kept for compatibility with other callers.
330 newurl = newurl.replace(' ', '%20')
331
332 CONTENT_HEADERS = ("content-length", "content-type")
333 newheaders = {k: v for k, v in req.headers.items()
334 if k.lower() not in CONTENT_HEADERS}
335 return urllib.request.Request(newurl,
336 method="HEAD" if m == "HEAD" else "GET",
337 headers=newheaders,
338 origin_req_host=req.origin_req_host,
339 unverifiable=True)
340
341 http_error_308 = urllib.request.HTTPRedirectHandler.http_error_302
314 342
315 # We need to update the environment here as both the proxy and HTTPS 343 # We need to update the environment here as both the proxy and HTTPS
316 # handlers need variables set. The proxy needs http_proxy and friends to 344 # handlers need variables set. The proxy needs http_proxy and friends to
@@ -343,14 +371,14 @@ class Wget(FetchMethod):
343 opener = urllib.request.build_opener(*handlers) 371 opener = urllib.request.build_opener(*handlers)
344 372
345 try: 373 try:
346 uri_base = ud.url.split(";")[0] 374 parts = urllib.parse.urlparse(ud.url.split(";")[0])
347 uri = "{}://{}{}".format(urllib.parse.urlparse(uri_base).scheme, ud.host, ud.path) 375 uri = "{}://{}{}".format(parts.scheme, parts.netloc, parts.path)
348 r = urllib.request.Request(uri) 376 r = urllib.request.Request(uri)
349 r.get_method = lambda: "HEAD" 377 r.get_method = lambda: "HEAD"
350 # Some servers (FusionForge, as used on Alioth) require that the 378 # Some servers (FusionForge, as used on Alioth) require that the
351 # optional Accept header is set. 379 # optional Accept header is set.
352 r.add_header("Accept", "*/*") 380 r.add_header("Accept", "*/*")
353 r.add_header("User-Agent", self.user_agent) 381 r.add_header("User-Agent", "bitbake/{}".format(bb.__version__))
354 def add_basic_auth(login_str, request): 382 def add_basic_auth(login_str, request):
355 '''Adds Basic auth to http request, pass in login:password as string''' 383 '''Adds Basic auth to http request, pass in login:password as string'''
356 import base64 384 import base64
@@ -370,7 +398,7 @@ class Wget(FetchMethod):
370 except (FileNotFoundError, netrc.NetrcParseError): 398 except (FileNotFoundError, netrc.NetrcParseError):
371 pass 399 pass
372 400
373 with opener.open(r, timeout=30) as response: 401 with opener.open(r, timeout=100) as response:
374 pass 402 pass
375 except (urllib.error.URLError, ConnectionResetError, TimeoutError) as e: 403 except (urllib.error.URLError, ConnectionResetError, TimeoutError) as e:
376 if try_again: 404 if try_again:
@@ -457,7 +485,7 @@ class Wget(FetchMethod):
457 f = tempfile.NamedTemporaryFile() 485 f = tempfile.NamedTemporaryFile()
458 with tempfile.TemporaryDirectory(prefix="wget-index-") as workdir, tempfile.NamedTemporaryFile(dir=workdir, prefix="wget-listing-") as f: 486 with tempfile.TemporaryDirectory(prefix="wget-index-") as workdir, tempfile.NamedTemporaryFile(dir=workdir, prefix="wget-listing-") as f:
459 fetchcmd = self.basecmd 487 fetchcmd = self.basecmd
460 fetchcmd += " -O " + f.name + " --user-agent='" + self.user_agent + "' '" + uri + "'" 488 fetchcmd += " --output-document=%s '%s'" % (f.name, uri)
461 try: 489 try:
462 self._runwget(ud, d, fetchcmd, True, workdir=workdir) 490 self._runwget(ud, d, fetchcmd, True, workdir=workdir)
463 fetchresult = f.read() 491 fetchresult = f.read()
@@ -617,13 +645,17 @@ class Wget(FetchMethod):
617 645
618 sanity check to ensure same name and type. 646 sanity check to ensure same name and type.
619 """ 647 """
620 package = ud.path.split("/")[-1] 648 if 'downloadfilename' in ud.parm:
649 package = ud.parm['downloadfilename']
650 else:
651 package = ud.path.split("/")[-1]
621 current_version = ['', d.getVar('PV'), ''] 652 current_version = ['', d.getVar('PV'), '']
622 653
623 """possible to have no version in pkg name, such as spectrum-fw""" 654 """possible to have no version in pkg name, such as spectrum-fw"""
624 if not re.search(r"\d+", package): 655 if not re.search(r"\d+", package):
625 current_version[1] = re.sub('_', '.', current_version[1]) 656 current_version[1] = re.sub('_', '.', current_version[1])
626 current_version[1] = re.sub('-', '.', current_version[1]) 657 current_version[1] = re.sub('-', '.', current_version[1])
658 bb.debug(3, "latest_versionstring: no version found in %s" % package)
627 return (current_version[1], '') 659 return (current_version[1], '')
628 660
629 package_regex = self._init_regexes(package, ud, d) 661 package_regex = self._init_regexes(package, ud, d)
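For background on the redirect_request() copy above: before Python 3.13, urllib both resets the method to GET on redirect and refuses to follow 308. A condensed, hedged restatement of the copied handler, runnable on its own with only the standard library:

    import urllib.error
    import urllib.request

    class FixedHTTPRedirectHandler(urllib.request.HTTPRedirectHandler):
        """Follow 308 and keep HEAD as HEAD across redirects."""
        def redirect_request(self, req, fp, code, msg, headers, newurl):
            m = req.get_method()
            if not (code in (301, 302, 303, 307, 308) and m in ("GET", "HEAD")
                    or code in (301, 302, 303) and m == "POST"):
                raise urllib.error.HTTPError(req.full_url, code, msg, headers, fp)
            newurl = newurl.replace(' ', '%20')
            skip = ("content-length", "content-type")
            newheaders = {k: v for k, v in req.headers.items() if k.lower() not in skip}
            return urllib.request.Request(newurl,
                                          method="HEAD" if m == "HEAD" else "GET",
                                          headers=newheaders,
                                          origin_req_host=req.origin_req_host,
                                          unverifiable=True)
        http_error_308 = urllib.request.HTTPRedirectHandler.http_error_302

    opener = urllib.request.build_opener(FixedHTTPRedirectHandler)
    # opener.open(urllib.request.Request("https://example.com/", method="HEAD"))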
diff --git a/bitbake/lib/bb/msg.py b/bitbake/lib/bb/msg.py
index 3e18596faa..4f616ff42e 100644
--- a/bitbake/lib/bb/msg.py
+++ b/bitbake/lib/bb/msg.py
@@ -89,10 +89,6 @@ class BBLogFormatter(logging.Formatter):
89 msg = logging.Formatter.format(self, record) 89 msg = logging.Formatter.format(self, record)
90 if hasattr(record, 'bb_exc_formatted'): 90 if hasattr(record, 'bb_exc_formatted'):
91 msg += '\n' + ''.join(record.bb_exc_formatted) 91 msg += '\n' + ''.join(record.bb_exc_formatted)
92 elif hasattr(record, 'bb_exc_info'):
93 etype, value, tb = record.bb_exc_info
94 formatted = bb.exceptions.format_exception(etype, value, tb, limit=5)
95 msg += '\n' + ''.join(formatted)
96 return msg 92 return msg
97 93
98 def colorize(self, record): 94 def colorize(self, record):
diff --git a/bitbake/lib/bb/parse/__init__.py b/bitbake/lib/bb/parse/__init__.py
index a4358f1374..d428d8a4b4 100644
--- a/bitbake/lib/bb/parse/__init__.py
+++ b/bitbake/lib/bb/parse/__init__.py
@@ -49,20 +49,23 @@ class SkipPackage(SkipRecipe):
49__mtime_cache = {} 49__mtime_cache = {}
50def cached_mtime(f): 50def cached_mtime(f):
51 if f not in __mtime_cache: 51 if f not in __mtime_cache:
52 __mtime_cache[f] = os.stat(f)[stat.ST_MTIME] 52 res = os.stat(f)
53 __mtime_cache[f] = (res.st_mtime_ns, res.st_size, res.st_ino)
53 return __mtime_cache[f] 54 return __mtime_cache[f]
54 55
55def cached_mtime_noerror(f): 56def cached_mtime_noerror(f):
56 if f not in __mtime_cache: 57 if f not in __mtime_cache:
57 try: 58 try:
58 __mtime_cache[f] = os.stat(f)[stat.ST_MTIME] 59 res = os.stat(f)
60 __mtime_cache[f] = (res.st_mtime_ns, res.st_size, res.st_ino)
59 except OSError: 61 except OSError:
60 return 0 62 return 0
61 return __mtime_cache[f] 63 return __mtime_cache[f]
62 64
63def check_mtime(f, mtime): 65def check_mtime(f, mtime):
64 try: 66 try:
65 current_mtime = os.stat(f)[stat.ST_MTIME] 67 res = os.stat(f)
68 current_mtime = (res.st_mtime_ns, res.st_size, res.st_ino)
66 __mtime_cache[f] = current_mtime 69 __mtime_cache[f] = current_mtime
67 except OSError: 70 except OSError:
68 current_mtime = 0 71 current_mtime = 0
@@ -70,7 +73,8 @@ def check_mtime(f, mtime):
70 73
71def update_mtime(f): 74def update_mtime(f):
72 try: 75 try:
73 __mtime_cache[f] = os.stat(f)[stat.ST_MTIME] 76 res = os.stat(f)
77 __mtime_cache[f] = (res.st_mtime_ns, res.st_size, res.st_ino)
74 except OSError: 78 except OSError:
75 if f in __mtime_cache: 79 if f in __mtime_cache:
76 del __mtime_cache[f] 80 del __mtime_cache[f]
@@ -172,4 +176,41 @@ def get_file_depends(d):
172 dep_files.append(os.path.abspath(fn)) 176 dep_files.append(os.path.abspath(fn))
173 return " ".join(dep_files) 177 return " ".join(dep_files)
174 178
179def vardeps(*varnames):
180 """
181 Function decorator that can be used to instruct the bitbake dependency
182 parsing to add a dependency on the specified variable names
183
184 Example:
185
186 @bb.parse.vardeps("FOO", "BAR")
187 def my_function():
188 ...
189
190 """
191 def inner(f):
192 if not hasattr(f, "bb_vardeps"):
193 f.bb_vardeps = set()
194 f.bb_vardeps |= set(varnames)
195 return f
196 return inner
197
198def vardepsexclude(*varnames):
199 """
200 Function decorator that can be used to instruct the bitbake dependency
201 parsing to ignore dependencies on the specified variable names in the code
202
203 Example:
204
205 @bb.parse.vardepsexclude("FOO", "BAR")
206 def my_function():
207 ...
208 """
209 def inner(f):
210 if not hasattr(f, "bb_vardepsexclude"):
211 f.bb_vardepsexclude = set()
212 f.bb_vardepsexclude |= set(varnames)
213 return f
214 return inner
215
175from bb.parse.parse_py import __version__, ConfHandler, BBHandler 216from bb.parse.parse_py import __version__, ConfHandler, BBHandler
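The mtime cache above now stores a (st_mtime_ns, st_size, st_ino) triple rather than whole-second st_mtime, so a rewrite within the same second, or an atomic replace that keeps the timestamp, still invalidates the cache. A small standalone sketch of the same check:

    import os

    def file_signature(path):
        # Nanosecond mtime plus size and inode catches same-second edits
        # and file replacement, which plain st_mtime (seconds) can miss.
        res = os.stat(path)
        return (res.st_mtime_ns, res.st_size, res.st_ino)

    _cache = {}

    def has_changed(path):
        sig = file_signature(path)
        changed = _cache.get(path) != sig
        _cache[path] = sig
        return changed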
diff --git a/bitbake/lib/bb/parse/ast.py b/bitbake/lib/bb/parse/ast.py
index 7581d003fd..49a0788038 100644
--- a/bitbake/lib/bb/parse/ast.py
+++ b/bitbake/lib/bb/parse/ast.py
@@ -43,6 +43,21 @@ class IncludeNode(AstNode):
43 else: 43 else:
44 bb.parse.ConfHandler.include(self.filename, s, self.lineno, data, False) 44 bb.parse.ConfHandler.include(self.filename, s, self.lineno, data, False)
45 45
46class IncludeAllNode(AstNode):
47 def __init__(self, filename, lineno, what_file):
48 AstNode.__init__(self, filename, lineno)
49 self.what_file = what_file
50
51 def eval(self, data):
52 """
53 Include the file and evaluate the statements
54 """
55 s = data.expand(self.what_file)
56 logger.debug2("CONF %s:%s: including %s", self.filename, self.lineno, s)
57
58 for path in data.getVar("BBPATH").split(":"):
59 bb.parse.ConfHandler.include(self.filename, os.path.join(path, s), self.lineno, data, False)
60
46class ExportNode(AstNode): 61class ExportNode(AstNode):
47 def __init__(self, filename, lineno, var): 62 def __init__(self, filename, lineno, var):
48 AstNode.__init__(self, filename, lineno) 63 AstNode.__init__(self, filename, lineno)
@@ -137,7 +152,10 @@ class DataNode(AstNode):
137 152
138 flag = None 153 flag = None
139 if 'flag' in groupd and groupd['flag'] is not None: 154 if 'flag' in groupd and groupd['flag'] is not None:
140 flag = groupd['flag'] 155 if groupd["lazyques"]:
156 flag = "_defaultval_flag_"+groupd['flag']
157 else:
158 flag = groupd['flag']
141 elif groupd["lazyques"]: 159 elif groupd["lazyques"]:
142 flag = "_defaultval" 160 flag = "_defaultval"
143 161
@@ -240,14 +258,16 @@ class ExportFuncsNode(AstNode):
240 data.setVar(func, sentinel + " " + calledfunc + "\n", parsing=True) 258 data.setVar(func, sentinel + " " + calledfunc + "\n", parsing=True)
241 259
242class AddTaskNode(AstNode): 260class AddTaskNode(AstNode):
243 def __init__(self, filename, lineno, func, before, after): 261 def __init__(self, filename, lineno, tasks, before, after):
244 AstNode.__init__(self, filename, lineno) 262 AstNode.__init__(self, filename, lineno)
245 self.func = func 263 self.tasks = tasks
246 self.before = before 264 self.before = before
247 self.after = after 265 self.after = after
248 266
249 def eval(self, data): 267 def eval(self, data):
250 bb.build.addtask(self.func, self.before, self.after, data) 268 tasks = self.tasks.split()
269 for task in tasks:
270 bb.build.addtask(task, self.before, self.after, data)
251 271
252class DelTaskNode(AstNode): 272class DelTaskNode(AstNode):
253 def __init__(self, filename, lineno, tasks): 273 def __init__(self, filename, lineno, tasks):
@@ -320,13 +340,62 @@ class InheritDeferredNode(AstNode):
320 self.inherit = (classes, filename, lineno) 340 self.inherit = (classes, filename, lineno)
321 341
322 def eval(self, data): 342 def eval(self, data):
323 inherits = data.getVar('__BBDEFINHERITS', False) or [] 343 bb.parse.BBHandler.inherit_defer(*self.inherit, data)
324 inherits.append(self.inherit) 344
325 data.setVar('__BBDEFINHERITS', inherits) 345class AddFragmentsNode(AstNode):
346 def __init__(self, filename, lineno, fragments_path_prefix, fragments_variable, flagged_variables_list_variable, builtin_fragments_variable):
347 AstNode.__init__(self, filename, lineno)
348 self.fragments_path_prefix = fragments_path_prefix
349 self.fragments_variable = fragments_variable
350 self.flagged_variables_list_variable = flagged_variables_list_variable
351 self.builtin_fragments_variable = builtin_fragments_variable
352
353 def eval(self, data):
354 # No need to use mark_dependency since we would only match a fragment
355 # from a specific layer and there can only be a single layer with a
356 # given namespace.
357 def find_fragment(layers, layerid, full_fragment_name):
358 for layerpath in layers.split():
359 candidate_fragment_path = os.path.join(layerpath, full_fragment_name)
360 if os.path.exists(candidate_fragment_path) and bb.utils.get_file_layer(candidate_fragment_path, data) == layerid:
361 return candidate_fragment_path
362 return None
363
364 def check_and_set_builtin_fragment(fragment, data, builtin_fragments):
365 prefix, value = fragment.split('/', 1)
366 if prefix in builtin_fragments.keys():
367 data.setVar(builtin_fragments[prefix], value)
368 return True
369 return False
370
371 fragments = data.getVar(self.fragments_variable)
372 layers = data.getVar('BBLAYERS')
373 flagged_variables = data.getVar(self.flagged_variables_list_variable).split()
374 builtin_fragments = {f[0]:f[1] for f in [f.split(':') for f in data.getVar(self.builtin_fragments_variable).split()] }
375
376 if not fragments:
377 return
378 for f in fragments.split():
379 if check_and_set_builtin_fragment(f, data, builtin_fragments):
380 continue
381 layerid, fragment_name = f.split('/', 1)
382 full_fragment_name = data.expand("{}/{}.conf".format(self.fragments_path_prefix, fragment_name))
383 fragment_path = find_fragment(layers, layerid, full_fragment_name)
384 if fragment_path:
385 bb.parse.ConfHandler.include(self.filename, fragment_path, self.lineno, data, "include fragment")
386 for flagged_var in flagged_variables:
387 val = data.getVar(flagged_var)
388 data.setVarFlag(flagged_var, f, val)
389 data.setVar(flagged_var, None)
390 else:
391 bb.error("Could not find fragment {} in enabled layers: {}".format(f, layers))
326 392
327def handleInclude(statements, filename, lineno, m, force): 393def handleInclude(statements, filename, lineno, m, force):
328 statements.append(IncludeNode(filename, lineno, m.group(1), force)) 394 statements.append(IncludeNode(filename, lineno, m.group(1), force))
329 395
396def handleIncludeAll(statements, filename, lineno, m):
397 statements.append(IncludeAllNode(filename, lineno, m.group(1)))
398
330def handleExport(statements, filename, lineno, m): 399def handleExport(statements, filename, lineno, m):
331 statements.append(ExportNode(filename, lineno, m.group(1))) 400 statements.append(ExportNode(filename, lineno, m.group(1)))
332 401
@@ -348,21 +417,11 @@ def handlePythonMethod(statements, filename, lineno, funcname, modulename, body)
348def handleExportFuncs(statements, filename, lineno, m, classname): 417def handleExportFuncs(statements, filename, lineno, m, classname):
349 statements.append(ExportFuncsNode(filename, lineno, m.group(1), classname)) 418 statements.append(ExportFuncsNode(filename, lineno, m.group(1), classname))
350 419
351def handleAddTask(statements, filename, lineno, m): 420def handleAddTask(statements, filename, lineno, tasks, before, after):
352 func = m.group("func") 421 statements.append(AddTaskNode(filename, lineno, tasks, before, after))
353 before = m.group("before")
354 after = m.group("after")
355 if func is None:
356 return
357
358 statements.append(AddTaskNode(filename, lineno, func, before, after))
359 422
360def handleDelTask(statements, filename, lineno, m): 423def handleDelTask(statements, filename, lineno, tasks):
361 func = m.group(1) 424 statements.append(DelTaskNode(filename, lineno, tasks))
362 if func is None:
363 return
364
365 statements.append(DelTaskNode(filename, lineno, func))
366 425
367def handleBBHandlers(statements, filename, lineno, m): 426def handleBBHandlers(statements, filename, lineno, m):
368 statements.append(BBHandlerNode(filename, lineno, m.group(1))) 427 statements.append(BBHandlerNode(filename, lineno, m.group(1)))
@@ -378,12 +437,43 @@ def handleInheritDeferred(statements, filename, lineno, m):
378 classes = m.group(1) 437 classes = m.group(1)
379 statements.append(InheritDeferredNode(filename, lineno, classes)) 438 statements.append(InheritDeferredNode(filename, lineno, classes))
380 439
440def handleAddFragments(statements, filename, lineno, m):
441 fragments_path_prefix = m.group(1)
442 fragments_variable = m.group(2)
443 flagged_variables_list_variable = m.group(3)
444 builtin_fragments_variable = m.group(4)
445 statements.append(AddFragmentsNode(filename, lineno, fragments_path_prefix, fragments_variable, flagged_variables_list_variable, builtin_fragments_variable))
446
381def runAnonFuncs(d): 447def runAnonFuncs(d):
382 code = [] 448 code = []
383 for funcname in d.getVar("__BBANONFUNCS", False) or []: 449 for funcname in d.getVar("__BBANONFUNCS", False) or []:
384 code.append("%s(d)" % funcname) 450 code.append("%s(d)" % funcname)
385 bb.utils.better_exec("\n".join(code), {"d": d}) 451 bb.utils.better_exec("\n".join(code), {"d": d})
386 452
453# Handle recipe level PREFERRED_PROVIDERs
454def handleVirtRecipeProviders(tasklist, d):
455 depends = (d.getVar("DEPENDS") or "").split()
456 virtprovs = (d.getVar("BB_RECIPE_VIRTUAL_PROVIDERS") or "").split()
457 newdeps = []
458 for dep in depends:
459 if dep in virtprovs:
460 newdep = d.getVar("PREFERRED_PROVIDER_" + dep)
461 if not newdep:
462 bb.fatal("Error, recipe virtual provider PREFERRED_PROVIDER_%s not set" % dep)
463 newdeps.append(newdep)
464 else:
465 newdeps.append(dep)
466 d.setVar("DEPENDS", " ".join(newdeps))
467 for task in tasklist:
468 taskdeps = (d.getVarFlag(task, "depends") or "").split()
469 remapped = []
470 for entry in taskdeps:
471 r, t = entry.split(":")
472 if r in virtprovs:
473 r = d.getVar("PREFERRED_PROVIDER_" + r)
474 remapped.append("%s:%s" % (r, t))
475 d.setVarFlag(task, "depends", " ".join(remapped))
476
387def finalize(fn, d, variant = None): 477def finalize(fn, d, variant = None):
388 saved_handlers = bb.event.get_handlers().copy() 478 saved_handlers = bb.event.get_handlers().copy()
389 try: 479 try:
@@ -391,6 +481,17 @@ def finalize(fn, d, variant = None):
391 if d.getVar("_FAILPARSINGERRORHANDLED", False) == True: 481 if d.getVar("_FAILPARSINGERRORHANDLED", False) == True:
392 raise bb.BBHandledException() 482 raise bb.BBHandledException()
393 483
484 inherits = [x[0] for x in (d.getVar('__BBDEFINHERITS', False) or [('',)])]
485 bb.event.fire(bb.event.RecipePreDeferredInherits(fn, inherits), d)
486
487 while True:
488 inherits = d.getVar('__BBDEFINHERITS', False) or []
489 if not inherits:
490 break
491 inherit, filename, lineno = inherits.pop(0)
492 d.setVar('__BBDEFINHERITS', inherits)
493 bb.parse.BBHandler.inherit(inherit, filename, lineno, d, deferred=True)
494
394 for var in d.getVar('__BBHANDLERS', False) or []: 495 for var in d.getVar('__BBHANDLERS', False) or []:
395 # try to add the handler 496 # try to add the handler
396 handlerfn = d.getVarFlag(var, "filename", False) 497 handlerfn = d.getVarFlag(var, "filename", False)
@@ -409,6 +510,7 @@ def finalize(fn, d, variant = None):
409 510
410 tasklist = d.getVar('__BBTASKS', False) or [] 511 tasklist = d.getVar('__BBTASKS', False) or []
411 bb.event.fire(bb.event.RecipeTaskPreProcess(fn, list(tasklist)), d) 512 bb.event.fire(bb.event.RecipeTaskPreProcess(fn, list(tasklist)), d)
513 handleVirtRecipeProviders(tasklist, d)
412 bb.build.add_tasks(tasklist, d) 514 bb.build.add_tasks(tasklist, d)
413 515
414 bb.parse.siggen.finalise(fn, d, variant) 516 bb.parse.siggen.finalise(fn, d, variant)
@@ -444,14 +546,6 @@ def multi_finalize(fn, d):
444 logger.debug("Appending .bbappend file %s to %s", append, fn) 546 logger.debug("Appending .bbappend file %s to %s", append, fn)
445 bb.parse.BBHandler.handle(append, d, True) 547 bb.parse.BBHandler.handle(append, d, True)
446 548
447 while True:
448 inherits = d.getVar('__BBDEFINHERITS', False) or []
449 if not inherits:
450 break
451 inherit, filename, lineno = inherits.pop(0)
452 d.setVar('__BBDEFINHERITS', inherits)
453 bb.parse.BBHandler.inherit(inherit, filename, lineno, d, deferred=True)
454
455 onlyfinalise = d.getVar("__ONLYFINALISE", False) 549 onlyfinalise = d.getVar("__ONLYFINALISE", False)
456 550
457 safe_d = d 551 safe_d = d
@@ -487,7 +581,7 @@ def multi_finalize(fn, d):
487 d.setVar("BBEXTENDVARIANT", variantmap[name]) 581 d.setVar("BBEXTENDVARIANT", variantmap[name])
488 else: 582 else:
489 d.setVar("PN", "%s-%s" % (pn, name)) 583 d.setVar("PN", "%s-%s" % (pn, name))
490 bb.parse.BBHandler.inherit(extendedmap[name], fn, 0, d) 584 bb.parse.BBHandler.inherit_defer(extendedmap[name], fn, 0, d)
491 585
492 safe_d.setVar("BBCLASSEXTEND", extended) 586 safe_d.setVar("BBCLASSEXTEND", extended)
493 _create_variants(datastores, extendedmap.keys(), extendfunc, onlyfinalise) 587 _create_variants(datastores, extendedmap.keys(), extendfunc, onlyfinalise)
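To illustrate the DEPENDS remapping performed by the new handleVirtRecipeProviders() above, a sketch with a plain dictionary standing in for the datastore (the variable values are invented examples):

    d = {
        "DEPENDS": "virtual/cross-cc zlib",
        "BB_RECIPE_VIRTUAL_PROVIDERS": "virtual/cross-cc",
        "PREFERRED_PROVIDER_virtual/cross-cc": "gcc-cross",
    }

    virtprovs = d["BB_RECIPE_VIRTUAL_PROVIDERS"].split()
    newdeps = []
    for dep in d["DEPENDS"].split():
        # Each virtual dependency must resolve through PREFERRED_PROVIDER_<name>;
        # the real code calls bb.fatal() when that variable is unset.
        newdeps.append(d["PREFERRED_PROVIDER_" + dep] if dep in virtprovs else dep)
    print(" ".join(newdeps))  # -> gcc-cross zlib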
diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py
index c13e4b9755..008fec2308 100644
--- a/bitbake/lib/bb/parse/parse_py/BBHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/BBHandler.py
@@ -23,8 +23,8 @@ __func_start_regexp__ = re.compile(r"(((?P<py>python(?=(\s|\()))|(?P<fr>faker
23__inherit_regexp__ = re.compile(r"inherit\s+(.+)" ) 23__inherit_regexp__ = re.compile(r"inherit\s+(.+)" )
24__inherit_def_regexp__ = re.compile(r"inherit_defer\s+(.+)" ) 24__inherit_def_regexp__ = re.compile(r"inherit_defer\s+(.+)" )
25__export_func_regexp__ = re.compile(r"EXPORT_FUNCTIONS\s+(.+)" ) 25__export_func_regexp__ = re.compile(r"EXPORT_FUNCTIONS\s+(.+)" )
26__addtask_regexp__ = re.compile(r"addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*") 26__addtask_regexp__ = re.compile(r"addtask\s+([^#\n]+)(?P<comment>#.*|.*?)")
27__deltask_regexp__ = re.compile(r"deltask\s+(.+)") 27__deltask_regexp__ = re.compile(r"deltask\s+([^#\n]+)(?P<comment>#.*|.*?)")
28__addhandler_regexp__ = re.compile(r"addhandler\s+(.+)" ) 28__addhandler_regexp__ = re.compile(r"addhandler\s+(.+)" )
29__def_regexp__ = re.compile(r"def\s+(\w+).*:" ) 29__def_regexp__ = re.compile(r"def\s+(\w+).*:" )
30__python_func_regexp__ = re.compile(r"(\s+.*)|(^$)|(^#)" ) 30__python_func_regexp__ = re.compile(r"(\s+.*)|(^$)|(^#)" )
@@ -42,12 +42,22 @@ def supports(fn, d):
42 """Return True if fn has a supported extension""" 42 """Return True if fn has a supported extension"""
43 return os.path.splitext(fn)[-1] in [".bb", ".bbclass", ".inc"] 43 return os.path.splitext(fn)[-1] in [".bb", ".bbclass", ".inc"]
44 44
45def inherit_defer(expression, fn, lineno, d):
46 inherit = (expression, fn, lineno)
47 inherits = d.getVar('__BBDEFINHERITS', False) or []
48 inherits.append(inherit)
49 d.setVar('__BBDEFINHERITS', inherits)
50
45def inherit(files, fn, lineno, d, deferred=False): 51def inherit(files, fn, lineno, d, deferred=False):
46 __inherit_cache = d.getVar('__inherit_cache', False) or [] 52 __inherit_cache = d.getVar('__inherit_cache', False) or []
47 #if "${" in files and not deferred: 53 #if "${" in files and not deferred:
48 # bb.warn("%s:%s has non deferred conditional inherit" % (fn, lineno)) 54 # bb.warn("%s:%s has non deferred conditional inherit" % (fn, lineno))
49 files = d.expand(files).split() 55 files = d.expand(files).split()
50 for file in files: 56 for file in files:
57 defer = (d.getVar("BB_DEFER_BBCLASSES") or "").split()
58 if not deferred and file in defer:
59 inherit_defer(file, fn, lineno, d)
60 continue
51 classtype = d.getVar("__bbclasstype", False) 61 classtype = d.getVar("__bbclasstype", False)
52 origfile = file 62 origfile = file
53 for t in ["classes-" + classtype, "classes"]: 63 for t in ["classes-" + classtype, "classes"]:
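
The hunk above adds the other entry point into that queue: a class listed in BB_DEFER_BBCLASSES is rerouted from a plain inherit into inherit_defer(). A sketch of just the gating decision (plain dict again; the class names are made up):

    # Sketch of the BB_DEFER_BBCLASSES gate; not the real inherit() signature.
    def inherit_or_defer(files, fn, lineno, d, deferred=False):
        defer = (d.get("BB_DEFER_BBCLASSES") or "").split()
        for file in files.split():
            if not deferred and file in defer:
                d.setdefault('__BBDEFINHERITS', []).append((file, fn, lineno))
                continue
            print("inheriting %s immediately" % file)

    d = {"BB_DEFER_BBCLASSES": "cml1 pkgconfig"}
    inherit_or_defer("autotools cml1", "demo.bb", 3, d)
    print(d['__BBDEFINHERITS'])   # [('cml1', 'demo.bb', 3)]
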
@@ -239,29 +249,38 @@ def feeder(lineno, s, fn, root, statements, eof=False):
239 249
240 m = __addtask_regexp__.match(s) 250 m = __addtask_regexp__.match(s)
241 if m: 251 if m:
242 if len(m.group().split()) == 2: 252 after = ""
243 # Check and warn for "addtask task1 task2" 253 before = ""
244 m2 = re.match(r"addtask\s+(?P<func>\w+)(?P<ignores>.*)", s) 254
245 if m2 and m2.group('ignores'): 255 # This code splits on 'before' and 'after' instead of on whitespace so we can defer
246 logger.warning('addtask ignored: "%s"' % m2.group('ignores')) 256 # evaluation to as late as possible.
247 257 tasks = m.group(1).split(" before ")[0].split(" after ")[0]
248 # Check and warn for "addtask task1 before task2 before task3", the 258
249 # similar to "after" 259 for exp in m.group(1).split(" before "):
250 taskexpression = s.split() 260 exp2 = exp.split(" after ")
251 for word in ('before', 'after'): 261 if len(exp2) > 1:
252 if taskexpression.count(word) > 1: 262 after = after + " ".join(exp2[1:])
253 logger.warning("addtask contained multiple '%s' keywords, only one is supported" % word)
254 263
255 # Check and warn for having task with exprssion as part of task name 264 for exp in m.group(1).split(" after "):
265 exp2 = exp.split(" before ")
266 if len(exp2) > 1:
267 before = before + " ".join(exp2[1:])
268
269 # Check and warn for having task with a keyword as part of task name
270 taskexpression = s.split()
256 for te in taskexpression: 271 for te in taskexpression:
257 if any( ( "%s_" % keyword ) in te for keyword in bb.data_smart.__setvar_keyword__ ): 272 if any( ( "%s_" % keyword ) in te for keyword in bb.data_smart.__setvar_keyword__ ):
258 raise ParseError("Task name '%s' contains a keyword which is not recommended/supported.\nPlease rename the task not to include the keyword.\n%s" % (te, ("\n".join(map(str, bb.data_smart.__setvar_keyword__)))), fn) 273 raise ParseError("Task name '%s' contains a keyword which is not recommended/supported.\nPlease rename the task not to include the keyword.\n%s" % (te, ("\n".join(map(str, bb.data_smart.__setvar_keyword__)))), fn)
259 ast.handleAddTask(statements, fn, lineno, m) 274
275 if tasks is not None:
276 ast.handleAddTask(statements, fn, lineno, tasks, before, after)
260 return 277 return
261 278
262 m = __deltask_regexp__.match(s) 279 m = __deltask_regexp__.match(s)
263 if m: 280 if m:
264 ast.handleDelTask(statements, fn, lineno, m) 281 task = m.group(1)
282 if task is not None:
283 ast.handleDelTask(statements, fn, lineno, task)
265 return 284 return
266 285
267 m = __addhandler_regexp__.match(s) 286 m = __addhandler_regexp__.match(s)
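
The rewritten addtask handling drops the old named-group regex in favour of literal splits on ' before ' and ' after ', so unexpanded ${...} references in task names survive until evaluation happens later, as the in-diff comment notes. A standalone reimplementation for illustration (not the BitBake entry point itself):

    import re

    addtask_re = re.compile(r"addtask\s+([^#\n]+)(?P<comment>#.*|.*?)")

    def parse_addtask(s):
        body = addtask_re.match(s).group(1)
        # The task list is whatever precedes the first 'before'/'after' keyword
        tasks = body.split(" before ")[0].split(" after ")[0]
        after = ""
        before = ""
        for exp in body.split(" before "):
            exp2 = exp.split(" after ")
            if len(exp2) > 1:
                after = after + " ".join(exp2[1:])
        for exp in body.split(" after "):
            exp2 = exp.split(" before ")
            if len(exp2) > 1:
                before = before + " ".join(exp2[1:])
        return tasks.strip(), before.strip(), after.strip()

    print(parse_addtask("addtask do_deploy after do_compile before do_build"))
    # -> ('do_deploy', 'do_build', 'do_compile')
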
diff --git a/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
index 7826dee7d3..9ddbae123d 100644
--- a/bitbake/lib/bb/parse/parse_py/ConfHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
@@ -20,10 +20,10 @@ from bb.parse import ParseError, resolve_file, ast, logger, handle
20__config_regexp__ = re.compile( r""" 20__config_regexp__ = re.compile( r"""
21 ^ 21 ^
22 (?P<exp>export\s+)? 22 (?P<exp>export\s+)?
23 (?P<var>[a-zA-Z0-9\-_+.${}/~:]+?) 23 (?P<var>[a-zA-Z0-9\-_+.${}/~:]*?)
24 (\[(?P<flag>[a-zA-Z0-9\-_+.][a-zA-Z0-9\-_+.@]*)\])? 24 (\[(?P<flag>[a-zA-Z0-9\-_+.][a-zA-Z0-9\-_+.@/]*)\])?
25 25
26 \s* ( 26 (?P<whitespace>\s*) (
27 (?P<colon>:=) | 27 (?P<colon>:=) |
28 (?P<lazyques>\?\?=) | 28 (?P<lazyques>\?\?=) |
29 (?P<ques>\?=) | 29 (?P<ques>\?=) |
@@ -32,7 +32,7 @@ __config_regexp__ = re.compile( r"""
32 (?P<predot>=\.) | 32 (?P<predot>=\.) |
33 (?P<postdot>\.=) | 33 (?P<postdot>\.=) |
34 = 34 =
35 ) \s* 35 ) (?P<whitespace2>\s*)
36 36
37 (?!'[^']*'[^']*'$) 37 (?!'[^']*'[^']*'$)
38 (?!\"[^\"]*\"[^\"]*\"$) 38 (?!\"[^\"]*\"[^\"]*\"$)
@@ -43,10 +43,12 @@ __config_regexp__ = re.compile( r"""
43 """, re.X) 43 """, re.X)
44__include_regexp__ = re.compile( r"include\s+(.+)" ) 44__include_regexp__ = re.compile( r"include\s+(.+)" )
45__require_regexp__ = re.compile( r"require\s+(.+)" ) 45__require_regexp__ = re.compile( r"require\s+(.+)" )
46__includeall_regexp__ = re.compile( r"include_all\s+(.+)" )
46__export_regexp__ = re.compile( r"export\s+([a-zA-Z0-9\-_+.${}/~]+)$" ) 47__export_regexp__ = re.compile( r"export\s+([a-zA-Z0-9\-_+.${}/~]+)$" )
47__unset_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)$" ) 48__unset_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)$" )
48__unset_flag_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)\[([a-zA-Z0-9\-_+.][a-zA-Z0-9\-_+.@]+)\]$" ) 49__unset_flag_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)\[([a-zA-Z0-9\-_+.][a-zA-Z0-9\-_+.@]+)\]$" )
49__addpylib_regexp__ = re.compile(r"addpylib\s+(.+)\s+(.+)" ) 50__addpylib_regexp__ = re.compile(r"addpylib\s+(.+)\s+(.+)" )
51__addfragments_regexp__ = re.compile(r"addfragments\s+(.+)\s+(.+)\s+(.+)\s+(.+)" )
50 52
51def init(data): 53def init(data):
52 return 54 return
@@ -164,6 +166,10 @@ def feeder(lineno, s, fn, statements, baseconfig=False, conffile=True):
164 m = __config_regexp__.match(s) 166 m = __config_regexp__.match(s)
165 if m: 167 if m:
166 groupd = m.groupdict() 168 groupd = m.groupdict()
169 if groupd['var'] == "":
170 raise ParseError("Empty variable name in assignment: '%s'" % s, fn, lineno);
171 if not groupd['whitespace'] or not groupd['whitespace2']:
172 logger.warning("%s:%s lacks whitespace around the assignment: '%s'" % (fn, lineno, s))
167 ast.handleData(statements, fn, lineno, groupd) 173 ast.handleData(statements, fn, lineno, groupd)
168 return 174 return
169 175
@@ -177,6 +183,11 @@ def feeder(lineno, s, fn, statements, baseconfig=False, conffile=True):
177 ast.handleInclude(statements, fn, lineno, m, True) 183 ast.handleInclude(statements, fn, lineno, m, True)
178 return 184 return
179 185
186 m = __includeall_regexp__.match(s)
187 if m:
188 ast.handleIncludeAll(statements, fn, lineno, m)
189 return
190
180 m = __export_regexp__.match(s) 191 m = __export_regexp__.match(s)
181 if m: 192 if m:
182 ast.handleExport(statements, fn, lineno, m) 193 ast.handleExport(statements, fn, lineno, m)
@@ -197,6 +208,11 @@ def feeder(lineno, s, fn, statements, baseconfig=False, conffile=True):
197 ast.handlePyLib(statements, fn, lineno, m) 208 ast.handlePyLib(statements, fn, lineno, m)
198 return 209 return
199 210
211 m = __addfragments_regexp__.match(s)
212 if m:
213 ast.handleAddFragments(statements, fn, lineno, m)
214 return
215
200 raise ParseError("unparsed line: '%s'" % s, fn, lineno); 216 raise ParseError("unparsed line: '%s'" % s, fn, lineno);
201 217
202# Add us to the handlers list 218# Add us to the handlers list
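
Two behavioural changes hide in the __config_regexp__ hunks above: a now-permissible empty variable name is turned into an explicit ParseError, and the newly captured whitespace groups feed a warning for assignments written without spaces around the operator. A cut-down sketch of both checks, using a simplified regex that keeps only the named groups they rely on:

    import re

    # Reduced form of __config_regexp__; only the groups the checks use.
    cfg = re.compile(r'^(?P<var>[a-zA-Z0-9\-_+.${}/~:]*?)(?P<whitespace>\s*)=(?P<whitespace2>\s*)(?P<value>".*")$')

    for line in ['FOO = "bar"', 'FOO="bar"', '= "bar"']:
        m = cfg.match(line)
        if m.group('var') == "":
            print("ParseError: empty variable name: %s" % line)
        elif not m.group('whitespace') or not m.group('whitespace2'):
            print("warning: missing whitespace around assignment: %s" % line)
        else:
            print("ok: %s" % line)
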
diff --git a/bitbake/lib/bb/persist_data.py b/bitbake/lib/bb/persist_data.py
deleted file mode 100644
index bcca791edf..0000000000
--- a/bitbake/lib/bb/persist_data.py
+++ /dev/null
@@ -1,271 +0,0 @@
1"""BitBake Persistent Data Store
2
3Used to store data in a central location such that other threads/tasks can
4access them at some future date. Acts as a convenience wrapper around sqlite,
5currently, providing a key/value store accessed by 'domain'.
6"""
7
8# Copyright (C) 2007 Richard Purdie
9# Copyright (C) 2010 Chris Larson <chris_larson@mentor.com>
10#
11# SPDX-License-Identifier: GPL-2.0-only
12#
13
14import collections
15import collections.abc
16import contextlib
17import functools
18import logging
19import os.path
20import sqlite3
21import sys
22from collections.abc import Mapping
23
24sqlversion = sqlite3.sqlite_version_info
25if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
26 raise Exception("sqlite3 version 3.3.0 or later is required.")
27
28
29logger = logging.getLogger("BitBake.PersistData")
30
31@functools.total_ordering
32class SQLTable(collections.abc.MutableMapping):
33 class _Decorators(object):
34 @staticmethod
35 def retry(*, reconnect=True):
36 """
37 Decorator that restarts a function if a database locked sqlite
38 exception occurs. If reconnect is True, the database connection
39 will be closed and reopened each time a failure occurs
40 """
41 def retry_wrapper(f):
42 def wrap_func(self, *args, **kwargs):
43 # Reconnect if necessary
44 if self.connection is None and reconnect:
45 self.reconnect()
46
47 count = 0
48 while True:
49 try:
50 return f(self, *args, **kwargs)
51 except sqlite3.OperationalError as exc:
52 if count < 500 and ('is locked' in str(exc) or 'locking protocol' in str(exc)):
53 count = count + 1
54 if reconnect:
55 self.reconnect()
56 continue
57 raise
58 return wrap_func
59 return retry_wrapper
60
61 @staticmethod
62 def transaction(f):
63 """
64 Decorator that starts a database transaction and creates a database
65 cursor for performing queries. If no exception is thrown, the
66 database results are committed. If an exception occurs, the database
67 is rolled back. In all cases, the cursor is closed after the
68 function ends.
69
70 Note that the cursor is passed as an extra argument to the function
71 after `self` and before any of the normal arguments
72 """
73 def wrap_func(self, *args, **kwargs):
74 # Context manager will COMMIT the database on success,
75 # or ROLLBACK on an exception
76 with self.connection:
77 # Automatically close the cursor when done
78 with contextlib.closing(self.connection.cursor()) as cursor:
79 return f(self, cursor, *args, **kwargs)
80 return wrap_func
81
82 """Object representing a table/domain in the database"""
83 def __init__(self, cachefile, table):
84 self.cachefile = cachefile
85 self.table = table
86
87 self.connection = None
88 self._execute_single("CREATE TABLE IF NOT EXISTS %s(key TEXT PRIMARY KEY NOT NULL, value TEXT);" % table)
89
90 @_Decorators.retry(reconnect=False)
91 @_Decorators.transaction
92 def _setup_database(self, cursor):
93 cursor.execute("pragma synchronous = off;")
94 # Enable WAL and keep the autocheckpoint length small (the default is
95 # usually 1000). Persistent caches are usually read-mostly, so keeping
96 # this short will keep readers running quickly
97 cursor.execute("pragma journal_mode = WAL;")
98 cursor.execute("pragma wal_autocheckpoint = 100;")
99
100 def reconnect(self):
101 if self.connection is not None:
102 self.connection.close()
103 self.connection = sqlite3.connect(self.cachefile, timeout=5)
104 self.connection.text_factory = str
105 self._setup_database()
106
107 @_Decorators.retry()
108 @_Decorators.transaction
109 def _execute_single(self, cursor, *query):
110 """
111 Executes a single query and discards the results. This correctly closes
112 the database cursor when finished
113 """
114 cursor.execute(*query)
115
116 @_Decorators.retry()
117 def _row_iter(self, f, *query):
118 """
119 Helper function that returns a row iterator. Each time __next__ is
120 called on the iterator, the provided function is evaluated to determine
121 the return value
122 """
123 class CursorIter(object):
124 def __init__(self, cursor):
125 self.cursor = cursor
126
127 def __iter__(self):
128 return self
129
130 def __next__(self):
131 row = self.cursor.fetchone()
132 if row is None:
133 self.cursor.close()
134 raise StopIteration
135 return f(row)
136
137 def __enter__(self):
138 return self
139
140 def __exit__(self, typ, value, traceback):
141 self.cursor.close()
142 return False
143
144 cursor = self.connection.cursor()
145 try:
146 cursor.execute(*query)
147 return CursorIter(cursor)
148 except:
149 cursor.close()
150
151 def __enter__(self):
152 self.connection.__enter__()
153 return self
154
155 def __exit__(self, *excinfo):
156 self.connection.__exit__(*excinfo)
157
158 @_Decorators.retry()
159 @_Decorators.transaction
160 def __getitem__(self, cursor, key):
161 cursor.execute("SELECT * from %s where key=?;" % self.table, [key])
162 row = cursor.fetchone()
163 if row is not None:
164 return row[1]
165 raise KeyError(key)
166
167 @_Decorators.retry()
168 @_Decorators.transaction
169 def __delitem__(self, cursor, key):
170 if key not in self:
171 raise KeyError(key)
172 cursor.execute("DELETE from %s where key=?;" % self.table, [key])
173
174 @_Decorators.retry()
175 @_Decorators.transaction
176 def __setitem__(self, cursor, key, value):
177 if not isinstance(key, str):
178 raise TypeError('Only string keys are supported')
179 elif not isinstance(value, str):
180 raise TypeError('Only string values are supported')
181
182 # Ensure the entire transaction (including SELECT) executes under write lock
183 cursor.execute("BEGIN EXCLUSIVE")
184
185 cursor.execute("SELECT * from %s where key=?;" % self.table, [key])
186 row = cursor.fetchone()
187 if row is not None:
188 cursor.execute("UPDATE %s SET value=? WHERE key=?;" % self.table, [value, key])
189 else:
190 cursor.execute("INSERT into %s(key, value) values (?, ?);" % self.table, [key, value])
191
192 @_Decorators.retry()
193 @_Decorators.transaction
194 def __contains__(self, cursor, key):
195 cursor.execute('SELECT * from %s where key=?;' % self.table, [key])
196 return cursor.fetchone() is not None
197
198 @_Decorators.retry()
199 @_Decorators.transaction
200 def __len__(self, cursor):
201 cursor.execute("SELECT COUNT(key) FROM %s;" % self.table)
202 row = cursor.fetchone()
203 if row is not None:
204 return row[0]
205
206 def __iter__(self):
207 return self._row_iter(lambda row: row[0], "SELECT key from %s;" % self.table)
208
209 def __lt__(self, other):
210 if not isinstance(other, Mapping):
211 raise NotImplementedError()
212
213 return len(self) < len(other)
214
215 def get_by_pattern(self, pattern):
216 return self._row_iter(lambda row: row[1], "SELECT * FROM %s WHERE key LIKE ?;" %
217 self.table, [pattern])
218
219 def values(self):
220 return list(self.itervalues())
221
222 def itervalues(self):
223 return self._row_iter(lambda row: row[0], "SELECT value FROM %s;" %
224 self.table)
225
226 def items(self):
227 return list(self.iteritems())
228
229 def iteritems(self):
230 return self._row_iter(lambda row: (row[0], row[1]), "SELECT * FROM %s;" %
231 self.table)
232
233 @_Decorators.retry()
234 @_Decorators.transaction
235 def clear(self, cursor):
236 cursor.execute("DELETE FROM %s;" % self.table)
237
238 def has_key(self, key):
239 return key in self
240
241def persist(domain, d):
242 """Convenience factory for SQLTable objects based upon metadata"""
243 import bb.utils
244 cachedir = (d.getVar("PERSISTENT_DIR") or
245 d.getVar("CACHE"))
246 if not cachedir:
247 logger.critical("Please set the 'PERSISTENT_DIR' or 'CACHE' variable")
248 sys.exit(1)
249
250 bb.utils.mkdirhier(cachedir)
251 cachefile = os.path.join(cachedir, "bb_persist_data.sqlite3")
252
253 try:
254 return SQLTable(cachefile, domain)
255 except sqlite3.OperationalError:
256 # Sqlite fails to open database when its path is too long.
257 # After testing, 504 is the biggest path length that can be opened by
258 # sqlite.
259 # Note: This code is called before sanity.bbclass and its path length
260 # check
261 max_len = 504
262 if len(cachefile) > max_len:
263 logger.critical("The path of the cache file is too long "
264 "({0} chars > {1}) to be opened by sqlite! "
265 "Your cache file is \"{2}\"".format(
266 len(cachefile),
267 max_len,
268 cachefile))
269 sys.exit(1)
270 else:
271 raise
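
persist_data.py is deleted outright. For readers of the removal, the one generally useful pattern in it was the retry-on-locked decorator around sqlite operations; below is a condensed, standalone sketch of that pattern (reconnect handling omitted, and none of these names are BitBake API):

    import contextlib
    import functools
    import sqlite3

    def retry_locked(f, attempts=500):
        # Re-run f while sqlite reports the database as locked, as the
        # deleted _Decorators.retry did.
        @functools.wraps(f)
        def wrapper(*args, **kwargs):
            count = 0
            while True:
                try:
                    return f(*args, **kwargs)
                except sqlite3.OperationalError as exc:
                    if count < attempts and ('is locked' in str(exc) or 'locking protocol' in str(exc)):
                        count += 1
                        continue
                    raise
        return wrapper

    @retry_locked
    def put(conn, key, value):
        with conn:  # COMMIT on success, ROLLBACK on exception
            with contextlib.closing(conn.cursor()) as cur:
                cur.execute("INSERT OR REPLACE INTO kv(key, value) VALUES (?, ?)", (key, value))

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE kv(key TEXT PRIMARY KEY, value TEXT)")
    put(conn, "answer", "42")
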
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
index bc7e18175d..80f3d3282f 100644
--- a/bitbake/lib/bb/runqueue.py
+++ b/bitbake/lib/bb/runqueue.py
@@ -14,6 +14,7 @@ import os
14import sys 14import sys
15import stat 15import stat
16import errno 16import errno
17import itertools
17import logging 18import logging
18import re 19import re
19import bb 20import bb
@@ -128,6 +129,7 @@ class RunQueueStats:
128# runQueue state machine 129# runQueue state machine
129runQueuePrepare = 2 130runQueuePrepare = 2
130runQueueSceneInit = 3 131runQueueSceneInit = 3
132runQueueDumpSigs = 4
131runQueueRunning = 6 133runQueueRunning = 6
132runQueueFailed = 7 134runQueueFailed = 7
133runQueueCleanUp = 8 135runQueueCleanUp = 8
@@ -475,7 +477,6 @@ class RunQueueData:
475 self.runtaskentries = {} 477 self.runtaskentries = {}
476 478
477 def runq_depends_names(self, ids): 479 def runq_depends_names(self, ids):
478 import re
479 ret = [] 480 ret = []
480 for id in ids: 481 for id in ids:
481 nam = os.path.basename(id) 482 nam = os.path.basename(id)
@@ -728,6 +729,8 @@ class RunQueueData:
728 if mc == frommc: 729 if mc == frommc:
729 fn = taskData[mcdep].build_targets[pn][0] 730 fn = taskData[mcdep].build_targets[pn][0]
730 newdep = '%s:%s' % (fn,deptask) 731 newdep = '%s:%s' % (fn,deptask)
732 if newdep not in taskData[mcdep].taskentries:
733 bb.fatal("Task mcdepends on non-existent task %s" % (newdep))
731 taskData[mc].taskentries[tid].tdepends.append(newdep) 734 taskData[mc].taskentries[tid].tdepends.append(newdep)
732 735
733 for mc in taskData: 736 for mc in taskData:
@@ -1273,27 +1276,41 @@ class RunQueueData:
1273 1276
1274 bb.parse.siggen.set_setscene_tasks(self.runq_setscene_tids) 1277 bb.parse.siggen.set_setscene_tasks(self.runq_setscene_tids)
1275 1278
1279 starttime = time.time()
1280 lasttime = starttime
1281
1276 # Iterate over the task list and call into the siggen code 1282 # Iterate over the task list and call into the siggen code
1277 dealtwith = set() 1283 dealtwith = set()
1278 todeal = set(self.runtaskentries) 1284 todeal = set(self.runtaskentries)
1279 while todeal: 1285 while todeal:
1286 ready = set()
1280 for tid in todeal.copy(): 1287 for tid in todeal.copy():
1281 if not (self.runtaskentries[tid].depends - dealtwith): 1288 if not (self.runtaskentries[tid].depends - dealtwith):
1282 dealtwith.add(tid) 1289 self.runtaskentries[tid].taskhash_deps = bb.parse.siggen.prep_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches)
1283 todeal.remove(tid) 1290 # get_taskhash for a given tid *must* be called before get_unihash* below
1284 self.prepare_task_hash(tid) 1291 self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches)
1285 bb.event.check_for_interrupts(self.cooker.data) 1292 ready.add(tid)
1293 unihashes = bb.parse.siggen.get_unihashes(ready)
1294 for tid in ready:
1295 dealtwith.add(tid)
1296 todeal.remove(tid)
1297 self.runtaskentries[tid].unihash = unihashes[tid]
1298
1299 bb.event.check_for_interrupts(self.cooker.data)
1300
1301 if time.time() > (lasttime + 30):
1302 lasttime = time.time()
1303 hashequiv_logger.verbose("Initial setup loop progress: %s of %s in %s" % (len(todeal), len(self.runtaskentries), lasttime - starttime))
1304
1305 endtime = time.time()
1306 if (endtime-starttime > 60):
1307 hashequiv_logger.verbose("Initial setup loop took: %s" % (endtime-starttime))
1286 1308
1287 bb.parse.siggen.writeout_file_checksum_cache() 1309 bb.parse.siggen.writeout_file_checksum_cache()
1288 1310
1289 #self.dump_data() 1311 #self.dump_data()
1290 return len(self.runtaskentries) 1312 return len(self.runtaskentries)
1291 1313
1292 def prepare_task_hash(self, tid):
1293 bb.parse.siggen.prep_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches)
1294 self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches)
1295 self.runtaskentries[tid].unihash = bb.parse.siggen.get_unihash(tid)
1296
1297 def dump_data(self): 1314 def dump_data(self):
1298 """ 1315 """
1299 Dump some debug information on the internal data structures 1316 Dump some debug information on the internal data structures
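
The restructured setup loop above replaces per-task unihash queries with dependency-ordered waves: every task whose dependencies are settled gets its taskhash computed, then the whole wave's unihashes are resolved with one batched call. A schematic version of that pattern, with both hash callables as stand-ins rather than the real siggen methods:

    def process_in_waves(depends, get_taskhash, get_unihashes):
        results = {}
        dealtwith = set()
        todeal = set(depends)
        while todeal:
            ready = {tid for tid in todeal if not (depends[tid] - dealtwith)}
            for tid in ready:
                get_taskhash(tid)              # must run before the unihash lookup
            unihashes = get_unihashes(ready)   # one round trip per wave
            for tid in ready:
                results[tid] = unihashes[tid]
            dealtwith |= ready
            todeal -= ready
        return results

    deps = {"a": set(), "b": {"a"}, "c": {"a"}, "d": {"b", "c"}}
    print(process_in_waves(deps, lambda t: None, lambda tids: {t: "uni" for t in tids}))
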
@@ -1574,14 +1591,19 @@ class RunQueue:
1574 self.rqdata.init_progress_reporter.next_stage() 1591 self.rqdata.init_progress_reporter.next_stage()
1575 self.rqexe = RunQueueExecute(self) 1592 self.rqexe = RunQueueExecute(self)
1576 1593
1577 dump = self.cooker.configuration.dump_signatures 1594 dumpsigs = self.cooker.configuration.dump_signatures
1578 if dump: 1595 if dumpsigs:
1579 self.rqdata.init_progress_reporter.finish() 1596 self.rqdata.init_progress_reporter.finish()
1580 if 'printdiff' in dump: 1597 if 'printdiff' in dumpsigs:
1581 invalidtasks = self.print_diffscenetasks() 1598 self.invalidtasks_dump = self.print_diffscenetasks()
1582 self.dump_signatures(dump) 1599 self.state = runQueueDumpSigs
1583 if 'printdiff' in dump: 1600
1584 self.write_diffscenetasks(invalidtasks) 1601 if self.state is runQueueDumpSigs:
1602 dumpsigs = self.cooker.configuration.dump_signatures
1603 retval = self.dump_signatures(dumpsigs)
1604 if retval is False:
1605 if 'printdiff' in dumpsigs:
1606 self.write_diffscenetasks(self.invalidtasks_dump)
1585 self.state = runQueueComplete 1607 self.state = runQueueComplete
1586 1608
1587 if self.state is runQueueSceneInit: 1609 if self.state is runQueueSceneInit:
@@ -1672,33 +1694,42 @@ class RunQueue:
1672 bb.parse.siggen.dump_sigtask(taskfn, taskname, dataCaches[mc].stamp[taskfn], True) 1694 bb.parse.siggen.dump_sigtask(taskfn, taskname, dataCaches[mc].stamp[taskfn], True)
1673 1695
1674 def dump_signatures(self, options): 1696 def dump_signatures(self, options):
1675 if bb.cooker.CookerFeatures.RECIPE_SIGGEN_INFO not in self.cooker.featureset: 1697 if not hasattr(self, "dumpsigs_launched"):
1676 bb.fatal("The dump signatures functionality needs the RECIPE_SIGGEN_INFO feature enabled") 1698 if bb.cooker.CookerFeatures.RECIPE_SIGGEN_INFO not in self.cooker.featureset:
1677 1699 bb.fatal("The dump signatures functionality needs the RECIPE_SIGGEN_INFO feature enabled")
1678 bb.note("Writing task signature files") 1700
1679 1701 bb.note("Writing task signature files")
1680 max_process = int(self.cfgData.getVar("BB_NUMBER_PARSE_THREADS") or os.cpu_count() or 1) 1702
1681 def chunkify(l, n): 1703 max_process = int(self.cfgData.getVar("BB_NUMBER_PARSE_THREADS") or os.cpu_count() or 1)
1682 return [l[i::n] for i in range(n)] 1704 def chunkify(l, n):
1683 tids = chunkify(list(self.rqdata.runtaskentries), max_process) 1705 return [l[i::n] for i in range(n)]
1684 # We cannot use the real multiprocessing.Pool easily due to some local data 1706 dumpsigs_tids = chunkify(list(self.rqdata.runtaskentries), max_process)
1685 # that can't be pickled. This is a cheap multi-process solution. 1707
1686 launched = [] 1708 # We cannot use the real multiprocessing.Pool easily due to some local data
1687 while tids: 1709 # that can't be pickled. This is a cheap multi-process solution.
1688 if len(launched) < max_process: 1710 self.dumpsigs_launched = []
1689 p = Process(target=self._rq_dump_sigtid, args=(tids.pop(), )) 1711
1712 for tids in dumpsigs_tids:
1713 p = Process(target=self._rq_dump_sigtid, args=(tids, ))
1690 p.start() 1714 p.start()
1691 launched.append(p) 1715 self.dumpsigs_launched.append(p)
1692 for q in launched: 1716
1693 # The finished processes are joined when calling is_alive() 1717 return 1.0
1694 if not q.is_alive(): 1718
1695 launched.remove(q) 1719 for q in self.dumpsigs_launched:
1696 for p in launched: 1720 # The finished processes are joined when calling is_alive()
1721 if not q.is_alive():
1722 self.dumpsigs_launched.remove(q)
1723
1724 if self.dumpsigs_launched:
1725 return 1.0
1726
1727 for p in self.dumpsigs_launched:
1697 p.join() 1728 p.join()
1698 1729
1699 bb.parse.siggen.dump_sigs(self.rqdata.dataCaches, options) 1730 bb.parse.siggen.dump_sigs(self.rqdata.dataCaches, options)
1700 1731
1701 return 1732 return False
1702 1733
1703 def print_diffscenetasks(self): 1734 def print_diffscenetasks(self):
1704 def get_root_invalid_tasks(task, taskdepends, valid, noexec, visited_invalid): 1735 def get_root_invalid_tasks(task, taskdepends, valid, noexec, visited_invalid):
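
dump_signatures() above changes from a blocking call into a re-entrant poll: the first invocation launches the worker processes and returns 1.0 as a sleep hint to the caller's state machine, later invocations reap finished workers, and False signals completion. A self-contained sketch of that shape (dump_chunk() is a stand-in for writing signature files):

    import time
    from multiprocessing import Process

    def dump_chunk(tids):
        time.sleep(0.1)   # stand-in for writing signature files

    class SigDumper:
        def __init__(self, chunks):
            self.launched = None
            self.chunks = chunks

        def poll(self):
            if self.launched is None:
                self.launched = [Process(target=dump_chunk, args=(c,)) for c in self.chunks]
                for p in self.launched:
                    p.start()
                return 1.0
            # is_alive() also reaps workers that have already exited
            self.launched = [p for p in self.launched if p.is_alive()]
            return 1.0 if self.launched else False

    if __name__ == "__main__":
        dumper = SigDumper([[1, 2], [3]])
        while dumper.poll() is not False:
            time.sleep(0.2)
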
@@ -2175,12 +2206,20 @@ class RunQueueExecute:
2175 if not hasattr(self, "sorted_setscene_tids"): 2206 if not hasattr(self, "sorted_setscene_tids"):
2176 # Don't want to sort this set every execution 2207 # Don't want to sort this set every execution
2177 self.sorted_setscene_tids = sorted(self.rqdata.runq_setscene_tids) 2208 self.sorted_setscene_tids = sorted(self.rqdata.runq_setscene_tids)
2209 # Resume looping where we left off when we returned to feed the mainloop
2210 self.setscene_tids_generator = itertools.cycle(self.rqdata.runq_setscene_tids)
2178 2211
2179 task = None 2212 task = None
2180 if not self.sqdone and self.can_start_task(): 2213 if not self.sqdone and self.can_start_task():
2181 # Find the next setscene to run 2214 loopcount = 0
2182 for nexttask in self.sorted_setscene_tids: 2215 # Find the next setscene to run, exit the loop when we've processed all tids or found something to execute
2216 while loopcount < len(self.rqdata.runq_setscene_tids):
2217 loopcount += 1
2218 nexttask = next(self.setscene_tids_generator)
2183 if nexttask in self.sq_buildable and nexttask not in self.sq_running and self.sqdata.stamps[nexttask] not in self.build_stamps.values() and nexttask not in self.sq_harddep_deferred: 2219 if nexttask in self.sq_buildable and nexttask not in self.sq_running and self.sqdata.stamps[nexttask] not in self.build_stamps.values() and nexttask not in self.sq_harddep_deferred:
2220 if nexttask in self.sq_deferred and self.sq_deferred[nexttask] not in self.runq_complete:
2221 # Skip deferred tasks quickly before the 'expensive' tests below - this is key to performant multiconfig builds
2222 continue
2184 if nexttask not in self.sqdata.unskippable and self.sqdata.sq_revdeps[nexttask] and \ 2223 if nexttask not in self.sqdata.unskippable and self.sqdata.sq_revdeps[nexttask] and \
2185 nexttask not in self.sq_needed_harddeps and \ 2224 nexttask not in self.sq_needed_harddeps and \
2186 self.sqdata.sq_revdeps[nexttask].issubset(self.scenequeue_covered) and \ 2225 self.sqdata.sq_revdeps[nexttask].issubset(self.scenequeue_covered) and \
@@ -2210,8 +2249,7 @@ class RunQueueExecute:
2210 if t in self.runq_running and t not in self.runq_complete: 2249 if t in self.runq_running and t not in self.runq_complete:
2211 continue 2250 continue
2212 if nexttask in self.sq_deferred: 2251 if nexttask in self.sq_deferred:
2213 if self.sq_deferred[nexttask] not in self.runq_complete: 2252 # Deferred tasks that were still deferred were skipped above, so we now need to process this one
2214 continue
2215 logger.debug("Task %s no longer deferred" % nexttask) 2253 logger.debug("Task %s no longer deferred" % nexttask)
2216 del self.sq_deferred[nexttask] 2254 del self.sq_deferred[nexttask]
2217 valid = self.rq.validate_hashes(set([nexttask]), self.cooker.data, 0, False, summary=False) 2255 valid = self.rq.validate_hashes(set([nexttask]), self.cooker.data, 0, False, summary=False)
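
The scheduling hunk above swaps a fresh sorted scan for an itertools.cycle() generator, so each call resumes scanning where the previous one stopped, while loopcount bounds the walk to one full pass. A small demonstration of why the resumption matters:

    import itertools

    class Scheduler:
        def __init__(self, tids):
            self.tids = tids
            self.gen = itertools.cycle(tids)   # remembers position across calls

        def next_runnable(self, runnable):
            for _ in range(len(self.tids)):    # at most one full pass
                tid = next(self.gen)
                if tid in runnable:
                    return tid
            return None

    s = Scheduler(["t1", "t2", "t3"])
    print(s.next_runnable({"t2"}))          # t2
    print(s.next_runnable({"t1", "t3"}))    # t3 -- resumes after t2, not from t1
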
@@ -2438,14 +2476,17 @@ class RunQueueExecute:
2438 taskdepdata_cache = {} 2476 taskdepdata_cache = {}
2439 for task in self.rqdata.runtaskentries: 2477 for task in self.rqdata.runtaskentries:
2440 (mc, fn, taskname, taskfn) = split_tid_mcfn(task) 2478 (mc, fn, taskname, taskfn) = split_tid_mcfn(task)
2441 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn] 2479 taskdepdata_cache[task] = bb.TaskData(
2442 deps = self.rqdata.runtaskentries[task].depends 2480 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn],
2443 provides = self.rqdata.dataCaches[mc].fn_provides[taskfn] 2481 taskname = taskname,
2444 taskhash = self.rqdata.runtaskentries[task].hash 2482 fn = fn,
2445 unihash = self.rqdata.runtaskentries[task].unihash 2483 deps = self.filtermcdeps(task, mc, self.rqdata.runtaskentries[task].depends),
2446 deps = self.filtermcdeps(task, mc, deps) 2484 provides = self.rqdata.dataCaches[mc].fn_provides[taskfn],
2447 hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn] 2485 taskhash = self.rqdata.runtaskentries[task].hash,
2448 taskdepdata_cache[task] = [pn, taskname, fn, deps, provides, taskhash, unihash, hashfn] 2486 unihash = self.rqdata.runtaskentries[task].unihash,
2487 hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn],
2488 taskhash_deps = self.rqdata.runtaskentries[task].taskhash_deps,
2489 )
2449 2490
2450 self.taskdepdata_cache = taskdepdata_cache 2491 self.taskdepdata_cache = taskdepdata_cache
2451 2492
@@ -2460,9 +2501,11 @@ class RunQueueExecute:
2460 while next: 2501 while next:
2461 additional = [] 2502 additional = []
2462 for revdep in next: 2503 for revdep in next:
2463 self.taskdepdata_cache[revdep][6] = self.rqdata.runtaskentries[revdep].unihash 2504 self.taskdepdata_cache[revdep] = self.taskdepdata_cache[revdep]._replace(
2505 unihash=self.rqdata.runtaskentries[revdep].unihash
2506 )
2464 taskdepdata[revdep] = self.taskdepdata_cache[revdep] 2507 taskdepdata[revdep] = self.taskdepdata_cache[revdep]
2465 for revdep2 in self.taskdepdata_cache[revdep][3]: 2508 for revdep2 in self.taskdepdata_cache[revdep].deps:
2466 if revdep2 not in taskdepdata: 2509 if revdep2 not in taskdepdata:
2467 additional.append(revdep2) 2510 additional.append(revdep2)
2468 next = additional 2511 next = additional
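
The positional eight-element task lists become bb.TaskData named tuples in this hunk, which makes field access self-describing and turns index-based mutation into an explicit _replace(). The field list below is inferred from the constructor calls above; the real definition lives elsewhere in this series (bb/__init__.py), not in this file:

    from collections import namedtuple

    TaskData = namedtuple("TaskData",
        ["pn", "taskname", "fn", "deps", "provides",
         "taskhash", "unihash", "hashfn", "taskhash_deps"])

    td = TaskData(pn="demo", taskname="do_compile", fn="demo.bb",
                  deps=set(), provides=["demo"], taskhash="abc",
                  unihash="abc", hashfn="demo.bb", taskhash_deps=set())

    # Tuples are immutable, so updating one field means _replace(), as the
    # rehash path above does for unihash:
    td = td._replace(unihash="def")
    print(td.unihash, td.deps)
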
@@ -2531,9 +2574,6 @@ class RunQueueExecute:
2531 self.rqdata.runtaskentries[hashtid].unihash = unihash 2574 self.rqdata.runtaskentries[hashtid].unihash = unihash
2532 bb.parse.siggen.set_unihash(hashtid, unihash) 2575 bb.parse.siggen.set_unihash(hashtid, unihash)
2533 toprocess.add(hashtid) 2576 toprocess.add(hashtid)
2534 if torehash:
2535 # Need to save after set_unihash above
2536 bb.parse.siggen.save_unitaskhashes()
2537 2577
2538 # Work out all tasks which depend upon these 2578 # Work out all tasks which depend upon these
2539 total = set() 2579 total = set()
@@ -2556,17 +2596,28 @@ class RunQueueExecute:
2556 elif self.rqdata.runtaskentries[p].depends.isdisjoint(total): 2596 elif self.rqdata.runtaskentries[p].depends.isdisjoint(total):
2557 next.add(p) 2597 next.add(p)
2558 2598
2599 starttime = time.time()
2600 lasttime = starttime
2601
2559 # When an item doesn't have dependencies in total, we can process it. Drop items from total when handled 2602 # When an item doesn't have dependencies in total, we can process it. Drop items from total when handled
2560 while next: 2603 while next:
2561 current = next.copy() 2604 current = next.copy()
2562 next = set() 2605 next = set()
2606 ready = {}
2563 for tid in current: 2607 for tid in current:
2564 if self.rqdata.runtaskentries[p].depends and not self.rqdata.runtaskentries[tid].depends.isdisjoint(total): 2608 if self.rqdata.runtaskentries[p].depends and not self.rqdata.runtaskentries[tid].depends.isdisjoint(total):
2565 continue 2609 continue
2610 # get_taskhash for a given tid *must* be called before get_unihash* below
2611 ready[tid] = bb.parse.siggen.get_taskhash(tid, self.rqdata.runtaskentries[tid].depends, self.rqdata.dataCaches)
2612
2613 unihashes = bb.parse.siggen.get_unihashes(ready.keys())
2614
2615 for tid in ready:
2566 orighash = self.rqdata.runtaskentries[tid].hash 2616 orighash = self.rqdata.runtaskentries[tid].hash
2567 newhash = bb.parse.siggen.get_taskhash(tid, self.rqdata.runtaskentries[tid].depends, self.rqdata.dataCaches) 2617 newhash = ready[tid]
2568 origuni = self.rqdata.runtaskentries[tid].unihash 2618 origuni = self.rqdata.runtaskentries[tid].unihash
2569 newuni = bb.parse.siggen.get_unihash(tid) 2619 newuni = unihashes[tid]
2620
2570 # FIXME, need to check it can come from sstate at all for determinism? 2621 # FIXME, need to check it can come from sstate at all for determinism?
2571 remapped = False 2622 remapped = False
2572 if newuni == origuni: 2623 if newuni == origuni:
@@ -2587,6 +2638,15 @@ class RunQueueExecute:
2587 next |= self.rqdata.runtaskentries[tid].revdeps 2638 next |= self.rqdata.runtaskentries[tid].revdeps
2588 total.remove(tid) 2639 total.remove(tid)
2589 next.intersection_update(total) 2640 next.intersection_update(total)
2641 bb.event.check_for_interrupts(self.cooker.data)
2642
2643 if time.time() > (lasttime + 30):
2644 lasttime = time.time()
2645 hashequiv_logger.verbose("Rehash loop slow progress: %s in %s" % (len(total), lasttime - starttime))
2646
2647 endtime = time.time()
2648 if (endtime-starttime > 60):
2649 hashequiv_logger.verbose("Rehash loop took more than 60s: %s" % (endtime-starttime))
2590 2650
2591 if changed: 2651 if changed:
2592 for mc in self.rq.worker: 2652 for mc in self.rq.worker:
@@ -2712,8 +2772,12 @@ class RunQueueExecute:
2712 logger.debug2("%s was unavailable and is a hard dependency of %s so skipping" % (task, dep)) 2772 logger.debug2("%s was unavailable and is a hard dependency of %s so skipping" % (task, dep))
2713 self.sq_task_failoutright(dep) 2773 self.sq_task_failoutright(dep)
2714 continue 2774 continue
2775
2776 # For performance, only compute allcovered once if needed
2777 if self.sqdata.sq_deps[task]:
2778 allcovered = self.scenequeue_covered | self.scenequeue_notcovered
2715 for dep in sorted(self.sqdata.sq_deps[task]): 2779 for dep in sorted(self.sqdata.sq_deps[task]):
2716 if self.sqdata.sq_revdeps[dep].issubset(self.scenequeue_covered | self.scenequeue_notcovered): 2780 if self.sqdata.sq_revdeps[dep].issubset(allcovered):
2717 if dep not in self.sq_buildable: 2781 if dep not in self.sq_buildable:
2718 self.sq_buildable.add(dep) 2782 self.sq_buildable.add(dep)
2719 2783
@@ -2806,13 +2870,19 @@ class RunQueueExecute:
2806 additional = [] 2870 additional = []
2807 for revdep in next: 2871 for revdep in next:
2808 (mc, fn, taskname, taskfn) = split_tid_mcfn(revdep) 2872 (mc, fn, taskname, taskfn) = split_tid_mcfn(revdep)
2809 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
2810 deps = getsetscenedeps(revdep) 2873 deps = getsetscenedeps(revdep)
2811 provides = self.rqdata.dataCaches[mc].fn_provides[taskfn] 2874
2812 taskhash = self.rqdata.runtaskentries[revdep].hash 2875 taskdepdata[revdep] = bb.TaskData(
2813 unihash = self.rqdata.runtaskentries[revdep].unihash 2876 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn],
2814 hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn] 2877 taskname = taskname,
2815 taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash, unihash, hashfn] 2878 fn = fn,
2879 deps = deps,
2880 provides = self.rqdata.dataCaches[mc].fn_provides[taskfn],
2881 taskhash = self.rqdata.runtaskentries[revdep].hash,
2882 unihash = self.rqdata.runtaskentries[revdep].unihash,
2883 hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn],
2884 taskhash_deps = self.rqdata.runtaskentries[revdep].taskhash_deps,
2885 )
2816 for revdep2 in deps: 2886 for revdep2 in deps:
2817 if revdep2 not in taskdepdata: 2887 if revdep2 not in taskdepdata:
2818 additional.append(revdep2) 2888 additional.append(revdep2)
@@ -2964,14 +3034,13 @@ def build_scenequeue_data(sqdata, rqdata, sqrq):
2964 rqdata.init_progress_reporter.next_stage(len(rqdata.runtaskentries)) 3034 rqdata.init_progress_reporter.next_stage(len(rqdata.runtaskentries))
2965 3035
2966 # Sanity check all dependencies could be changed to setscene task references 3036 # Sanity check all dependencies could be changed to setscene task references
2967 for taskcounter, tid in enumerate(rqdata.runtaskentries): 3037 for tid in rqdata.runtaskentries:
2968 if tid in rqdata.runq_setscene_tids: 3038 if tid in rqdata.runq_setscene_tids:
2969 pass 3039 pass
2970 elif sq_revdeps_squash[tid]: 3040 elif sq_revdeps_squash[tid]:
2971 bb.msg.fatal("RunQueue", "Something went badly wrong during scenequeue generation, halting. Please report this problem.") 3041 bb.msg.fatal("RunQueue", "Something went badly wrong during scenequeue generation, halting. Please report this problem.")
2972 else: 3042 else:
2973 del sq_revdeps_squash[tid] 3043 del sq_revdeps_squash[tid]
2974 rqdata.init_progress_reporter.update(taskcounter)
2975 3044
2976 rqdata.init_progress_reporter.next_stage() 3045 rqdata.init_progress_reporter.next_stage()
2977 3046
@@ -3261,7 +3330,7 @@ class runQueuePipe():
3261 3330
3262 start = len(self.queue) 3331 start = len(self.queue)
3263 try: 3332 try:
3264 self.queue.extend(self.input.read(102400) or b"") 3333 self.queue.extend(self.input.read(512 * 1024) or b"")
3265 except (OSError, IOError) as e: 3334 except (OSError, IOError) as e:
3266 if e.errno != errno.EAGAIN: 3335 if e.errno != errno.EAGAIN:
3267 raise 3336 raise
diff --git a/bitbake/lib/bb/server/process.py b/bitbake/lib/bb/server/process.py
index 76b189291d..4b35be62cd 100644
--- a/bitbake/lib/bb/server/process.py
+++ b/bitbake/lib/bb/server/process.py
@@ -321,7 +321,22 @@ class ProcessServer():
321 bb.warn('Ignoring invalid BB_SERVER_TIMEOUT=%s, must be a float specifying seconds.' % self.timeout) 321 bb.warn('Ignoring invalid BB_SERVER_TIMEOUT=%s, must be a float specifying seconds.' % self.timeout)
322 seendata = True 322 seendata = True
323 323
324 ready = self.idle_commands(.1, fds) 324 if not self.idle:
325 self.idle = threading.Thread(target=self.idle_thread)
326 self.idle.start()
327 elif self.idle and not self.idle.is_alive():
328 serverlog("Idle thread terminated, main thread exiting too")
329 bb.error("Idle thread terminated, main thread exiting too")
330 self.quit = True
331
332 nextsleep = 1.0
333 if self.xmlrpc:
334 nextsleep = self.xmlrpc.get_timeout(nextsleep)
335 try:
336 ready = select.select(fds,[],[],nextsleep)[0]
337 except InterruptedError:
338 # Ignore EINTR
339 ready = []
325 340
326 if self.idle: 341 if self.idle:
327 self.idle.join() 342 self.idle.join()
@@ -424,7 +439,7 @@ class ProcessServer():
424 self.idle_cond.notify_all() 439 self.idle_cond.notify_all()
425 440
426 while not self.quit: 441 while not self.quit:
427 nextsleep = 0.1 442 nextsleep = 1.0
428 fds = [] 443 fds = []
429 444
430 with bb.utils.lock_timeout(self._idlefuncsLock): 445 with bb.utils.lock_timeout(self._idlefuncsLock):
@@ -462,7 +477,7 @@ class ProcessServer():
462 477
463 # Create new heartbeat event? 478 # Create new heartbeat event?
464 now = time.time() 479 now = time.time()
465 if bb.event._heartbeat_enabled and now >= self.next_heartbeat: 480 if items and bb.event._heartbeat_enabled and now >= self.next_heartbeat:
466 # We might have missed heartbeats. Just trigger once in 481 # We might have missed heartbeats. Just trigger once in
467 # that case and continue after the usual delay. 482 # that case and continue after the usual delay.
468 self.next_heartbeat += self.heartbeat_seconds 483 self.next_heartbeat += self.heartbeat_seconds
@@ -485,31 +500,6 @@ class ProcessServer():
485 if nextsleep is not None: 500 if nextsleep is not None:
486 select.select(fds,[],[],nextsleep)[0] 501 select.select(fds,[],[],nextsleep)[0]
487 502
488 def idle_commands(self, delay, fds=None):
489 nextsleep = delay
490 if not fds:
491 fds = []
492
493 if not self.idle:
494 self.idle = threading.Thread(target=self.idle_thread)
495 self.idle.start()
496 elif self.idle and not self.idle.is_alive():
497 serverlog("Idle thread terminated, main thread exiting too")
498 bb.error("Idle thread terminated, main thread exiting too")
499 self.quit = True
500
501 if nextsleep is not None:
502 if self.xmlrpc:
503 nextsleep = self.xmlrpc.get_timeout(nextsleep)
504 try:
505 return select.select(fds,[],[],nextsleep)[0]
506 except InterruptedError:
507 # Ignore EINTR
508 return []
509 else:
510 return select.select(fds,[],[],0)[0]
511
512
513class ServerCommunicator(): 503class ServerCommunicator():
514 def __init__(self, connection, recv): 504 def __init__(self, connection, recv):
515 self.connection = connection 505 self.connection = connection
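
With idle_commands() removed, the server main loop above owns its select() call directly: spawn the idle thread once, shut down if it dies, and treat EINTR as an ordinary wakeup. A reduced sketch of that loop shape (the class and method names here are illustrative):

    import select
    import threading
    import time

    class Server:
        def __init__(self):
            self.idle = None
            self.quit = False

        def idle_thread(self):
            time.sleep(0.1)   # stand-in for running registered idle functions

        def loop_once(self, fds, nextsleep=1.0):
            if not self.idle:
                self.idle = threading.Thread(target=self.idle_thread)
                self.idle.start()
            elif not self.idle.is_alive():
                self.quit = True   # idle thread died: exit the main loop too
                return []
            try:
                return select.select(fds, [], [], nextsleep)[0]
            except InterruptedError:
                return []          # EINTR is harmless; report nothing ready

    Server().loop_once([])
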
diff --git a/bitbake/lib/bb/server/xmlrpcserver.py b/bitbake/lib/bb/server/xmlrpcserver.py
index 04b0b17db1..ebc271aca4 100644
--- a/bitbake/lib/bb/server/xmlrpcserver.py
+++ b/bitbake/lib/bb/server/xmlrpcserver.py
@@ -14,6 +14,8 @@ from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
14import bb.server.xmlrpcclient 14import bb.server.xmlrpcclient
15 15
16import bb 16import bb
17import bb.cooker
18import bb.event
17 19
18# This request handler checks if the request has a "Bitbake-token" header 20# This request handler checks if the request has a "Bitbake-token" header
19# field (this comes from the client side) and compares it with its internal 21# field (this comes from the client side) and compares it with its internal
@@ -54,7 +56,7 @@ class BitBakeXMLRPCServer(SimpleXMLRPCServer):
54 56
55 def __init__(self, interface, cooker, parent): 57 def __init__(self, interface, cooker, parent):
56 # Use auto port configuration 58 # Use auto port configuration
57 if (interface[1] == -1): 59 if interface[1] == -1:
58 interface = (interface[0], 0) 60 interface = (interface[0], 0)
59 SimpleXMLRPCServer.__init__(self, interface, 61 SimpleXMLRPCServer.__init__(self, interface,
60 requestHandler=BitBakeXMLRPCRequestHandler, 62 requestHandler=BitBakeXMLRPCRequestHandler,
@@ -87,11 +89,12 @@ class BitBakeXMLRPCServer(SimpleXMLRPCServer):
87 def handle_requests(self): 89 def handle_requests(self):
88 self._handle_request_noblock() 90 self._handle_request_noblock()
89 91
90class BitBakeXMLRPCServerCommands(): 92class BitBakeXMLRPCServerCommands:
91 93
92 def __init__(self, server): 94 def __init__(self, server):
93 self.server = server 95 self.server = server
94 self.has_client = False 96 self.has_client = False
97 self.event_handle = None
95 98
96 def registerEventHandler(self, host, port): 99 def registerEventHandler(self, host, port):
97 """ 100 """
@@ -100,8 +103,8 @@ class BitBakeXMLRPCServerCommands():
100 s, t = bb.server.xmlrpcclient._create_server(host, port) 103 s, t = bb.server.xmlrpcclient._create_server(host, port)
101 104
102 # we don't allow connections if the cooker is running 105 # we don't allow connections if the cooker is running
103 if (self.server.cooker.state in [bb.cooker.state.parsing, bb.cooker.state.running]): 106 if self.server.cooker.state in [bb.cooker.State.PARSING, bb.cooker.State.RUNNING]:
104 return None, "Cooker is busy: %s" % bb.cooker.state.get_name(self.server.cooker.state) 107 return None, f"Cooker is busy: {self.server.cooker.state.name}"
105 108
106 self.event_handle = bb.event.register_UIHhandler(s, True) 109 self.event_handle = bb.event.register_UIHhandler(s, True)
107 return self.event_handle, 'OK' 110 return self.event_handle, 'OK'
diff --git a/bitbake/lib/bb/siggen.py b/bitbake/lib/bb/siggen.py
index 8ab08ec961..a6163b55ea 100644
--- a/bitbake/lib/bb/siggen.py
+++ b/bitbake/lib/bb/siggen.py
@@ -201,9 +201,6 @@ class SignatureGenerator(object):
201 def save_unitaskhashes(self): 201 def save_unitaskhashes(self):
202 return 202 return
203 203
204 def copy_unitaskhashes(self, targetdir):
205 return
206
207 def set_setscene_tasks(self, setscene_tasks): 204 def set_setscene_tasks(self, setscene_tasks):
208 return 205 return
209 206
@@ -381,7 +378,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
381 self.taints[tid] = taint 378 self.taints[tid] = taint
382 logger.warning("%s is tainted from a forced run" % tid) 379 logger.warning("%s is tainted from a forced run" % tid)
383 380
384 return 381 return set(dep for _, dep in self.runtaskdeps[tid])
385 382
386 def get_taskhash(self, tid, deps, dataCaches): 383 def get_taskhash(self, tid, deps, dataCaches):
387 384
@@ -418,9 +415,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
418 def save_unitaskhashes(self): 415 def save_unitaskhashes(self):
419 self.unihash_cache.save(self.unitaskhashes) 416 self.unihash_cache.save(self.unitaskhashes)
420 417
421 def copy_unitaskhashes(self, targetdir):
422 self.unihash_cache.copyfile(targetdir)
423
424 def dump_sigtask(self, mcfn, task, stampbase, runtime): 418 def dump_sigtask(self, mcfn, task, stampbase, runtime):
425 tid = mcfn + ":" + task 419 tid = mcfn + ":" + task
426 mc = bb.runqueue.mc_from_tid(mcfn) 420 mc = bb.runqueue.mc_from_tid(mcfn)
@@ -540,7 +534,7 @@ class SignatureGeneratorUniHashMixIn(object):
540 def __init__(self, data): 534 def __init__(self, data):
541 self.extramethod = {} 535 self.extramethod = {}
542 # NOTE: The cache only tracks hashes that exist. Hashes that don't 536 # NOTE: The cache only tracks hashes that exist. Hashes that don't
543 # exist are always queries from the server since it is possible for 537 # exist are always queried from the server since it is possible for
544 # hashes to appear over time, but much less likely for them to 538 # hashes to appear over time, but much less likely for them to
545 # disappear 539 # disappear
546 self.unihash_exists_cache = set() 540 self.unihash_exists_cache = set()
@@ -558,11 +552,11 @@ class SignatureGeneratorUniHashMixIn(object):
558 super().__init__(data) 552 super().__init__(data)
559 553
560 def get_taskdata(self): 554 def get_taskdata(self):
561 return (self.server, self.method, self.extramethod, self.max_parallel, self.username, self.password, self.env) + super().get_taskdata() 555 return (self.server, self.method, self.extramethod, self.username, self.password, self.env) + super().get_taskdata()
562 556
563 def set_taskdata(self, data): 557 def set_taskdata(self, data):
564 self.server, self.method, self.extramethod, self.max_parallel, self.username, self.password, self.env = data[:7] 558 self.server, self.method, self.extramethod, self.username, self.password, self.env = data[:6]
565 super().set_taskdata(data[7:]) 559 super().set_taskdata(data[6:])
566 560
567 def get_hashserv_creds(self): 561 def get_hashserv_creds(self):
568 if self.username and self.password: 562 if self.username and self.password:
@@ -595,13 +589,6 @@ class SignatureGeneratorUniHashMixIn(object):
595 self._client = hashserv.create_client(self.server, **self.get_hashserv_creds()) 589 self._client = hashserv.create_client(self.server, **self.get_hashserv_creds())
596 yield self._client 590 yield self._client
597 591
598 @contextmanager
599 def client_pool(self):
600 with self._client_env():
601 if getattr(self, '_client_pool', None) is None:
602 self._client_pool = hashserv.client.ClientPool(self.server, self.max_parallel, **self.get_hashserv_creds())
603 yield self._client_pool
604
605 def reset(self, data): 592 def reset(self, data):
606 self.__close_clients() 593 self.__close_clients()
607 return super().reset(data) 594 return super().reset(data)
@@ -678,25 +665,20 @@ class SignatureGeneratorUniHashMixIn(object):
678 if len(query) == 0: 665 if len(query) == 0:
679 return {} 666 return {}
680 667
681 uncached_query = {} 668 query_keys = []
682 result = {} 669 result = {}
683 for key, unihash in query.items(): 670 for key, unihash in query.items():
684 if unihash in self.unihash_exists_cache: 671 if unihash in self.unihash_exists_cache:
685 result[key] = True 672 result[key] = True
686 else: 673 else:
687 uncached_query[key] = unihash 674 query_keys.append(key)
688 675
689 if self.max_parallel <= 1 or len(uncached_query) <= 1: 676 if query_keys:
690 # No parallelism required. Make the query serially with the single client
691 with self.client() as client: 677 with self.client() as client:
692 uncached_result = { 678 query_result = client.unihash_exists_batch(query[k] for k in query_keys)
693 key: client.unihash_exists(value) for key, value in uncached_query.items()
694 }
695 else:
696 with self.client_pool() as client_pool:
697 uncached_result = client_pool.unihashes_exist(uncached_query)
698 679
699 for key, exists in uncached_result.items(): 680 for idx, key in enumerate(query_keys):
681 exists = query_result[idx]
700 if exists: 682 if exists:
701 self.unihash_exists_cache.add(query[key]) 683 self.unihash_exists_cache.add(query[key])
702 result[key] = exists 684 result[key] = exists
@@ -712,29 +694,24 @@ class SignatureGeneratorUniHashMixIn(object):
712 unihash 694 unihash
713 """ 695 """
714 result = {} 696 result = {}
715 queries = {} 697 query_tids = []
716 query_result = {}
717 698
718 for tid in tids: 699 for tid in tids:
719 unihash = self.get_cached_unihash(tid) 700 unihash = self.get_cached_unihash(tid)
720 if unihash: 701 if unihash:
721 result[tid] = unihash 702 result[tid] = unihash
722 else: 703 else:
723 queries[tid] = (self._get_method(tid), self.taskhash[tid]) 704 query_tids.append(tid)
724
725 if len(queries) == 0:
726 return result
727 705
728 if self.max_parallel <= 1 or len(queries) <= 1: 706 if query_tids:
729 # No parallelism required. Make the query serially with the single client 707 unihashes = []
730 with self.client() as client: 708 try:
731 for tid, args in queries.items(): 709 with self.client() as client:
732 query_result[tid] = client.get_unihash(*args) 710 unihashes = client.get_unihash_batch((self._get_method(tid), self.taskhash[tid]) for tid in query_tids)
733 else: 711 except (ConnectionError, FileNotFoundError) as e:
734 with self.client_pool() as client_pool: 712 bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
735 query_result = client_pool.get_unihashes(queries)
736 713
737 for tid, unihash in query_result.items(): 714 for idx, tid in enumerate(query_tids):
738 # In the absence of being able to discover a unique hash from the 715 # In the absence of being able to discover a unique hash from the
739 # server, make it be equivalent to the taskhash. The unique "hash" only 716 # server, make it be equivalent to the taskhash. The unique "hash" only
740 # really needs to be a unique string (not even necessarily a hash), but 717 # really needs to be a unique string (not even necessarily a hash), but
@@ -749,7 +726,9 @@ class SignatureGeneratorUniHashMixIn(object):
749 # to the server, there is a better chance that they will agree on 726 # to the server, there is a better chance that they will agree on
750 # the unique hash. 727 # the unique hash.
751 taskhash = self.taskhash[tid] 728 taskhash = self.taskhash[tid]
752 if unihash: 729
730 if unihashes and unihashes[idx]:
731 unihash = unihashes[idx]
753 # A unique hash equal to the taskhash is not very interesting, 732 # A unique hash equal to the taskhash is not very interesting,
754 # so it is reported it at debug level 2. If they differ, that 733 # so it is reported it at debug level 2. If they differ, that
755 # is much more interesting, so it is reported at debug level 1 734 # is much more interesting, so it is reported at debug level 1
@@ -758,7 +737,6 @@ class SignatureGeneratorUniHashMixIn(object):
758 hashequiv_logger.debug2('No reported unihash for %s:%s from %s' % (tid, taskhash, self.server)) 737 hashequiv_logger.debug2('No reported unihash for %s:%s from %s' % (tid, taskhash, self.server))
759 unihash = taskhash 738 unihash = taskhash
760 739
761
762 self.set_unihash(tid, unihash) 740 self.set_unihash(tid, unihash)
763 self.unihash[tid] = unihash 741 self.unihash[tid] = unihash
764 result[tid] = unihash 742 result[tid] = unihash
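
Both siggen query paths above converge on the same cache-then-batch shape: answer what the local cache can, then resolve all misses with a single get_unihash_batch() round trip instead of one request per task, which is why the old ClientPool parallelism can be deleted. A schematic version with a stubbed client; the method string passed per query is illustrative:

    class StubClient:
        def get_unihash_batch(self, queries):
            return ["uni-" + taskhash for method, taskhash in queries]

    def get_unihashes(tids, taskhash, cached, client):
        result = {tid: cached[tid] for tid in tids if tid in cached}
        query_tids = [tid for tid in tids if tid not in cached]
        if query_tids:
            unihashes = client.get_unihash_batch(
                ("sstate_output_hash", taskhash[tid]) for tid in query_tids)
            for idx, tid in enumerate(query_tids):
                # Fall back to the taskhash when the server has no answer
                result[tid] = unihashes[idx] or taskhash[tid]
        return result

    print(get_unihashes(["a", "b"], {"a": "h1", "b": "h2"}, {"a": "cached"}, StubClient()))
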
@@ -839,7 +817,7 @@ class SignatureGeneratorUniHashMixIn(object):
839 d.setVar('BB_UNIHASH', new_unihash) 817 d.setVar('BB_UNIHASH', new_unihash)
840 else: 818 else:
841 hashequiv_logger.debug('Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server)) 819 hashequiv_logger.debug('Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server))
842 except ConnectionError as e: 820 except (ConnectionError, FileNotFoundError) as e:
843 bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e))) 821 bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
844 finally: 822 finally:
845 if sigfile: 823 if sigfile:
@@ -881,7 +859,7 @@ class SignatureGeneratorUniHashMixIn(object):
881 # TODO: What to do here? 859 # TODO: What to do here?
882 hashequiv_logger.verbose('Task %s unihash reported as unwanted hash %s' % (tid, finalunihash)) 860 hashequiv_logger.verbose('Task %s unihash reported as unwanted hash %s' % (tid, finalunihash))
883 861
884 except ConnectionError as e: 862 except (ConnectionError, FileNotFoundError) as e:
885 bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e))) 863 bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
886 864
887 return False 865 return False
@@ -895,13 +873,12 @@ class SignatureGeneratorTestEquivHash(SignatureGeneratorUniHashMixIn, SignatureG
895 super().init_rundepcheck(data) 873 super().init_rundepcheck(data)
896 self.server = data.getVar('BB_HASHSERVE') 874 self.server = data.getVar('BB_HASHSERVE')
897 self.method = "sstate_output_hash" 875 self.method = "sstate_output_hash"
898 self.max_parallel = 1
899 876
900def clean_checksum_file_path(file_checksum_tuple): 877def clean_checksum_file_path(file_checksum_tuple):
901 f, cs = file_checksum_tuple 878 f, cs = file_checksum_tuple
902 if "/./" in f: 879 if "/./" in f:
903 return "./" + f.split("/./")[1] 880 return "./" + f.split("/./")[1]
904 return f 881 return os.path.basename(f)
905 882
906def dump_this_task(outfile, d): 883def dump_this_task(outfile, d):
907 import bb.parse 884 import bb.parse
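
The clean_checksum_file_path() change is subtle enough to deserve a worked example: paths carrying a /./ marker keep their relative tail as before, but anything else now collapses to its basename rather than passing through unchanged. Keeping host-specific prefixes out of signature data is the plausible motivation, though the patch itself does not say; the function below restates the new version verbatim for the demo:

    import os

    def clean_checksum_file_path(file_checksum_tuple):
        f, cs = file_checksum_tuple
        if "/./" in f:
            return "./" + f.split("/./")[1]
        return os.path.basename(f)

    print(clean_checksum_file_path(("/work/recipe/./src/main.c", "abc")))  # ./src/main.c
    print(clean_checksum_file_path(("/home/user/poky/file.txt", "abc")))   # file.txt
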
diff --git a/bitbake/lib/bb/tests/codeparser.py b/bitbake/lib/bb/tests/codeparser.py
index f6585fb3aa..c0d1362a0c 100644
--- a/bitbake/lib/bb/tests/codeparser.py
+++ b/bitbake/lib/bb/tests/codeparser.py
@@ -106,6 +106,46 @@ ${D}${libdir}/pkgconfig/*.pc
106 self.parseExpression("foo=$(echo bar)") 106 self.parseExpression("foo=$(echo bar)")
107 self.assertExecs(set(["echo"])) 107 self.assertExecs(set(["echo"]))
108 108
109 def test_assign_subshell_expansion_quotes(self):
110 self.parseExpression('foo="$(echo bar)"')
111 self.assertExecs(set(["echo"]))
112
113 def test_assign_subshell_expansion_nested(self):
114 self.parseExpression('foo="$(func1 "$(func2 bar$(func3))")"')
115 self.assertExecs(set(["func1", "func2", "func3"]))
116
117 def test_assign_subshell_expansion_multiple(self):
118 self.parseExpression('foo="$(func1 "$(func2)") $(func3)"')
119 self.assertExecs(set(["func1", "func2", "func3"]))
120
121 def test_assign_subshell_expansion_escaped_quotes(self):
122 self.parseExpression('foo="\\"fo\\"o$(func1)"')
123 self.assertExecs(set(["func1"]))
124
125 def test_assign_subshell_expansion_empty(self):
126 self.parseExpression('foo="bar$()foo"')
127 self.assertExecs(set())
128
129 def test_assign_subshell_backticks(self):
130 self.parseExpression("foo=`echo bar`")
131 self.assertExecs(set(["echo"]))
132
133 def test_assign_subshell_backticks_quotes(self):
134 self.parseExpression('foo="`echo bar`"')
135 self.assertExecs(set(["echo"]))
136
137 def test_assign_subshell_backticks_multiple(self):
138 self.parseExpression('foo="`func1 bar` `func2`"')
139 self.assertExecs(set(["func1", "func2"]))
140
141 def test_assign_subshell_backticks_escaped_quotes(self):
142 self.parseExpression('foo="\\"fo\\"o`func1`"')
143 self.assertExecs(set(["func1"]))
144
145 def test_assign_subshell_backticks_empty(self):
146 self.parseExpression('foo="bar``foo"')
147 self.assertExecs(set())
148
109 def test_shell_unexpanded(self): 149 def test_shell_unexpanded(self):
110 self.setEmptyVars(["QT_BASE_NAME"]) 150 self.setEmptyVars(["QT_BASE_NAME"])
111 self.parseExpression('echo "${QT_BASE_NAME}"') 151 self.parseExpression('echo "${QT_BASE_NAME}"')
diff --git a/bitbake/lib/bb/tests/compression.py b/bitbake/lib/bb/tests/compression.py
index 95af3f96d7..16c297b315 100644
--- a/bitbake/lib/bb/tests/compression.py
+++ b/bitbake/lib/bb/tests/compression.py
@@ -66,8 +66,8 @@ class CompressionTests(object):
66 66
67class LZ4Tests(CompressionTests, unittest.TestCase): 67class LZ4Tests(CompressionTests, unittest.TestCase):
68 def setUp(self): 68 def setUp(self):
69 if shutil.which("lz4c") is None: 69 if shutil.which("lz4") is None:
70 self.skipTest("'lz4c' not found") 70 self.skipTest("'lz4' not found")
71 super().setUp() 71 super().setUp()
72 72
73 @contextlib.contextmanager 73 @contextlib.contextmanager
diff --git a/bitbake/lib/bb/tests/data.py b/bitbake/lib/bb/tests/data.py
index cbc7c1ecd4..a895f6a58e 100644
--- a/bitbake/lib/bb/tests/data.py
+++ b/bitbake/lib/bb/tests/data.py
@@ -450,17 +450,64 @@ class TestFlags(unittest.TestCase):
450 self.d = bb.data.init() 450 self.d = bb.data.init()
451 self.d.setVar("foo", "value of foo") 451 self.d.setVar("foo", "value of foo")
452 self.d.setVarFlag("foo", "flag1", "value of flag1") 452 self.d.setVarFlag("foo", "flag1", "value of flag1")
453 self.d.setVarFlag("foo", "_defaultval_flag_flag1", "default of flag1")
453 self.d.setVarFlag("foo", "flag2", "value of flag2") 454 self.d.setVarFlag("foo", "flag2", "value of flag2")
455 self.d.setVarFlag("foo", "_defaultval_flag_flag2", "default of flag2")
456 self.d.setVarFlag("foo", "flag3", "value of flag3")
457 self.d.setVarFlag("foo", "_defaultval_flag_flagnovalue", "default of flagnovalue")
454 458
455 def test_setflag(self): 459 def test_setflag(self):
456 self.assertEqual(self.d.getVarFlag("foo", "flag1", False), "value of flag1") 460 self.assertEqual(self.d.getVarFlag("foo", "flag1", False), "value of flag1")
457 self.assertEqual(self.d.getVarFlag("foo", "flag2", False), "value of flag2") 461 self.assertEqual(self.d.getVarFlag("foo", "flag2", False), "value of flag2")
462 self.assertDictEqual(
463 self.d.getVarFlags("foo"),
464 {
465 "flag1": "value of flag1",
466 "flag2": "value of flag2",
467 "flag3": "value of flag3",
468 "flagnovalue": "default of flagnovalue",
469 }
470 )
471 self.assertDictEqual(
472 self.d.getVarFlags("foo", internalflags=True),
473 {
474 "_content": "value of foo",
475 "flag1": "value of flag1",
476 "flag2": "value of flag2",
477 "flag3": "value of flag3",
478 "_defaultval_flag_flag1": "default of flag1",
479 "_defaultval_flag_flag2": "default of flag2",
480 "_defaultval_flag_flagnovalue": "default of flagnovalue",
481 }
482 )
458 483
459 def test_delflag(self): 484 def test_delflag(self):
460 self.d.delVarFlag("foo", "flag2") 485 self.d.delVarFlag("foo", "flag2")
486 self.d.delVarFlag("foo", "flag3")
461 self.assertEqual(self.d.getVarFlag("foo", "flag1", False), "value of flag1") 487 self.assertEqual(self.d.getVarFlag("foo", "flag1", False), "value of flag1")
462 self.assertEqual(self.d.getVarFlag("foo", "flag2", False), None) 488 self.assertEqual(self.d.getVarFlag("foo", "flag2", False), None)
463 489 self.assertDictEqual(
490 self.d.getVarFlags("foo"),
491 {
492 "flag1": "value of flag1",
493 "flagnovalue": "default of flagnovalue",
494 }
495 )
496 self.assertDictEqual(
497 self.d.getVarFlags("foo", internalflags=True),
498 {
499 "_content": "value of foo",
500 "flag1": "value of flag1",
501 "_defaultval_flag_flag1": "default of flag1",
502 "_defaultval_flag_flagnovalue": "default of flagnovalue",
503 }
504 )
505
506 def test_delvar(self):
507 self.d.delVar("foo")
508 self.assertEqual(self.d.getVarFlag("foo", "flag1", False), None)
509 self.assertEqual(self.d.getVarFlag("foo", "flag2", False), None)
510 self.assertEqual(self.d.getVarFlags("foo", internalflags=True), None)
464 511
465class Contains(unittest.TestCase): 512class Contains(unittest.TestCase):
466 def setUp(self): 513 def setUp(self):
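The extended TestFlags cases above encode the contract for default flag values: a _defaultval_flag_<name> entry surfaces through getVarFlags() under the plain flag name unless an explicit value shadows it, while internalflags=True returns the raw storage including _content. A minimal sketch using the same bb.data calls and values as the tests:

    import bb.data

    d = bb.data.init()
    d.setVar("foo", "value of foo")
    d.setVarFlag("foo", "flag1", "value of flag1")
    d.setVarFlag("foo", "_defaultval_flag_flag1", "default of flag1")
    d.setVarFlag("foo", "_defaultval_flag_flagnovalue", "default of flagnovalue")

    # The explicit flag1 value wins; flagnovalue falls back to its default.
    print(d.getVarFlags("foo"))
    # {'flag1': 'value of flag1', 'flagnovalue': 'default of flagnovalue'}

    # internalflags=True exposes the raw entries, including _content.
    print(d.getVarFlags("foo", internalflags=True))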
diff --git a/bitbake/lib/bb/tests/fetch-testdata/software/miniupnp/download.php b/bitbake/lib/bb/tests/fetch-testdata/software/miniupnp/download.php
new file mode 100644
index 0000000000..e27ee134f2
--- /dev/null
+++ b/bitbake/lib/bb/tests/fetch-testdata/software/miniupnp/download.php
@@ -0,0 +1,3528 @@
1<?xml version="1.0" encoding="UTF-8"?>
2<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
3 "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
4<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
5<head>
6<title>MiniUPnP download zone</title>
7<link href="../css/miniupnp.css" rel="stylesheet" type="text/css"/>
8<meta name="description" content="files download of the miniupnp project"/>
9<meta name="keywords" content="upnp,download,openbsd,freebsd,linux,windows"/>
10<meta name="viewport" content="width=device-width" />
11<link href="rss.php" title="MiniUPnPd, MiniUPnPc and MiniSSDPd Files" type="application/rss+xml" rel="alternate" />
12<link rel="canonical" href="http://miniupnp.free.fr/files/" />
13<link rel="alternate" hreflang="fr" href="/files/index_fr.php" />
14<script async="async" src="//pagead2.googlesyndication.com/pagead/js/adsbygoogle.js" type="text/javascript"></script>
15<script type="text/javascript">
16 (adsbygoogle = window.adsbygoogle || []).push({
17 google_ad_client: "ca-pub-6883148866513192",
18 enable_page_level_ads: true
19 });
20</script>
21</head>
22<body>
23<h2>MiniUPnP Project</h2>
24
25<p align="center">
26<a href="../">Home</a> |
27<b>Downloads</b> |
28<a href="../devicelist.php">Compatibility list</a> |
29<a href="../libnatpmp.html">libnatpmp</a> |
30<a href="../minissdpd.html">MiniSSDPd</a> |
31<a href="../xchat-upnp.html">xchat upnp patch</a> |
32<a href="../search.html">Search</a> |
33<a href="https://miniupnp.tuxfamily.org/forum/">Forum</a>
34</p>
35<p align="center">
36<b>English</b> | <a href="/files/index_fr.php">Fran&ccedil;ais</a>
37</p>
38
39<div align="center">
40<script type="text/javascript"><!--
41google_ad_client = "pub-6883148866513192";
42/* 728x90, created 7/10/08 */
43google_ad_slot = "0774293141";
44google_ad_width = 728;
45google_ad_height = 90;
46//-->
47</script>
48<script type="text/javascript"
49src="https://pagead2.googlesyndication.com/pagead/show_ads.js">
50</script>
51</div>
52
53<h2>MiniUPnP download zone</h2>
54<p>
55Find on this page the source of miniupnp and
56some related files. You will also find precompiled binaries
57of the UPnP client sample program for windows compiled using
58<a href="https://mingw.osdn.io/">MinGW</a>. There are also Windows
59binaries (including python module) automatically built using
60<a href="https://ci.appveyor.com/project/miniupnp/miniupnp/build/artifacts">AppVeyor</a>.
61</p>
62<p>If you just need one of the software installed on your machine,
63you probably don't need to download and compile the source files.
64It is very likely that a package/port already exists for
65your system/distribution. Refer to your system documentation
66to find how to search and install a package/port.
67Mac OS X does have port systems too : see
68<a href="http://www.macports.org/">MacPorts</a> or
69<a href="http://mxcl.github.com/homebrew/">Homebrew</a> or
70<a href="http://www.finkproject.org/">Fink</a>.
71</p>
72<p>
73The miniupnpc (client) sources have been successfully compiled
74under Windows XP/vista/7/10/etc. (using
75<a href="https://mingw.osdn.io/">MinGW</a>,
76<a href="https://www.mingw-w64.org/">Mingw-w64</a>
77or <a href="http://www.cygwin.com/">Cygwin</a>),
78Linux, OpenBSD, FreeBSD, NetBSD, DragonFlyBSD,
79Solaris, MacOS X and AmigaOS. <br/>
80The Makefile of the client is made for GNU make :
81check which version your system have
82with the command "make --version". On some systems, such as OpenBSD,
83you have to use "gmake". Under Windows with MinGW, GNU make is
84called "mingw32-make" and a sligthly modified version of the Makefile
85should be used : Makefile.mingw. Run "mingw32make.bat" to compile. <br/>
86If you have any compatibility problem, please post on the
87<a href="https://miniupnp.tuxfamily.org/forum/">forum</a>
88or contact me by email.
89</p>
90<!--
91<p>A devoted user compiled miniupnp<strong>c</strong> for
92Openwrt (currently Kamikaze 7.09)
93and his work is available here :
94<a href="http://replay.waybackmachine.org/20081120030628/http://www.myantihero.net/pub/openwrt/packages/">http://myantihero.net/pub/openwrt/packages/</a>.</p>
95-->
96<p>Get miniupnpc under AmigaOS 4 on
97<a href="http://os4depot.net/index.php?function=showfile&amp;file=network/misc/miniupnpc.lha">OS4Depot</a>.
98</p>
99<p>
100Dario Meloni has made a Ruby Gem embedding miniupnpc :
101<a href="https://rubygems.org/gems/mupnp">https://rubygems.org/gems/mupnp</a>.
102</p>
103<p>
104The python module is available on pypi.org :
105<a href="https://pypi.org/project/miniupnpc/">pip install miniupnpc</a>.
106</p>
107<p>
108The daemon (starting in November 2006) compiles with BSD make under BSD
109and Solaris.<br/>
110To compile the daemon under linux, use "make -f Makefile.linux"<br/>
111To compile for <a href="http://openwrt.org/">OpenWRT</a>
112please read the README.openwrt file, or use the packages
113<a href="https://openwrt.org/packages/pkgdata/miniupnpd">miniupnpc</a> and
114<a href="https://openwrt.org/packages/pkgdata/miniupnpd">miniupnpd</a>.
115<!-- The
116<a href="http://www.x-wrt.org/">X-Wrt</a> project is providing
117precompiled ipkg packages for OpenWrt for both OpenWrt
118<a href="ftp://ftp.berlios.de/pub/xwrt/packages/">White Russian</a>
119and OpenWrt
120<a href="ftp://ftp.berlios.de/pub/xwrt/kamikaze/packages">kamikaze</a>.
121Check
122<a href="ftp://ftp.berlios.de/pub/xwrt/">ftp://ftp.berlios.de/pub/xwrt/</a>.
123For White Russian, take a look at
124<a href="http://jackassofalltrades.com/openwrt/">this</a>. -->
125<br/>
126<a href="http://pfsense.com">pfSense</a> users are advised to use the
127miniupnpd port available for their system. Recent versions of
128pfSense include MiniUPnPd in the base system.
129<br/>
130For <a href="http://en.wikipedia.org/wiki/WRT54G">Linksys WRT54G</a>
131and WRT54GL owners,
132<a href="http://sourceforge.net/projects/tarifa/">Tarifa firmware</a>
133is another alternative to get miniUPnPd running on the router.
134</p>
135<p>
136Please read README and
137LICENCE files included with the distribution for further informations.
138</p>
139<p>
140The MiniUPnP daemon (miniupnpd) is working under
141<a href="http://www.openbsd.org/">OpenBSD</a>,
142<a href="http://www.netbsd.org/">NetBSD</a>,
143<a href="http://www.freebsd.org/">FreeBSD</a>,
144<a href="http://www.dragonflybsd.org/">DragonFlyBSD</a>,
145<a href="http://www.apple.com/macosx/">Mac OS X</a> and
146(<a href="https://en.wikipedia.org/wiki/OpenSolaris">Open</a>)<a href="http://www.oracle.com/us/products/servers-storage/solaris/solaris11/overview/index.html">Solaris</a>
147with <a href="http://www.openbsd.org/faq/pf/">pf</a>,
148with <a href="https://en.wikipedia.org/wiki/IPFilter">IP Filter</a> or
149with <a href="http://en.wikipedia.org/wiki/Ipfirewall">ipfw</a>.
150The linux version uses either libiptc which permits to access
151<a href="http://netfilter.org/">netfilter</a>
152rules inside the kernel the same way as
153<a href="https://www.netfilter.org/projects/iptables/index.html">iptables</a>, or
154<a href="https://www.netfilter.org/projects/libnftnl/index.html">libnftnl</a>
155which is the equivalent for
156<a href="https://www.netfilter.org/projects/nftables/index.html">nftables</a>.
157</p>
158
159<p>Releases are now GPG signed with the key <a href="../A31ACAAF.asc">A31ACAAF</a>.
160Previous signing key was <a href="../A5C0863C.asc">A5C0863C</a>.
161Get it from your favorite
162<a href="https://pgp.mit.edu/pks/lookup?search=0xA31ACAAF&amp;op=index&amp;fingerprint=on">key server</a>.</p>
163
164<h4>REST API</h4>
165<p>You can use the REST API to get the latest releases available:</p>
166<ul>
167<li><a href="rest.php/tags/miniupnpd?count=1">rest.php/tags/miniupnpd?count=1</a>: latest miniupnpd.</li>
168<li><a href="rest.php/tags?count=1">rest.php/tags?count=1</a>: miniupnpc, miniupnpd and minissdpd.</li>
169</ul>
170
171<h4>You can help !</h4>
172<p>If you make a package/port for your favorite OS distribution,
173inform me so I can upload the package here or add a link to your
174repository.
175</p>
176
177<h4>Latest files</h4>
178<table>
179<tr><th>name</th>
180<th>size</th>
181<th>date</th>
182<th>comment</th>
183<th><!-- Changelog --></th>
184<th><!-- Signature --></th>
185</tr>
186<tr>
187 <td class="filename"><a href='miniupnpc-2.3.2.tar.gz'>miniupnpc-2.3.2.tar.gz</a></td>
188 <td class="filesize">140137</td>
189 <td class="filedate">05/03/2025 10:31</td>
190 <td class="comment">MiniUPnP client release source code</td>
191 <td><a href="changelog.php?file=miniupnpc-2.3.2.tar.gz">changelog</a></td>
192 <td><a href="miniupnpc-2.3.2.tar.gz.sig">Signature</a></td>
193</tr>
194<tr>
195 <td class="filename"><a href='miniupnpd-2.3.7.tar.gz'>miniupnpd-2.3.7.tar.gz</a></td>
196 <td class="filesize">265329</td>
197 <td class="filedate">22/06/2024 22:31</td>
198 <td class="comment">MiniUPnP daemon release source code</td>
199 <td><a href="changelog.php?file=miniupnpd-2.3.7.tar.gz">changelog</a></td>
200 <td><a href="miniupnpd-2.3.7.tar.gz.sig">Signature</a></td>
201</tr>
202<tr>
203 <td class="filename"><a href='libnatpmp-20230423.tar.gz'>libnatpmp-20230423.tar.gz</a></td>
204 <td class="filesize">26506</td>
205 <td class="filedate">23/04/2023 11:02</td>
206 <td class="comment">latest libnatpmp source code</td>
207 <td><a href="changelog.php?file=libnatpmp-20230423.tar.gz">changelog</a></td>
208 <td><a href="libnatpmp-20230423.tar.gz.sig">Signature</a></td>
209</tr>
210<tr>
211 <td class="filename"><a href='minissdpd-1.6.0.tar.gz'>minissdpd-1.6.0.tar.gz</a></td>
212 <td class="filesize">39077</td>
213 <td class="filedate">22/10/2022 18:41</td>
214 <td class="comment">MiniSSDPd release source code</td>
215 <td><a href="changelog.php?file=minissdpd-1.6.0.tar.gz">changelog</a></td>
216 <td><a href="minissdpd-1.6.0.tar.gz.sig">Signature</a></td>
217</tr>
218<tr>
219 <td class="filename"><a href='upnpc-exe-win32-20220515.zip'>upnpc-exe-win32-20220515.zip</a></td>
220 <td class="filesize">69503</td>
221 <td class="filedate">15/05/2022 14:31</td>
222 <td class="comment">Windows executable</td>
223 <td><a href="changelog.php?file=upnpc-exe-win32-20220515.zip">changelog</a></td>
224 <td></td>
225</tr>
226<tr>
227 <td class="filename"><a href='minissdpd-1.5.20211105.tar.gz'>minissdpd-1.5.20211105.tar.gz</a></td>
228 <td class="filesize">38870</td>
229 <td class="filedate">04/11/2021 23:34</td>
230 <td class="comment">latest MiniSSDPd source code</td>
231 <td><a href="changelog.php?file=minissdpd-1.5.20211105.tar.gz">changelog</a></td>
232 <td><a href="minissdpd-1.5.20211105.tar.gz.sig">Signature</a></td>
233</tr>
234<tr>
235 <td class="filename"><a href='miniupnpc-2.1.20201016.tar.gz'>miniupnpc-2.1.20201016.tar.gz</a></td>
236 <td class="filesize">97682</td>
237 <td class="filedate">15/10/2020 22:31</td>
238 <td class="comment">latest MiniUPnP client source code</td>
239 <td><a href="changelog.php?file=miniupnpc-2.1.20201016.tar.gz">changelog</a></td>
240 <td><a href="miniupnpc-2.1.20201016.tar.gz.sig">Signature</a></td>
241</tr>
242<tr>
243 <td class="filename"><a href='miniupnpd-2.1.20200510.tar.gz'>miniupnpd-2.1.20200510.tar.gz</a></td>
244 <td class="filesize">245426</td>
245 <td class="filedate">10/05/2020 18:23</td>
246 <td class="comment">latest MiniUPnP daemon source code</td>
247 <td><a href="changelog.php?file=miniupnpd-2.1.20200510.tar.gz">changelog</a></td>
248 <td><a href="miniupnpd-2.1.20200510.tar.gz.sig">Signature</a></td>
249</tr>
250<tr>
251 <td class="filename"><a href='xchat-upnp20110811.patch'>xchat-upnp20110811.patch</a></td>
252 <td class="filesize">10329</td>
253 <td class="filedate">11/08/2011 15:18</td>
254 <td class="comment">Patch to add UPnP capabilities to xchat</td>
255 <td><a href="changelog.php?file=xchat-upnp20110811.patch">changelog</a></td>
256 <td></td>
257</tr>
258<tr>
259 <td class="filename"><a href='minidlna_1.0.21.minissdp1.patch'>minidlna_1.0.21.minissdp1.patch</a></td>
260 <td class="filesize">7598</td>
261 <td class="filedate">25/07/2011 14:57</td>
262 <td class="comment">Patch for MiniDLNA to use miniSSDPD</td>
263 <td><a href="changelog.php?file=minidlna_1.0.21.minissdp1.patch">changelog</a></td>
264 <td></td>
265</tr>
266<tr>
267 <td class="filename"><a href='miniupnpc-new20060630.tar.gz'>miniupnpc-new20060630.tar.gz</a></td>
268 <td class="filesize">14840</td>
269 <td class="filedate">04/11/2006 18:16</td>
270 <td class="comment">Jo&atilde;o Paulo Barraca version of the upnp client</td>
271 <td><a href="changelog.php?file=miniupnpc-new20060630.tar.gz">changelog</a></td>
272 <td></td>
273</tr>
274</table>
275
276<h4>All files</h4>
277<table>
278<tr><th>name</th>
279<th>size</th>
280<th>date</th>
281<th>comment</th>
282<th><!-- signature --></th>
283</tr>
284<tr>
285 <td class="filename"><a href='download.php?file=miniupnpc-2.3.2.tar.gz'>miniupnpc-2.3.2.tar.gz</a></td>
286 <td class="filesize">140137</td>
287 <td class="filedate">05/03/2025 10:31:36 +0000</td>
288 <td class="comment">MiniUPnP client release source code</td>
289 <td><a href="miniupnpc-2.3.2.tar.gz.sig">Signature</a></td>
290</tr>
291<tr>
292 <td class="filename"><a href='download.php?file=miniupnpc-2.3.1.tar.gz'>miniupnpc-2.3.1.tar.gz</a></td>
293 <td class="filesize">139499</td>
294 <td class="filedate">23/02/2025 16:44:16 +0000</td>
295 <td class="comment">MiniUPnP client release source code</td>
296 <td><a href="miniupnpc-2.3.1.tar.gz.sig">Signature</a></td>
297</tr>
298<tr>
299 <td class="filename"><a href='download.php?file=miniupnpc-2.3.0.tar.gz'>miniupnpc-2.3.0.tar.gz</a></td>
300 <td class="filesize">105071</td>
301 <td class="filedate">10/01/2025 23:16:45 +0000</td>
302 <td class="comment">MiniUPnP client release source code</td>
303 <td><a href="miniupnpc-2.3.0.tar.gz.sig">Signature</a></td>
304</tr>
305<tr>
306 <td class="filename"><a href='download.php?file=miniupnpd-2.3.7.tar.gz'>miniupnpd-2.3.7.tar.gz</a></td>
307 <td class="filesize">265329</td>
308 <td class="filedate">22/06/2024 22:31:38 +0000</td>
309 <td class="comment">MiniUPnP daemon release source code</td>
310 <td><a href="miniupnpd-2.3.7.tar.gz.sig">Signature</a></td>
311</tr>
312<tr>
313 <td class="filename"><a href='download.php?file=miniupnpc-2.2.8.tar.gz'>miniupnpc-2.2.8.tar.gz</a></td>
314 <td class="filesize">104603</td>
315 <td class="filedate">08/06/2024 22:13:39 +0000</td>
316 <td class="comment">MiniUPnP client release source code</td>
317 <td><a href="miniupnpc-2.2.8.tar.gz.sig">Signature</a></td>
318</tr>
319<tr>
320 <td class="filename"><a href='download.php?file=miniupnpd-2.3.6.tar.gz'>miniupnpd-2.3.6.tar.gz</a></td>
321 <td class="filesize">263018</td>
322 <td class="filedate">19/03/2024 23:39:51 +0000</td>
323 <td class="comment">MiniUPnP daemon release source code</td>
324 <td><a href="miniupnpd-2.3.6.tar.gz.sig">Signature</a></td>
325</tr>
326<tr>
327 <td class="filename"><a href='download.php?file=miniupnpc-2.2.7.tar.gz'>miniupnpc-2.2.7.tar.gz</a></td>
328 <td class="filesize">104258</td>
329 <td class="filedate">19/03/2024 23:25:18 +0000</td>
330 <td class="comment">MiniUPnP client release source code</td>
331 <td><a href="miniupnpc-2.2.7.tar.gz.sig">Signature</a></td>
332</tr>
333<tr>
334 <td class="filename"><a href='download.php?file=miniupnpd-2.3.5.tar.gz'>miniupnpd-2.3.5.tar.gz</a></td>
335 <td class="filesize">261952</td>
336 <td class="filedate">02/03/2024 11:04:07 +0000</td>
337 <td class="comment">MiniUPnP daemon release source code</td>
338 <td><a href="miniupnpd-2.3.5.tar.gz.sig">Signature</a></td>
339</tr>
340<tr>
341 <td class="filename"><a href='download.php?file=miniupnpd-2.3.4.tar.gz'>miniupnpd-2.3.4.tar.gz</a></td>
342 <td class="filesize">260810</td>
343 <td class="filedate">04/01/2024 00:53:17 +0000</td>
344 <td class="comment">MiniUPnP daemon release source code</td>
345 <td><a href="miniupnpd-2.3.4.tar.gz.sig">Signature</a></td>
346</tr>
347<tr>
348 <td class="filename"><a href='download.php?file=miniupnpc-2.2.6.tar.gz'>miniupnpc-2.2.6.tar.gz</a></td>
349 <td class="filesize">103949</td>
350 <td class="filedate">04/01/2024 00:27:14 +0000</td>
351 <td class="comment">MiniUPnP client release source code</td>
352 <td><a href="miniupnpc-2.2.6.tar.gz.sig">Signature</a></td>
353</tr>
354<tr>
355 <td class="filename"><a href='download.php?file=miniupnpc-2.2.5.tar.gz'>miniupnpc-2.2.5.tar.gz</a></td>
356 <td class="filesize">103654</td>
357 <td class="filedate">11/06/2023 23:14:56 +0000</td>
358 <td class="comment">MiniUPnP client release source code</td>
359 <td><a href="miniupnpc-2.2.5.tar.gz.sig">Signature</a></td>
360</tr>
361<tr>
362 <td class="filename"><a href='download.php?file=libnatpmp-20230423.tar.gz'>libnatpmp-20230423.tar.gz</a></td>
363 <td class="filesize">26506</td>
364 <td class="filedate">23/04/2023 11:02:09 +0000</td>
365 <td class="comment">libnatpmp source code</td>
366 <td><a href="libnatpmp-20230423.tar.gz.sig">Signature</a></td>
367</tr>
368<tr>
369 <td class="filename"><a href='download.php?file=miniupnpd-2.3.3.tar.gz'>miniupnpd-2.3.3.tar.gz</a></td>
370 <td class="filesize">260079</td>
371 <td class="filedate">17/02/2023 03:07:46 +0000</td>
372 <td class="comment">MiniUPnP daemon release source code</td>
373 <td><a href="miniupnpd-2.3.3.tar.gz.sig">Signature</a></td>
374</tr>
375<tr>
376 <td class="filename"><a href='download.php?file=miniupnpd-2.3.2.tar.gz'>miniupnpd-2.3.2.tar.gz</a></td>
377 <td class="filesize">259686</td>
378 <td class="filedate">19/01/2023 23:18:08 +0000</td>
379 <td class="comment">MiniUPnP daemon release source code</td>
380 <td><a href="miniupnpd-2.3.2.tar.gz.sig">Signature</a></td>
381</tr>
382<tr>
383 <td class="filename"><a href='download.php?file=minissdpd-1.6.0.tar.gz'>minissdpd-1.6.0.tar.gz</a></td>
384 <td class="filesize">39077</td>
385 <td class="filedate">22/10/2022 18:41:54 +0000</td>
386 <td class="comment">MiniSSDPd release source code</td>
387 <td><a href="minissdpd-1.6.0.tar.gz.sig">Signature</a></td>
388</tr>
389<tr>
390 <td class="filename"><a href='download.php?file=miniupnpc-2.2.4.tar.gz'>miniupnpc-2.2.4.tar.gz</a></td>
391 <td class="filesize">102932</td>
392 <td class="filedate">21/10/2022 21:01:01 +0000</td>
393 <td class="comment">MiniUPnP client release source code</td>
394 <td><a href="miniupnpc-2.2.4.tar.gz.sig">Signature</a></td>
395</tr>
396<tr>
397 <td class="filename"><a href='download.php?file=miniupnpd-2.3.1.tar.gz'>miniupnpd-2.3.1.tar.gz</a></td>
398 <td class="filesize">258050</td>
399 <td class="filedate">16/10/2022 05:58:44 +0000</td>
400 <td class="comment">MiniUPnP daemon release source code</td>
401 <td><a href="miniupnpd-2.3.1.tar.gz.sig">Signature</a></td>
402</tr>
403<tr>
404 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20220515.zip'>upnpc-exe-win32-20220515.zip</a></td>
405 <td class="filesize">69503</td>
406 <td class="filedate">15/05/2022 14:31:25 +0000</td>
407 <td class="comment">Windows executable</td>
408 <td></td>
409</tr>
410<tr>
411 <td class="filename"><a href='download.php?file=hexchat-2.16.patch'>hexchat-2.16.patch</a></td>
412 <td class="filesize">8147</td>
413 <td class="filedate">19/03/2022 16:52:05 +0000</td>
414 <td class="comment"></td>
415 <td></td>
416</tr>
417<tr>
418 <td class="filename"><a href='download.php?file=miniupnpd-2.3.0.tar.gz'>miniupnpd-2.3.0.tar.gz</a></td>
419 <td class="filesize">256069</td>
420 <td class="filedate">23/01/2022 00:23:32 +0000</td>
421 <td class="comment">MiniUPnP daemon release source code</td>
422 <td><a href="miniupnpd-2.3.0.tar.gz.sig">Signature</a></td>
423</tr>
424<tr>
425 <td class="filename"><a href='download.php?file=minissdpd-1.5.20211105.tar.gz'>minissdpd-1.5.20211105.tar.gz</a></td>
426 <td class="filesize">38870</td>
427 <td class="filedate">04/11/2021 23:34:49 +0000</td>
428 <td class="comment">MiniSSDPd source code</td>
429 <td><a href="minissdpd-1.5.20211105.tar.gz.sig">Signature</a></td>
430</tr>
431<tr>
432 <td class="filename"><a href='download.php?file=miniupnpc-2.2.3.tar.gz'>miniupnpc-2.2.3.tar.gz</a></td>
433 <td class="filesize">101360</td>
434 <td class="filedate">28/09/2021 21:43:32 +0000</td>
435 <td class="comment">MiniUPnP client release source code</td>
436 <td><a href="miniupnpc-2.2.3.tar.gz.sig">Signature</a></td>
437</tr>
438<tr>
439 <td class="filename"><a href='download.php?file=miniupnpd-2.2.3.tar.gz'>miniupnpd-2.2.3.tar.gz</a></td>
440 <td class="filesize">254752</td>
441 <td class="filedate">21/08/2021 08:35:13 +0000</td>
442 <td class="comment">MiniUPnP daemon release source code</td>
443 <td><a href="miniupnpd-2.2.3.tar.gz.sig">Signature</a></td>
444</tr>
445<tr>
446 <td class="filename"><a href='download.php?file=miniupnpd-2.2.2.tar.gz'>miniupnpd-2.2.2.tar.gz</a></td>
447 <td class="filesize">250649</td>
448 <td class="filedate">13/05/2021 11:30:11 +0000</td>
449 <td class="comment">MiniUPnP daemon release source code</td>
450 <td><a href="miniupnpd-2.2.2.tar.gz.sig">Signature</a></td>
451</tr>
452<tr>
453 <td class="filename"><a href='download.php?file=miniupnpc-2.2.2.tar.gz'>miniupnpc-2.2.2.tar.gz</a></td>
454 <td class="filesize">100008</td>
455 <td class="filedate">02/03/2021 23:44:52 +0000</td>
456 <td class="comment">MiniUPnP client release source code</td>
457 <td><a href="miniupnpc-2.2.2.tar.gz.sig">Signature</a></td>
458</tr>
459<tr>
460 <td class="filename"><a href='download.php?file=miniupnpd-2.2.1.tar.gz'>miniupnpd-2.2.1.tar.gz</a></td>
461 <td class="filesize">250023</td>
462 <td class="filedate">20/12/2020 18:08:08 +0000</td>
463 <td class="comment">MiniUPnP daemon release source code</td>
464 <td><a href="miniupnpd-2.2.1.tar.gz.sig">Signature</a></td>
465</tr>
466<tr>
467 <td class="filename"><a href='download.php?file=miniupnpc-2.2.1.tar.gz'>miniupnpc-2.2.1.tar.gz</a></td>
468 <td class="filesize">99595</td>
469 <td class="filedate">20/12/2020 18:08:02 +0000</td>
470 <td class="comment">MiniUPnP client release source code</td>
471 <td><a href="miniupnpc-2.2.1.tar.gz.sig">Signature</a></td>
472</tr>
473<tr>
474 <td class="filename"><a href='download.php?file=miniupnpc-2.2.0.tar.gz'>miniupnpc-2.2.0.tar.gz</a></td>
475 <td class="filesize">98348</td>
476 <td class="filedate">09/11/2020 19:51:24 +0000</td>
477 <td class="comment">MiniUPnP client release source code</td>
478 <td><a href="miniupnpc-2.2.0.tar.gz.sig">Signature</a></td>
479</tr>
480<tr>
481 <td class="filename"><a href='download.php?file=miniupnpd-2.2.0.tar.gz'>miniupnpd-2.2.0.tar.gz</a></td>
482 <td class="filesize">249858</td>
483 <td class="filedate">31/10/2020 09:20:59 +0000</td>
484 <td class="comment">MiniUPnP daemon release source code</td>
485 <td><a href="miniupnpd-2.2.0.tar.gz.sig">Signature</a></td>
486</tr>
487<tr>
488 <td class="filename"><a href='download.php?file=miniupnpd-2.2.0-RC3.tar.gz'>miniupnpd-2.2.0-RC3.tar.gz</a></td>
489 <td class="filesize">249879</td>
490 <td class="filedate">30/10/2020 21:49:49 +0000</td>
491 <td class="comment">MiniUPnP daemon release source code</td>
492 <td><a href="miniupnpd-2.2.0-RC3.tar.gz.sig">Signature</a></td>
493</tr>
494<tr>
495 <td class="filename"><a href='download.php?file=miniupnpc-2.1.20201016.tar.gz'>miniupnpc-2.1.20201016.tar.gz</a></td>
496 <td class="filesize">97682</td>
497 <td class="filedate">15/10/2020 22:31:09 +0000</td>
498 <td class="comment">MiniUPnP client source code</td>
499 <td><a href="miniupnpc-2.1.20201016.tar.gz.sig">Signature</a></td>
500</tr>
501<tr>
502 <td class="filename"><a href='download.php?file=miniupnpd-2.2.0-RC2.tar.gz'>miniupnpd-2.2.0-RC2.tar.gz</a></td>
503 <td class="filesize">248756</td>
504 <td class="filedate">28/09/2020 21:57:22 +0000</td>
505 <td class="comment">MiniUPnP daemon release source code</td>
506 <td><a href="miniupnpd-2.2.0-RC2.tar.gz.sig">Signature</a></td>
507</tr>
508<tr>
509 <td class="filename"><a href='download.php?file=miniupnpc-2.1.20200928.tar.gz'>miniupnpc-2.1.20200928.tar.gz</a></td>
510 <td class="filesize">96508</td>
511 <td class="filedate">28/09/2020 21:56:09 +0000</td>
512 <td class="comment">MiniUPnP client source code</td>
513 <td><a href="miniupnpc-2.1.20200928.tar.gz.sig">Signature</a></td>
514</tr>
515<tr>
516 <td class="filename"><a href='download.php?file=minissdpd-1.5.20200928.tar.gz'>minissdpd-1.5.20200928.tar.gz</a></td>
517 <td class="filesize">37860</td>
518 <td class="filedate">28/09/2020 21:55:40 +0000</td>
519 <td class="comment">MiniSSDPd source code</td>
520 <td><a href="minissdpd-1.5.20200928.tar.gz.sig">Signature</a></td>
521</tr>
522<tr>
523 <td class="filename"><a href='download.php?file=miniupnpd-2.2.0-RC1.tar.gz'>miniupnpd-2.2.0-RC1.tar.gz</a></td>
524 <td class="filesize">247772</td>
525 <td class="filedate">06/06/2020 18:34:50 +0000</td>
526 <td class="comment">MiniUPnP daemon release source code</td>
527 <td><a href="miniupnpd-2.2.0-RC1.tar.gz.sig">Signature</a></td>
528</tr>
529<tr>
530 <td class="filename"><a href='download.php?file=miniupnpd-2.2.0-RC0.tar.gz'>miniupnpd-2.2.0-RC0.tar.gz</a></td>
531 <td class="filesize">245507</td>
532 <td class="filedate">16/05/2020 18:03:17 +0000</td>
533 <td class="comment">MiniUPnP daemon release source code</td>
534 <td><a href="miniupnpd-2.2.0-RC0.tar.gz.sig">Signature</a></td>
535</tr>
536<tr>
537 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20200510.tar.gz'>miniupnpd-2.1.20200510.tar.gz</a></td>
538 <td class="filesize">245426</td>
539 <td class="filedate">10/05/2020 18:23:13 +0000</td>
540 <td class="comment">MiniUPnP daemon source code</td>
541 <td><a href="miniupnpd-2.1.20200510.tar.gz.sig">Signature</a></td>
542</tr>
543<tr>
544 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20200329.tar.gz'>miniupnpd-2.1.20200329.tar.gz</a></td>
545 <td class="filesize">243725</td>
546 <td class="filedate">29/03/2020 09:11:02 +0000</td>
547 <td class="comment">MiniUPnP daemon source code</td>
548 <td><a href="miniupnpd-2.1.20200329.tar.gz.sig">Signature</a></td>
549</tr>
550<tr>
551 <td class="filename"><a href='download.php?file=miniupnpc-2.1.20191224.tar.gz'>miniupnpc-2.1.20191224.tar.gz</a></td>
552 <td class="filesize">94740</td>
553 <td class="filedate">23/12/2019 23:37:32 +0000</td>
554 <td class="comment">MiniUPnP client source code</td>
555 <td><a href="miniupnpc-2.1.20191224.tar.gz.sig">Signature</a></td>
556</tr>
557<tr>
558 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20191006.tar.gz'>miniupnpd-2.1.20191006.tar.gz</a></td>
559 <td class="filesize">243255</td>
560 <td class="filedate">06/10/2019 21:02:31 +0000</td>
561 <td class="comment">MiniUPnP daemon source code</td>
562 <td><a href="miniupnpd-2.1.20191006.tar.gz.sig">Signature</a></td>
563</tr>
564<tr>
565 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20191005.tar.gz'>miniupnpd-2.1.20191005.tar.gz</a></td>
566 <td class="filesize">244100</td>
567 <td class="filedate">05/10/2019 21:33:08 +0000</td>
568 <td class="comment">MiniUPnP daemon source code</td>
569 <td><a href="miniupnpd-2.1.20191005.tar.gz.sig">Signature</a></td>
570</tr>
571<tr>
572 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20191003.tar.gz'>miniupnpd-2.1.20191003.tar.gz</a></td>
573 <td class="filesize">243287</td>
574 <td class="filedate">02/10/2019 22:23:51 +0000</td>
575 <td class="comment">MiniUPnP daemon source code</td>
576 <td><a href="miniupnpd-2.1.20191003.tar.gz.sig">Signature</a></td>
577</tr>
578<tr>
579 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190924.tar.gz'>miniupnpd-2.1.20190924.tar.gz</a></td>
580 <td class="filesize">241008</td>
581 <td class="filedate">24/09/2019 11:58:15 +0000</td>
582 <td class="comment">MiniUPnP daemon source code</td>
583 <td><a href="miniupnpd-2.1.20190924.tar.gz.sig">Signature</a></td>
584</tr>
585<tr>
586 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190902.tar.gz'>miniupnpd-2.1.20190902.tar.gz</a></td>
587 <td class="filesize">240742</td>
588 <td class="filedate">01/09/2019 23:03:03 +0000</td>
589 <td class="comment">MiniUPnP daemon source code</td>
590 <td><a href="miniupnpd-2.1.20190902.tar.gz.sig">Signature</a></td>
591</tr>
592<tr>
593 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190824.tar.gz'>miniupnpd-2.1.20190824.tar.gz</a></td>
594 <td class="filesize">240490</td>
595 <td class="filedate">24/08/2019 09:21:52 +0000</td>
596 <td class="comment">MiniUPnP daemon source code</td>
597 <td><a href="miniupnpd-2.1.20190824.tar.gz.sig">Signature</a></td>
598</tr>
599<tr>
600 <td class="filename"><a href='download.php?file=minissdpd-1.5.20190824.tar.gz'>minissdpd-1.5.20190824.tar.gz</a></td>
601 <td class="filesize">37300</td>
602 <td class="filedate">24/08/2019 09:17:32 +0000</td>
603 <td class="comment">MiniSSDPd source code</td>
604 <td><a href="minissdpd-1.5.20190824.tar.gz.sig">Signature</a></td>
605</tr>
606<tr>
607 <td class="filename"><a href='download.php?file=miniupnpc-2.1.20190824.tar.gz'>miniupnpc-2.1.20190824.tar.gz</a></td>
608 <td class="filesize">94564</td>
609 <td class="filedate">24/08/2019 09:12:50 +0000</td>
610 <td class="comment">MiniUPnP client source code</td>
611 <td><a href="miniupnpc-2.1.20190824.tar.gz.sig">Signature</a></td>
612</tr>
613<tr>
614 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190630.tar.gz'>miniupnpd-2.1.20190630.tar.gz</a></td>
615 <td class="filesize">240466</td>
616 <td class="filedate">30/06/2019 20:27:38 +0000</td>
617 <td class="comment">MiniUPnP daemon source code</td>
618 <td><a href="miniupnpd-2.1.20190630.tar.gz.sig">Signature</a></td>
619</tr>
620<tr>
621 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190625.tar.gz'>miniupnpd-2.1.20190625.tar.gz</a></td>
622 <td class="filesize">240120</td>
623 <td class="filedate">25/06/2019 21:33:49 +0000</td>
624 <td class="comment">MiniUPnP daemon source code</td>
625 <td><a href="miniupnpd-2.1.20190625.tar.gz.sig">Signature</a></td>
626</tr>
627<tr>
628 <td class="filename"><a href='download.php?file=miniupnpc-2.1.20190625.tar.gz'>miniupnpc-2.1.20190625.tar.gz</a></td>
629 <td class="filesize">94461</td>
630 <td class="filedate">25/06/2019 21:33:26 +0000</td>
631 <td class="comment">MiniUPnP client source code</td>
632 <td><a href="miniupnpc-2.1.20190625.tar.gz.sig">Signature</a></td>
633</tr>
634<tr>
635 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190502.tar.gz'>miniupnpd-2.1.20190502.tar.gz</a></td>
636 <td class="filesize">236052</td>
637 <td class="filedate">02/05/2019 17:22:23 +0000</td>
638 <td class="comment">MiniUPnP daemon source code</td>
639 <td><a href="miniupnpd-2.1.20190502.tar.gz.sig">Signature</a></td>
640</tr>
641<tr>
642 <td class="filename"><a href='download.php?file=miniupnpc-2.1.20190408.tar.gz'>miniupnpc-2.1.20190408.tar.gz</a></td>
643 <td class="filesize">94216</td>
644 <td class="filedate">08/04/2019 12:50:21 +0000</td>
645 <td class="comment">MiniUPnP client source code</td>
646 <td><a href="miniupnpc-2.1.20190408.tar.gz.sig">Signature</a></td>
647</tr>
648<tr>
649 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190408.tar.gz'>miniupnpd-2.1.20190408.tar.gz</a></td>
650 <td class="filesize">235989</td>
651 <td class="filedate">08/04/2019 12:50:01 +0000</td>
652 <td class="comment">MiniUPnP daemon source code</td>
653 <td><a href="miniupnpd-2.1.20190408.tar.gz.sig">Signature</a></td>
654</tr>
655<tr>
656 <td class="filename"><a href='download.php?file=miniupnpc-2.1.20190403.tar.gz'>miniupnpc-2.1.20190403.tar.gz</a></td>
657 <td class="filesize">94204</td>
658 <td class="filedate">03/04/2019 15:41:36 +0000</td>
659 <td class="comment">MiniUPnP client source code</td>
660 <td><a href="miniupnpc-2.1.20190403.tar.gz.sig">Signature</a></td>
661</tr>
662<tr>
663 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190403.tar.gz'>miniupnpd-2.1.20190403.tar.gz</a></td>
664 <td class="filesize">235909</td>
665 <td class="filedate">03/04/2019 15:41:17 +0000</td>
666 <td class="comment">MiniUPnP daemon source code</td>
667 <td><a href="miniupnpd-2.1.20190403.tar.gz.sig">Signature</a></td>
668</tr>
669<tr>
670 <td class="filename"><a href='download.php?file=minissdpd-1.5.20190210.tar.gz'>minissdpd-1.5.20190210.tar.gz</a></td>
671 <td class="filesize">37227</td>
672 <td class="filedate">10/02/2019 15:21:49 +0000</td>
673 <td class="comment">MiniSSDPd source code</td>
674 <td><a href="minissdpd-1.5.20190210.tar.gz.sig">Signature</a></td>
675</tr>
676<tr>
677 <td class="filename"><a href='download.php?file=miniupnpc-2.1.20190210.tar.gz'>miniupnpc-2.1.20190210.tar.gz</a></td>
678 <td class="filesize">94125</td>
679 <td class="filedate">10/02/2019 12:46:09 +0000</td>
680 <td class="comment">MiniUPnP client source code</td>
681 <td><a href="miniupnpc-2.1.20190210.tar.gz.sig">Signature</a></td>
682</tr>
683<tr>
684 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190210.tar.gz'>miniupnpd-2.1.20190210.tar.gz</a></td>
685 <td class="filesize">235093</td>
686 <td class="filedate">10/02/2019 11:20:11 +0000</td>
687 <td class="comment">MiniUPnP daemon source code</td>
688 <td><a href="miniupnpd-2.1.20190210.tar.gz.sig">Signature</a></td>
689</tr>
690<tr>
691 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20180706.tar.gz'>miniupnpd-2.1.20180706.tar.gz</a></td>
692 <td class="filesize">233675</td>
693 <td class="filedate">06/07/2018 12:44:24 +0000</td>
694 <td class="comment">MiniUPnP daemon source code</td>
695 <td><a href="miniupnpd-2.1.20180706.tar.gz.sig">Signature</a></td>
696</tr>
697<tr>
698 <td class="filename"><a href='download.php?file=miniupnpd-2.1.tar.gz'>miniupnpd-2.1.tar.gz</a></td>
699 <td class="filesize">225458</td>
700 <td class="filedate">08/05/2018 21:50:32 +0000</td>
701 <td class="comment">MiniUPnP daemon release source code</td>
702 <td><a href="miniupnpd-2.1.tar.gz.sig">Signature</a></td>
703</tr>
704<tr>
705 <td class="filename"><a href='download.php?file=miniupnpc-2.1.tar.gz'>miniupnpc-2.1.tar.gz</a></td>
706 <td class="filesize">91914</td>
707 <td class="filedate">07/05/2018 11:10:59 +0000</td>
708 <td class="comment">MiniUPnP client release source code</td>
709 <td><a href="miniupnpc-2.1.tar.gz.sig">Signature</a></td>
710</tr>
711<tr>
712 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20180503.tar.gz'>miniupnpd-2.0.20180503.tar.gz</a></td>
713 <td class="filesize">225454</td>
714 <td class="filedate">03/05/2018 08:33:10 +0000</td>
715 <td class="comment">MiniUPnP daemon source code</td>
716 <td></td>
717</tr>
718<tr>
719 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20180503.tar.gz'>miniupnpc-2.0.20180503.tar.gz</a></td>
720 <td class="filesize">88207</td>
721 <td class="filedate">03/05/2018 08:31:22 +0000</td>
722 <td class="comment">MiniUPnP client source code</td>
723 <td></td>
724</tr>
725<tr>
726 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20180422.tar.gz'>miniupnpd-2.0.20180422.tar.gz</a></td>
727 <td class="filesize">224942</td>
728 <td class="filedate">22/04/2018 19:48:54 +0000</td>
729 <td class="comment">MiniUPnP daemon source code</td>
730 <td></td>
731</tr>
732<tr>
733 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20180412.tar.gz'>miniupnpd-2.0.20180412.tar.gz</a></td>
734 <td class="filesize">224831</td>
735 <td class="filedate">12/04/2018 08:16:25 +0000</td>
736 <td class="comment">MiniUPnP daemon source code</td>
737 <td></td>
738</tr>
739<tr>
740 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20180410.tar.gz'>miniupnpd-2.0.20180410.tar.gz</a></td>
741 <td class="filesize">224736</td>
742 <td class="filedate">10/04/2018 07:58:28 +0000</td>
743 <td class="comment">MiniUPnP daemon source code</td>
744 <td></td>
745</tr>
746<tr>
747 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20180410.tar.gz'>miniupnpc-2.0.20180410.tar.gz</a></td>
748 <td class="filesize">87363</td>
749 <td class="filedate">10/04/2018 07:52:55 +0000</td>
750 <td class="comment">MiniUPnP client source code</td>
751 <td></td>
752</tr>
753<tr>
754 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20180406.tar.gz'>miniupnpc-2.0.20180406.tar.gz</a></td>
755 <td class="filesize">87374</td>
756 <td class="filedate">06/04/2018 10:55:21 +0000</td>
757 <td class="comment">MiniUPnP client source code</td>
758 <td></td>
759</tr>
760<tr>
761 <td class="filename"><a href='download.php?file=minissdpd-1.5.20180223.tar.gz'>minissdpd-1.5.20180223.tar.gz</a></td>
762 <td class="filesize">36179</td>
763 <td class="filedate">23/02/2018 14:24:07 +0000</td>
764 <td class="comment">MiniSSDPd source code</td>
765 <td></td>
766</tr>
767<tr>
768 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20180222.tar.gz'>miniupnpc-2.0.20180222.tar.gz</a></td>
769 <td class="filesize">87018</td>
770 <td class="filedate">22/02/2018 15:09:24 +0000</td>
771 <td class="comment">MiniUPnP client source code</td>
772 <td></td>
773</tr>
774<tr>
775 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20180222.tar.gz'>miniupnpd-2.0.20180222.tar.gz</a></td>
776 <td class="filesize">223697</td>
777 <td class="filedate">22/02/2018 15:09:14 +0000</td>
778 <td class="comment">MiniUPnP daemon source code</td>
779 <td></td>
780</tr>
781<tr>
782 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20180203.tar.gz'>miniupnpd-2.0.20180203.tar.gz</a></td>
783 <td class="filesize">223084</td>
784 <td class="filedate">03/02/2018 22:34:46 +0000</td>
785 <td class="comment">MiniUPnP daemon source code</td>
786 <td></td>
787</tr>
788<tr>
789 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20180203.tar.gz'>miniupnpc-2.0.20180203.tar.gz</a></td>
790 <td class="filesize">86772</td>
791 <td class="filedate">03/02/2018 22:34:32 +0000</td>
792 <td class="comment">MiniUPnP client source code</td>
793 <td></td>
794</tr>
795<tr>
796 <td class="filename"><a href='download.php?file=minissdpd-1.5.20180203.tar.gz'>minissdpd-1.5.20180203.tar.gz</a></td>
797 <td class="filesize">35848</td>
798 <td class="filedate">03/02/2018 22:33:08 +0000</td>
799 <td class="comment">MiniSSDPd source code</td>
800 <td></td>
801</tr>
802<tr>
803 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20171212.tar.gz'>miniupnpc-2.0.20171212.tar.gz</a></td>
804 <td class="filesize">86607</td>
805 <td class="filedate">12/12/2017 12:03:38 +0000</td>
806 <td class="comment">MiniUPnP client source code</td>
807 <td></td>
808</tr>
809<tr>
810 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20171212.tar.gz'>miniupnpd-2.0.20171212.tar.gz</a></td>
811 <td class="filesize">222617</td>
812 <td class="filedate">12/12/2017 12:03:32 +0000</td>
813 <td class="comment">MiniUPnP daemon source code</td>
814 <td></td>
815</tr>
816<tr>
817 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20171102.tar.gz'>miniupnpc-2.0.20171102.tar.gz</a></td>
818 <td class="filesize">86363</td>
819 <td class="filedate">02/11/2017 17:58:34 +0000</td>
820 <td class="comment">MiniUPnP client source code</td>
821 <td></td>
822</tr>
823<tr>
824 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20170509.tar.gz'>miniupnpc-2.0.20170509.tar.gz</a></td>
825 <td class="filesize">86055</td>
826 <td class="filedate">09/05/2017 10:14:56 +0000</td>
827 <td class="comment">MiniUPnP client source code</td>
828 <td></td>
829</tr>
830<tr>
831 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20170421.tar.gz'>miniupnpc-2.0.20170421.tar.gz</a></td>
832 <td class="filesize">85984</td>
833 <td class="filedate">21/04/2017 12:02:26 +0000</td>
834 <td class="comment">MiniUPnP client source code</td>
835 <td></td>
836</tr>
837<tr>
838 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20170421.tar.gz'>miniupnpd-2.0.20170421.tar.gz</a></td>
839 <td class="filesize">219191</td>
840 <td class="filedate">21/04/2017 12:02:06 +0000</td>
841 <td class="comment">MiniUPnP daemon source code</td>
842 <td></td>
843</tr>
844<tr>
845 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20161216.tar.gz'>miniupnpd-2.0.20161216.tar.gz</a></td>
846 <td class="filesize">218119</td>
847 <td class="filedate">16/12/2016 09:34:08 +0000</td>
848 <td class="comment">MiniUPnP daemon source code</td>
849 <td></td>
850</tr>
851<tr>
852 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20161216.tar.gz'>miniupnpc-2.0.20161216.tar.gz</a></td>
853 <td class="filesize">85780</td>
854 <td class="filedate">16/12/2016 09:34:03 +0000</td>
855 <td class="comment">MiniUPnP client source code</td>
856 <td></td>
857</tr>
858<tr>
859 <td class="filename"><a href='download.php?file=minissdpd-1.5.20161216.tar.gz'>minissdpd-1.5.20161216.tar.gz</a></td>
860 <td class="filesize">35078</td>
861 <td class="filedate">16/12/2016 09:33:59 +0000</td>
862 <td class="comment">MiniSSDPd source code</td>
863 <td></td>
864</tr>
865<tr>
866 <td class="filename"><a href='download.php?file=miniupnpd-2.0.tar.gz'>miniupnpd-2.0.tar.gz</a></td>
867 <td class="filesize">217802</td>
868 <td class="filedate">19/04/2016 21:12:01 +0000</td>
869 <td class="comment">MiniUPnP daemon release source code</td>
870 <td><a href="miniupnpd-2.0.tar.gz.sig">Signature</a></td>
871</tr>
872<tr>
873 <td class="filename"><a href='download.php?file=miniupnpc-2.0.tar.gz'>miniupnpc-2.0.tar.gz</a></td>
874 <td class="filesize">85287</td>
875 <td class="filedate">19/04/2016 21:07:52 +0000</td>
876 <td class="comment">MiniUPnP client release source code</td>
877 <td></td>
878</tr>
879<tr>
880 <td class="filename"><a href='download.php?file=minissdpd-1.5.20160301.tar.gz'>minissdpd-1.5.20160301.tar.gz</a></td>
881 <td class="filesize">34827</td>
882 <td class="filedate">01/03/2016 18:08:23 +0000</td>
883 <td class="comment">MiniSSDPd source code</td>
884 <td></td>
885</tr>
886<tr>
887 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20160222.tar.gz'>miniupnpd-1.9.20160222.tar.gz</a></td>
888 <td class="filesize">217541</td>
889 <td class="filedate">22/02/2016 10:21:40 +0000</td>
890 <td class="comment">MiniUPnP daemon source code</td>
891 <td></td>
892</tr>
893<tr>
894 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20160216.tar.gz'>miniupnpd-1.9.20160216.tar.gz</a></td>
895 <td class="filesize">217007</td>
896 <td class="filedate">16/02/2016 12:41:44 +0000</td>
897 <td class="comment">MiniUPnP daemon source code</td>
898 <td></td>
899</tr>
900<tr>
901 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20160212.tar.gz'>miniupnpd-1.9.20160212.tar.gz</a></td>
902 <td class="filesize">215866</td>
903 <td class="filedate">12/02/2016 15:22:04 +0000</td>
904 <td class="comment">MiniUPnP daemon source code</td>
905 <td></td>
906</tr>
907<tr>
908 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20160209.tar.gz'>miniupnpd-1.9.20160209.tar.gz</a></td>
909 <td class="filesize">213416</td>
910 <td class="filedate">09/02/2016 09:47:03 +0000</td>
911 <td class="comment">MiniUPnP daemon source code</td>
912 <td></td>
913</tr>
914<tr>
915 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20160209.tar.gz'>miniupnpc-1.9.20160209.tar.gz</a></td>
916 <td class="filesize">85268</td>
917 <td class="filedate">09/02/2016 09:44:50 +0000</td>
918 <td class="comment">MiniUPnP client source code</td>
919 <td></td>
920</tr>
921<tr>
922 <td class="filename"><a href='download.php?file=minissdpd-1.5.20160119.tar.gz'>minissdpd-1.5.20160119.tar.gz</a></td>
923 <td class="filesize">34711</td>
924 <td class="filedate">19/01/2016 13:39:51 +0000</td>
925 <td class="comment">MiniSSDPd source code</td>
926 <td></td>
927</tr>
928<tr>
929 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20160113.tar.gz'>miniupnpd-1.9.20160113.tar.gz</a></td>
930 <td class="filesize">211437</td>
931 <td class="filedate">13/01/2016 16:03:14 +0000</td>
932 <td class="comment">MiniUPnP daemon source code</td>
933 <td></td>
934</tr>
935<tr>
936 <td class="filename"><a href='download.php?file=minissdpd-1.5.tar.gz'>minissdpd-1.5.tar.gz</a></td>
937 <td class="filesize">34404</td>
938 <td class="filedate">13/01/2016 15:26:53 +0000</td>
939 <td class="comment">MiniSSDPd release source code</td>
940 <td></td>
941</tr>
942<tr>
943 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20151212.tar.gz'>miniupnpd-1.9.20151212.tar.gz</a></td>
944 <td class="filesize">210912</td>
945 <td class="filedate">12/12/2015 10:06:07 +0000</td>
946 <td class="comment">MiniUPnP daemon source code</td>
947 <td></td>
948</tr>
949<tr>
950 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20151118.tar.gz'>miniupnpd-1.9.20151118.tar.gz</a></td>
951 <td class="filesize">210322</td>
952 <td class="filedate">18/11/2015 08:59:46 +0000</td>
953 <td class="comment">MiniUPnP daemon source code</td>
954 <td></td>
955</tr>
956<tr>
957 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20151026.tar.gz'>miniupnpc-1.9.20151026.tar.gz</a></td>
958 <td class="filesize">84208</td>
959 <td class="filedate">26/10/2015 17:07:34 +0000</td>
960 <td class="comment">MiniUPnP client source code</td>
961 <td></td>
962</tr>
963<tr>
964 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20151008.tar.gz'>miniupnpc-1.9.20151008.tar.gz</a></td>
965 <td class="filesize">83538</td>
966 <td class="filedate">08/10/2015 16:22:28 +0000</td>
967 <td class="comment">MiniUPnP client source code</td>
968 <td></td>
969</tr>
970<tr>
971 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20150922.tar.gz'>miniupnpd-1.9.20150922.tar.gz</a></td>
972 <td class="filesize">208700</td>
973 <td class="filedate">22/09/2015 10:21:50 +0000</td>
974 <td class="comment">MiniUPnP daemon source code</td>
975 <td></td>
976</tr>
977<tr>
978 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20150918.zip'>upnpc-exe-win32-20150918.zip</a></td>
979 <td class="filesize">100004</td>
980 <td class="filedate">18/09/2015 12:50:51 +0000</td>
981 <td class="comment">Windows executable</td>
982 <td></td>
983</tr>
984<tr>
985 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150917.tar.gz'>miniupnpc-1.9.20150917.tar.gz</a></td>
986 <td class="filesize">82609</td>
987 <td class="filedate">17/09/2015 14:09:14 +0000</td>
988 <td class="comment">MiniUPnP client source code</td>
989 <td></td>
990</tr>
991<tr>
992 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20150824.zip'>upnpc-exe-win32-20150824.zip</a></td>
993 <td class="filesize">99520</td>
994 <td class="filedate">24/08/2015 15:25:18 +0000</td>
995 <td class="comment">Windows executable</td>
996 <td></td>
997</tr>
998<tr>
999 <td class="filename"><a href='download.php?file=minissdpd-1.4.tar.gz'>minissdpd-1.4.tar.gz</a></td>
1000 <td class="filesize">32017</td>
1001 <td class="filedate">06/08/2015 13:38:37 +0000</td>
1002 <td class="comment">MiniSSDPd release source code</td>
1003 <td></td>
1004</tr>
1005<tr>
1006 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150730.tar.gz'>miniupnpc-1.9.20150730.tar.gz</a></td>
1007 <td class="filesize">81431</td>
1008 <td class="filedate">29/07/2015 22:10:00 +0000</td>
1009 <td class="comment">MiniUPnP client source code</td>
1010 <td></td>
1011</tr>
1012<tr>
1013 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20150721.tar.gz'>miniupnpd-1.9.20150721.tar.gz</a></td>
1014 <td class="filesize">207562</td>
1015 <td class="filedate">21/07/2015 13:35:51 +0000</td>
1016 <td class="comment">MiniUPnP daemon source code</td>
1017 <td></td>
1018</tr>
1019<tr>
1020 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150721.tar.gz'>miniupnpc-1.9.20150721.tar.gz</a></td>
1021 <td class="filesize">80521</td>
1022 <td class="filedate">21/07/2015 13:27:00 +0000</td>
1023 <td class="comment">MiniUPnP client source code</td>
1024 <td></td>
1025</tr>
1026<tr>
1027 <td class="filename"><a href='download.php?file=libnatpmp-20150609.tar.gz'>libnatpmp-20150609.tar.gz</a></td>
1028 <td class="filesize">24392</td>
1029 <td class="filedate">09/06/2015 15:40:28 +0000</td>
1030 <td class="comment">libnatpmp source code</td>
1031 <td></td>
1032</tr>
1033<tr>
1034 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150609.tar.gz'>miniupnpc-1.9.20150609.tar.gz</a></td>
1035 <td class="filesize">79311</td>
1036 <td class="filedate">09/06/2015 15:39:48 +0000</td>
1037 <td class="comment">MiniUPnP client source code</td>
1038 <td></td>
1039</tr>
1040<tr>
1041 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20150609.tar.gz'>miniupnpd-1.9.20150609.tar.gz</a></td>
1042 <td class="filesize">207088</td>
1043 <td class="filedate">09/06/2015 15:39:36 +0000</td>
1044 <td class="comment">MiniUPnP daemon source code</td>
1045 <td></td>
1046</tr>
1047<tr>
1048 <td class="filename"><a href='download.php?file=minissdpd-1.3.20150527.tar.gz'>minissdpd-1.3.20150527.tar.gz</a></td>
1049 <td class="filesize">31025</td>
1050 <td class="filedate">27/05/2015 09:17:15 +0000</td>
1051 <td class="comment">MiniSSDPd source code</td>
1052 <td></td>
1053</tr>
1054<tr>
1055 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150522.tar.gz'>miniupnpc-1.9.20150522.tar.gz</a></td>
1056 <td class="filesize">79080</td>
1057 <td class="filedate">22/05/2015 11:02:27 +0000</td>
1058 <td class="comment">MiniUPnP client source code</td>
1059 <td></td>
1060</tr>
1061<tr>
1062 <td class="filename"><a href='download.php?file=minissdpd-1.3.20150522.tar.gz'>minissdpd-1.3.20150522.tar.gz</a></td>
1063 <td class="filesize">30334</td>
1064 <td class="filedate">22/05/2015 11:02:04 +0000</td>
1065 <td class="comment">MiniSSDPd source code</td>
1066 <td></td>
1067</tr>
1068<tr>
1069 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20150430.tar.gz'>miniupnpd-1.9.20150430.tar.gz</a></td>
1070 <td class="filesize">205930</td>
1071 <td class="filedate">30/04/2015 09:09:27 +0000</td>
1072 <td class="comment">MiniUPnP daemon source code</td>
1073 <td></td>
1074</tr>
1075<tr>
1076 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150430.tar.gz'>miniupnpc-1.9.20150430.tar.gz</a></td>
1077 <td class="filesize">78459</td>
1078 <td class="filedate">30/04/2015 08:39:31 +0000</td>
1079 <td class="comment">MiniUPnP client source code</td>
1080 <td></td>
1081</tr>
1082<tr>
1083 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150427.tar.gz'>miniupnpc-1.9.20150427.tar.gz</a></td>
1084 <td class="filesize">78424</td>
1085 <td class="filedate">27/04/2015 16:08:42 +0000</td>
1086 <td class="comment">MiniUPnP client source code</td>
1087 <td></td>
1088</tr>
1089<tr>
1090 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20150427.tar.gz'>miniupnpd-1.9.20150427.tar.gz</a></td>
1091 <td class="filesize">191157</td>
1092 <td class="filedate">27/04/2015 16:08:27 +0000</td>
1093 <td class="comment">MiniUPnP daemon source code</td>
1094 <td></td>
1095</tr>
1096<tr>
1097 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20150307.tar.gz'>miniupnpd-1.9.20150307.tar.gz</a></td>
1098 <td class="filesize">190913</td>
1099 <td class="filedate">07/03/2015 16:11:51 +0000</td>
1100 <td class="comment">MiniUPnP daemon source code</td>
1101 <td></td>
1102</tr>
1103<tr>
1104 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150206.tar.gz'>miniupnpc-1.9.20150206.tar.gz</a></td>
1105 <td class="filesize">76864</td>
1106 <td class="filedate">06/02/2015 14:38:00 +0000</td>
1107 <td class="comment">MiniUPnP client source code</td>
1108 <td></td>
1109</tr>
1110<tr>
1111 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20141209.tar.gz'>miniupnpd-1.9.20141209.tar.gz</a></td>
1112 <td class="filesize">193183</td>
1113 <td class="filedate">09/12/2014 09:58:34 +0000</td>
1114 <td class="comment">MiniUPnP daemon source code</td>
1115 <td></td>
1116</tr>
1117<tr>
1118 <td class="filename"><a href='download.php?file=minissdpd-1.3.tar.gz'>minissdpd-1.3.tar.gz</a></td>
1119 <td class="filesize">30326</td>
1120 <td class="filedate">09/12/2014 09:57:30 +0000</td>
1121 <td class="comment">MiniSSDPd release source code</td>
1122 <td></td>
1123</tr>
1124<tr>
1125 <td class="filename"><a href='download.php?file=minissdpd-1.2.20141204.tar.gz'>minissdpd-1.2.20141204.tar.gz</a></td>
1126 <td class="filesize">26978</td>
1127 <td class="filedate">04/12/2014 10:55:26 +0000</td>
1128 <td class="comment">MiniSSDPd source code</td>
1129 <td></td>
1130</tr>
1131<tr>
1132 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20141204.tar.gz'>miniupnpd-1.9.20141204.tar.gz</a></td>
1133 <td class="filesize">192597</td>
1134 <td class="filedate">04/12/2014 10:55:03 +0000</td>
1135 <td class="comment">MiniUPnP daemon source code</td>
1136 <td></td>
1137</tr>
1138<tr>
1139 <td class="filename"><a href='download.php?file=minissdpd-1.2.20141128.tar.gz'>minissdpd-1.2.20141128.tar.gz</a></td>
1140 <td class="filesize">26795</td>
1141 <td class="filedate">28/11/2014 16:33:10 +0000</td>
1142 <td class="comment">MiniSSDPd source code</td>
1143 <td></td>
1144</tr>
1145<tr>
1146 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20141128.tar.gz'>miniupnpd-1.9.20141128.tar.gz</a></td>
1147 <td class="filesize">192558</td>
1148 <td class="filedate">28/11/2014 13:31:36 +0000</td>
1149 <td class="comment">MiniUPnP daemon source code</td>
1150 <td></td>
1151</tr>
1152<tr>
1153 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20141128.tar.gz'>miniupnpc-1.9.20141128.tar.gz</a></td>
1154 <td class="filesize">76541</td>
1155 <td class="filedate">28/11/2014 13:31:15 +0000</td>
1156 <td class="comment">MiniUPnP client source code</td>
1157 <td></td>
1158</tr>
1159<tr>
1160 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20141117.tar.gz'>miniupnpc-1.9.20141117.tar.gz</a></td>
1161 <td class="filesize">73865</td>
1162 <td class="filedate">17/11/2014 09:51:36 +0000</td>
1163 <td class="comment">MiniUPnP client source code</td>
1164 <td></td>
1165</tr>
1166<tr>
1167 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20141113.tar.gz'>miniupnpc-1.9.20141113.tar.gz</a></td>
1168 <td class="filesize">72857</td>
1169 <td class="filedate">13/11/2014 10:36:44 +0000</td>
1170 <td class="comment">MiniUPnP client source code</td>
1171 <td></td>
1172</tr>
1173<tr>
1174 <td class="filename"><a href='download.php?file=minissdpd-1.2.20141108.tar.gz'>minissdpd-1.2.20141108.tar.gz</a></td>
1175 <td class="filesize">22001</td>
1176 <td class="filedate">08/11/2014 13:55:41 +0000</td>
1177 <td class="comment">MiniSSDPd source code</td>
1178 <td></td>
1179</tr>
1180<tr>
1181 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20141108.tar.gz'>miniupnpc-1.9.20141108.tar.gz</a></td>
1182 <td class="filesize">72781</td>
1183 <td class="filedate">08/11/2014 13:53:48 +0000</td>
1184 <td class="comment">MiniUPnP client source code</td>
1185 <td></td>
1186</tr>
1187<tr>
1188 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20141108.tar.gz'>miniupnpd-1.9.20141108.tar.gz</a></td>
1189 <td class="filesize">192413</td>
1190 <td class="filedate">08/11/2014 13:53:38 +0000</td>
1191 <td class="comment">MiniUPnP daemon source code</td>
1192 <td></td>
1193</tr>
1194<tr>
1195 <td class="filename"><a href='download.php?file=miniupnpd-1.9.tar.gz'>miniupnpd-1.9.tar.gz</a></td>
1196 <td class="filesize">192183</td>
1197 <td class="filedate">27/10/2014 16:45:34 +0000</td>
1198 <td class="comment">MiniUPnP daemon release source code</td>
1199 <td></td>
1200</tr>
1201<tr>
1202 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20141027.tar.gz'>miniupnpc-1.9.20141027.tar.gz</a></td>
1203 <td class="filesize">76763</td>
1204 <td class="filedate">27/10/2014 16:45:25 +0000</td>
1205 <td class="comment">MiniUPnP client source code</td>
1206 <td></td>
1207</tr>
1208<tr>
1209 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20141022.tar.gz'>miniupnpd-1.8.20141022.tar.gz</a></td>
1210 <td class="filesize">191630</td>
1211 <td class="filedate">22/10/2014 09:17:41 +0000</td>
1212 <td class="comment">MiniUPnP daemon source code</td>
1213 <td></td>
1214</tr>
1215<tr>
1216 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20141021.tar.gz'>miniupnpd-1.8.20141021.tar.gz</a></td>
1217 <td class="filesize">191270</td>
1218 <td class="filedate">21/10/2014 14:18:58 +0000</td>
1219 <td class="comment">MiniUPnP daemon source code</td>
1220 <td></td>
1221</tr>
1222<tr>
1223 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20140911.tar.gz'>miniupnpc-1.9.20140911.tar.gz</a></td>
1224 <td class="filesize">76855</td>
1225 <td class="filedate">11/09/2014 14:15:23 +0000</td>
1226 <td class="comment">MiniUPnP client source code</td>
1227 <td></td>
1228</tr>
1229<tr>
1230 <td class="filename"><a href='download.php?file=minissdpd-1.2.20140906.tar.gz'>minissdpd-1.2.20140906.tar.gz</a></td>
1231 <td class="filesize">21956</td>
1232 <td class="filedate">06/09/2014 08:34:10 +0000</td>
1233 <td class="comment">MiniSSDPd source code</td>
1234 <td></td>
1235</tr>
1236<tr>
1237 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140906.tar.gz'>miniupnpd-1.8.20140906.tar.gz</a></td>
1238 <td class="filesize">191183</td>
1239 <td class="filedate">06/09/2014 08:34:02 +0000</td>
1240 <td class="comment">MiniUPnP daemon source code</td>
1241 <td></td>
1242</tr>
1243<tr>
1244 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20140906.tar.gz'>miniupnpc-1.9.20140906.tar.gz</a></td>
1245 <td class="filesize">76791</td>
1246 <td class="filedate">06/09/2014 08:33:45 +0000</td>
1247 <td class="comment">MiniUPnP client source code</td>
1248 <td></td>
1249</tr>
1250<tr>
1251 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20140701.tar.gz'>miniupnpc-1.9.20140701.tar.gz</a></td>
1252 <td class="filesize">76735</td>
1253 <td class="filedate">01/07/2014 13:06:51 +0000</td>
1254 <td class="comment">MiniUPnP client source code</td>
1255 <td></td>
1256</tr>
1257<tr>
1258 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20140610.tar.gz'>miniupnpc-1.9.20140610.tar.gz</a></td>
1259 <td class="filesize">76674</td>
1260 <td class="filedate">10/06/2014 10:28:27 +0000</td>
1261 <td class="comment">MiniUPnP client source code</td>
1262 <td></td>
1263</tr>
1264<tr>
1265 <td class="filename"><a href='download.php?file=minissdpd-1.2.20140610.tar.gz'>minissdpd-1.2.20140610.tar.gz</a></td>
1266 <td class="filesize">21909</td>
1267 <td class="filedate">10/06/2014 10:03:29 +0000</td>
1268 <td class="comment">MiniSSDPd source code</td>
1269 <td></td>
1270</tr>
1271<tr>
1272 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140523.tar.gz'>miniupnpd-1.8.20140523.tar.gz</a></td>
1273 <td class="filesize">190936</td>
1274 <td class="filedate">23/05/2014 15:48:03 +0000</td>
1275 <td class="comment">MiniUPnP daemon source code</td>
1276 <td></td>
1277</tr>
1278<tr>
1279 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20140422.zip'>upnpc-exe-win32-20140422.zip</a></td>
1280 <td class="filesize">97505</td>
1281 <td class="filedate">22/04/2014 10:10:07 +0000</td>
1282 <td class="comment">Windows executable</td>
1283 <td></td>
1284</tr>
1285<tr>
1286 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140422.tar.gz'>miniupnpd-1.8.20140422.tar.gz</a></td>
1287 <td class="filesize">187225</td>
1288 <td class="filedate">22/04/2014 08:58:56 +0000</td>
1289 <td class="comment">MiniUPnP daemon source code</td>
1290 <td></td>
1291</tr>
1292<tr>
1293 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140401.tar.gz'>miniupnpd-1.8.20140401.tar.gz</a></td>
1294 <td class="filesize">183131</td>
1295 <td class="filedate">01/04/2014 10:07:20 +0000</td>
1296 <td class="comment">MiniUPnP daemon source code</td>
1297 <td></td>
1298</tr>
1299<tr>
1300 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20140401.tar.gz'>miniupnpc-1.9.20140401.tar.gz</a></td>
1301 <td class="filesize">74703</td>
1302 <td class="filedate">01/04/2014 09:49:46 +0000</td>
1303 <td class="comment">MiniUPnP client source code</td>
1304 <td></td>
1305</tr>
1306<tr>
1307 <td class="filename"><a href='download.php?file=libnatpmp-20140401.tar.gz'>libnatpmp-20140401.tar.gz</a></td>
1308 <td class="filesize">23302</td>
1309 <td class="filedate">01/04/2014 09:49:44 +0000</td>
1310 <td class="comment">libnatpmp source code</td>
1311 <td></td>
1312</tr>
1313<tr>
1314 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140313.tar.gz'>miniupnpd-1.8.20140313.tar.gz</a></td>
1315 <td class="filesize">177120</td>
1316 <td class="filedate">13/03/2014 10:39:11 +0000</td>
1317 <td class="comment">MiniUPnP daemon source code</td>
1318 <td></td>
1319</tr>
1320<tr>
1321 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140310.tar.gz'>miniupnpd-1.8.20140310.tar.gz</a></td>
1322 <td class="filesize">176585</td>
1323 <td class="filedate">09/03/2014 23:16:49 +0000</td>
1324 <td class="comment">MiniUPnP daemon source code</td>
1325 <td></td>
1326</tr>
1327<tr>
1328 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140225.tar.gz'>miniupnpd-1.8.20140225.tar.gz</a></td>
1329 <td class="filesize">175183</td>
1330 <td class="filedate">25/02/2014 11:01:29 +0000</td>
1331 <td class="comment">MiniUPnP daemon source code</td>
1332 <td></td>
1333</tr>
1334<tr>
1335 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140203.tar.gz'>miniupnpd-1.8.20140203.tar.gz</a></td>
1336 <td class="filesize">170112</td>
1337 <td class="filedate">03/02/2014 09:56:05 +0000</td>
1338 <td class="comment">MiniUPnP daemon source code</td>
1339 <td></td>
1340</tr>
1341<tr>
1342 <td class="filename"><a href='download.php?file=miniupnpc-1.9.tar.gz'>miniupnpc-1.9.tar.gz</a></td>
1343 <td class="filesize">74230</td>
1344 <td class="filedate">31/01/2014 13:57:40 +0000</td>
1345 <td class="comment">MiniUPnP client release source code</td>
1346 <td></td>
1347</tr>
1348<tr>
1349 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140127.tar.gz'>miniupnpd-1.8.20140127.tar.gz</a></td>
1350 <td class="filesize">170467</td>
1351 <td class="filedate">27/01/2014 11:25:34 +0000</td>
1352 <td class="comment">MiniUPnP daemon source code</td>
1353 <td></td>
1354</tr>
1355<tr>
1356 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20140117.zip'>upnpc-exe-win32-20140117.zip</a></td>
1357 <td class="filesize">97270</td>
1358 <td class="filedate">17/01/2014 11:37:53 +0000</td>
1359 <td class="comment">Windows executable</td>
1360 <td></td>
1361</tr>
1362<tr>
1363 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20131216.tar.gz'>miniupnpd-1.8.20131216.tar.gz</a></td>
1364 <td class="filesize">170277</td>
1365 <td class="filedate">16/12/2013 16:15:40 +0000</td>
1366 <td class="comment">MiniUPnP daemon source code</td>
1367 <td></td>
1368</tr>
1369<tr>
1370 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20131213.tar.gz'>miniupnpd-1.8.20131213.tar.gz</a></td>
1371 <td class="filesize">169753</td>
1372 <td class="filedate">13/12/2013 16:18:10 +0000</td>
1373 <td class="comment">MiniUPnP daemon source code</td>
1374 <td></td>
1375</tr>
1376<tr>
1377 <td class="filename"><a href='download.php?file=miniupnpc-1.8.20131209.tar.gz'>miniupnpc-1.8.20131209.tar.gz</a></td>
1378 <td class="filesize">73900</td>
1379 <td class="filedate">09/12/2013 20:52:54 +0000</td>
1380 <td class="comment">MiniUPnP client source code</td>
1381 <td></td>
1382</tr>
1383<tr>
1384 <td class="filename"><a href='download.php?file=libnatpmp-20131126.tar.gz'>libnatpmp-20131126.tar.gz</a></td>
1385 <td class="filesize">22972</td>
1386 <td class="filedate">26/11/2013 08:51:36 +0000</td>
1387 <td class="comment">libnatpmp source code</td>
1388 <td></td>
1389</tr>
1390<tr>
1391 <td class="filename"><a href='download.php?file=miniupnpc-1.8.20131007.tar.gz'>miniupnpc-1.8.20131007.tar.gz</a></td>
1392 <td class="filesize">73750</td>
1393 <td class="filedate">07/10/2013 10:10:25 +0000</td>
1394 <td class="comment">MiniUPnP client source code</td>
1395 <td></td>
1396</tr>
1397<tr>
1398 <td class="filename"><a href='download.php?file=libnatpmp-20130911.tar.gz'>libnatpmp-20130911.tar.gz</a></td>
1399 <td class="filesize">18744</td>
1400 <td class="filedate">11/09/2013 07:35:51 +0000</td>
1401 <td class="comment">libnatpmp source code</td>
1402 <td></td>
1403</tr>
1404<tr>
1405 <td class="filename"><a href='download.php?file=libnatpmp-20130910.tar.gz'>libnatpmp-20130910.tar.gz</a></td>
1406 <td class="filesize">18734</td>
1407 <td class="filedate">10/09/2013 20:15:34 +0000</td>
1408 <td class="comment">libnatpmp source code</td>
1409 <td></td>
1410</tr>
1411<tr>
1412 <td class="filename"><a href='download.php?file=minissdpd-1.2.20130907.tar.gz'>minissdpd-1.2.20130907.tar.gz</a></td>
1413 <td class="filesize">20237</td>
1414 <td class="filedate">07/09/2013 06:46:31 +0000</td>
1415 <td class="comment">MiniSSDPd source code</td>
1416 <td></td>
1417</tr>
1418<tr>
1419 <td class="filename"><a href='download.php?file=minissdpd-1.2.20130819.tar.gz'>minissdpd-1.2.20130819.tar.gz</a></td>
1420 <td class="filesize">20772</td>
1421 <td class="filedate">19/08/2013 16:50:29 +0000</td>
1422 <td class="comment">MiniSSDPd source code</td>
1423 <td></td>
1424</tr>
1425<tr>
1426 <td class="filename"><a href='download.php?file=miniupnpc-1.8.20130801.tar.gz'>miniupnpc-1.8.20130801.tar.gz</a></td>
1427 <td class="filesize">73426</td>
1428 <td class="filedate">01/08/2013 21:38:05 +0000</td>
1429 <td class="comment">MiniUPnP client source code</td>
1430 <td></td>
1431</tr>
1432<tr>
1433 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20130730.tar.gz'>miniupnpd-1.8.20130730.tar.gz</a></td>
1434 <td class="filesize">149904</td>
1435 <td class="filedate">30/07/2013 11:37:48 +0000</td>
1436 <td class="comment">MiniUPnP daemon source code</td>
1437 <td></td>
1438</tr>
1439<tr>
1440 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20130607.tar.gz'>miniupnpd-1.8.20130607.tar.gz</a></td>
1441 <td class="filesize">149521</td>
1442 <td class="filedate">07/06/2013 08:46:17 +0000</td>
1443 <td class="comment">MiniUPnP daemon source code</td>
1444 <td></td>
1445</tr>
1446<tr>
1447 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20130521.tar.gz'>miniupnpd-1.8.20130521.tar.gz</a></td>
1448 <td class="filesize">149276</td>
1449 <td class="filedate">21/05/2013 09:01:33 +0000</td>
1450 <td class="comment">MiniUPnP daemon source code</td>
1451 <td></td>
1452</tr>
1453<tr>
1454 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20130503.tar.gz'>miniupnpd-1.8.20130503.tar.gz</a></td>
1455 <td class="filesize">148420</td>
1456 <td class="filedate">03/05/2013 19:27:16 +0000</td>
1457 <td class="comment">MiniUPnP daemon source code</td>
1458 <td></td>
1459</tr>
1460<tr>
1461 <td class="filename"><a href='download.php?file=miniupnpc-1.8.20130503.tar.gz'>miniupnpc-1.8.20130503.tar.gz</a></td>
1462 <td class="filesize">71858</td>
1463 <td class="filedate">03/05/2013 19:27:07 +0000</td>
1464 <td class="comment">MiniUPnP client source code</td>
1465 <td></td>
1466</tr>
1467<tr>
1468 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20130426.tar.gz'>miniupnpd-1.8.20130426.tar.gz</a></td>
1469 <td class="filesize">147890</td>
1470 <td class="filedate">26/04/2013 16:57:20 +0000</td>
1471 <td class="comment">MiniUPnP daemon source code</td>
1472 <td></td>
1473</tr>
1474<tr>
1475 <td class="filename"><a href='download.php?file=miniupnpc-1.8.20130211.tar.gz'>miniupnpc-1.8.20130211.tar.gz</a></td>
1476 <td class="filesize">70723</td>
1477 <td class="filedate">11/02/2013 10:32:44 +0000</td>
1478 <td class="comment">MiniUPnP client source code</td>
1479 <td></td>
1480</tr>
1481<tr>
1482 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20130207.tar.gz'>miniupnpd-1.8.20130207.tar.gz</a></td>
1483 <td class="filesize">147325</td>
1484 <td class="filedate">07/02/2013 12:29:32 +0000</td>
1485 <td class="comment">MiniUPnP daemon source code</td>
1486 <td></td>
1487</tr>
1488<tr>
1489 <td class="filename"><a href='download.php?file=miniupnpc-1.8.tar.gz'>miniupnpc-1.8.tar.gz</a></td>
1490 <td class="filesize">70624</td>
1491 <td class="filedate">06/02/2013 14:31:06 +0000</td>
1492 <td class="comment">MiniUPnP client release source code</td>
1493 <td></td>
1494</tr>
1495<tr>
1496 <td class="filename"><a href='download.php?file=miniupnpd-1.8.tar.gz'>miniupnpd-1.8.tar.gz</a></td>
1497 <td class="filesize">146679</td>
1498 <td class="filedate">06/02/2013 14:30:59 +0000</td>
1499 <td class="comment">MiniUPnP daemon release source code</td>
1500 <td></td>
1501</tr>
1502<tr>
1503 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20121009.zip'>upnpc-exe-win32-20121009.zip</a></td>
1504 <td class="filesize">96513</td>
1505 <td class="filedate">09/10/2012 17:54:12 +0000</td>
1506 <td class="comment">Windows executable</td>
1507 <td></td>
1508</tr>
1509<tr>
1510 <td class="filename"><a href='download.php?file=miniupnpd-1.7.20121005.tar.gz'>miniupnpd-1.7.20121005.tar.gz</a></td>
1511 <td class="filesize">144393</td>
1512 <td class="filedate">04/10/2012 22:39:05 +0000</td>
1513 <td class="comment">MiniUPnP daemon source code</td>
1514 <td></td>
1515</tr>
1516<tr>
1517 <td class="filename"><a href='download.php?file=miniupnpc-1.7.20120830.tar.gz'>miniupnpc-1.7.20120830.tar.gz</a></td>
1518 <td class="filesize">70074</td>
1519 <td class="filedate">30/08/2012 08:41:51 +0000</td>
1520 <td class="comment">MiniUPnP client source code</td>
1521 <td></td>
1522</tr>
1523<tr>
1524 <td class="filename"><a href='download.php?file=miniupnpd-1.7.20120824.tar.gz'>miniupnpd-1.7.20120824.tar.gz</a></td>
1525 <td class="filesize">141960</td>
1526 <td class="filedate">24/08/2012 18:15:01 +0000</td>
1527 <td class="comment">MiniUPnP daemon source code</td>
1528 <td></td>
1529</tr>
1530<tr>
1531 <td class="filename"><a href='download.php?file=libnatpmp-20120821.tar.gz'>libnatpmp-20120821.tar.gz</a></td>
1532 <td class="filesize">17832</td>
1533 <td class="filedate">21/08/2012 17:24:46 +0000</td>
1534 <td class="comment">libnatpmp source code</td>
1535 <td></td>
1536</tr>
1537<tr>
1538 <td class="filename"><a href='download.php?file=miniupnpc-1.7.20120714.tar.gz'>miniupnpc-1.7.20120714.tar.gz</a></td>
1539 <td class="filesize">69570</td>
1540 <td class="filedate">14/07/2012 14:40:47 +0000</td>
1541 <td class="comment">MiniUPnP client source code</td>
1542 <td></td>
1543</tr>
1544<tr>
1545 <td class="filename"><a href='download.php?file=miniupnpc-1.7.20120711.tar.gz'>miniupnpc-1.7.20120711.tar.gz</a></td>
1546 <td class="filesize">69580</td>
1547 <td class="filedate">10/07/2012 22:27:05 +0000</td>
1548 <td class="comment">MiniUPnP client source code</td>
1549 <td></td>
1550</tr>
1551<tr>
1552 <td class="filename"><a href='download.php?file=miniupnpd-1.7.20120711.tar.gz'>miniupnpd-1.7.20120711.tar.gz</a></td>
1553 <td class="filesize">141380</td>
1554 <td class="filedate">10/07/2012 22:26:58 +0000</td>
1555 <td class="comment">MiniUPnP daemon source code</td>
1556 <td></td>
1557</tr>
1558<tr>
1559 <td class="filename"><a href='download.php?file=miniupnpd-1.7.tar.gz'>miniupnpd-1.7.tar.gz</a></td>
1560 <td class="filesize">138047</td>
1561 <td class="filedate">27/05/2012 23:13:30 +0000</td>
1562 <td class="comment">MiniUPnP daemon release source code</td>
1563 <td></td>
1564</tr>
1565<tr>
1566 <td class="filename"><a href='download.php?file=miniupnpc-1.7.tar.gz'>miniupnpc-1.7.tar.gz</a></td>
1567 <td class="filesize">68327</td>
1568 <td class="filedate">24/05/2012 18:17:48 +0000</td>
1569 <td class="comment">MiniUPnP client release source code</td>
1570 <td></td>
1571</tr>
1572<tr>
1573 <td class="filename"><a href='download.php?file=minissdpd-1.2.tar.gz'>minissdpd-1.2.tar.gz</a></td>
1574 <td class="filesize">19874</td>
1575 <td class="filedate">24/05/2012 18:06:24 +0000</td>
1576 <td class="comment">MiniSSDPd release source code</td>
1577 <td></td>
1578</tr>
1579<tr>
1580 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120509.tar.gz'>miniupnpd-1.6.20120509.tar.gz</a></td>
1581 <td class="filesize">137147</td>
1582 <td class="filedate">09/05/2012 10:45:44 +0000</td>
1583 <td class="comment">MiniUPnP daemon source code</td>
1584 <td></td>
1585</tr>
1586<tr>
1587 <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120509.tar.gz'>miniupnpc-1.6.20120509.tar.gz</a></td>
1588 <td class="filesize">68205</td>
1589 <td class="filedate">09/05/2012 10:45:41 +0000</td>
1590 <td class="comment">MiniUPnP client source code</td>
1591 <td></td>
1592</tr>
1593<tr>
1594 <td class="filename"><a href='download.php?file=minissdpd-1.1.20120509.tar.gz'>minissdpd-1.1.20120509.tar.gz</a></td>
1595 <td class="filesize">18123</td>
1596 <td class="filedate">09/05/2012 10:45:39 +0000</td>
1597 <td class="comment">MiniSSDPd source code</td>
1598 <td></td>
1599</tr>
1600<tr>
1601 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120502.tar.gz'>miniupnpd-1.6.20120502.tar.gz</a></td>
1602 <td class="filesize">136688</td>
1603 <td class="filedate">01/05/2012 22:51:18 +0000</td>
1604 <td class="comment">MiniUPnP daemon source code</td>
1605 <td></td>
1606</tr>
1607<tr>
1608 <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120502.tar.gz'>miniupnpc-1.6.20120502.tar.gz</a></td>
1609 <td class="filesize">68170</td>
1610 <td class="filedate">01/05/2012 22:51:11 +0000</td>
1611 <td class="comment">MiniUPnP client source code</td>
1612 <td></td>
1613</tr>
1614<tr>
1615 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120426.tar.gz'>miniupnpd-1.6.20120426.tar.gz</a></td>
1616 <td class="filesize">134764</td>
1617 <td class="filedate">26/04/2012 16:24:29 +0000</td>
1618 <td class="comment">MiniUPnP daemon source code</td>
1619 <td></td>
1620</tr>
1621<tr>
1622 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120424.tar.gz'>miniupnpd-1.6.20120424.tar.gz</a></td>
1623 <td class="filesize">132522</td>
1624 <td class="filedate">23/04/2012 22:43:17 +0000</td>
1625 <td class="comment">MiniUPnP daemon source code</td>
1626 <td></td>
1627</tr>
1628<tr>
1629 <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120424.tar.gz'>miniupnpc-1.6.20120424.tar.gz</a></td>
1630 <td class="filesize">68067</td>
1631 <td class="filedate">23/04/2012 22:43:10 +0000</td>
1632 <td class="comment">MiniUPnP client source code</td>
1633 <td></td>
1634</tr>
1635<tr>
1636 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120420.tar.gz'>miniupnpd-1.6.20120420.tar.gz</a></td>
1637 <td class="filesize">131972</td>
1638 <td class="filedate">20/04/2012 14:58:57 +0000</td>
1639 <td class="comment">MiniUPnP daemon source code</td>
1640 <td></td>
1641</tr>
1642<tr>
1643 <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120420.tar.gz'>miniupnpc-1.6.20120420.tar.gz</a></td>
1644 <td class="filesize">68068</td>
1645 <td class="filedate">20/04/2012 14:58:39 +0000</td>
1646 <td class="comment">MiniUPnP client source code</td>
1647 <td></td>
1648</tr>
1649<tr>
1650 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120419.tar.gz'>miniupnpd-1.6.20120419.tar.gz</a></td>
1651 <td class="filesize">131088</td>
1652 <td class="filedate">18/04/2012 23:41:36 +0000</td>
1653 <td class="comment">MiniUPnP daemon source code</td>
1654 <td></td>
1655</tr>
1656<tr>
1657 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120418.tar.gz'>miniupnpd-1.6.20120418.tar.gz</a></td>
1658 <td class="filesize">130879</td>
1659 <td class="filedate">18/04/2012 21:01:10 +0000</td>
1660 <td class="comment">MiniUPnP daemon source code</td>
1661 <td></td>
1662</tr>
1663<tr>
1664 <td class="filename"><a href='download.php?file=minissdpd-1.1.20120410.tar.gz'>minissdpd-1.1.20120410.tar.gz</a></td>
1665 <td class="filesize">18059</td>
1666 <td class="filedate">09/04/2012 22:45:38 +0000</td>
1667 <td class="comment">MiniSSDPd source code</td>
1668 <td></td>
1669</tr>
1670<tr>
1671 <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120410.tar.gz'>miniupnpc-1.6.20120410.tar.gz</a></td>
1672 <td class="filesize">67934</td>
1673 <td class="filedate">09/04/2012 22:45:10 +0000</td>
1674 <td class="comment">MiniUPnP client source code</td>
1675 <td></td>
1676</tr>
1677<tr>
1678 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120406.tar.gz'>miniupnpd-1.6.20120406.tar.gz</a></td>
1679 <td class="filesize">128992</td>
1680 <td class="filedate">06/04/2012 17:52:57 +0000</td>
1681 <td class="comment">MiniUPnP daemon source code</td>
1682 <td></td>
1683</tr>
1684<tr>
1685 <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120320.tar.gz'>miniupnpc-1.6.20120320.tar.gz</a></td>
1686 <td class="filesize">67374</td>
1687 <td class="filedate">20/03/2012 16:55:48 +0000</td>
1688 <td class="comment">MiniUPnP client source code</td>
1689 <td></td>
1690</tr>
1691<tr>
1692 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120320.tar.gz'>miniupnpd-1.6.20120320.tar.gz</a></td>
1693 <td class="filesize">127968</td>
1694 <td class="filedate">20/03/2012 16:46:07 +0000</td>
1695 <td class="comment">MiniUPnP daemon source code</td>
1696 <td></td>
1697</tr>
1698<tr>
1699 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120305.tar.gz'>miniupnpd-1.6.20120305.tar.gz</a></td>
1700 <td class="filesize">126985</td>
1701 <td class="filedate">05/03/2012 20:42:01 +0000</td>
1702 <td class="comment">MiniUPnP daemon source code</td>
1703 <td></td>
1704</tr>
1705<tr>
1706 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120207.tar.gz'>miniupnpd-1.6.20120207.tar.gz</a></td>
1707 <td class="filesize">127425</td>
1708 <td class="filedate">07/02/2012 10:21:16 +0000</td>
1709 <td class="comment">MiniUPnP daemon source code</td>
1710 <td></td>
1711</tr>
1712<tr>
1713 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120203.tar.gz'>miniupnpd-1.6.20120203.tar.gz</a></td>
1714 <td class="filesize">126599</td>
1715 <td class="filedate">03/02/2012 15:14:13 +0000</td>
1716 <td class="comment">MiniUPnP daemon source code</td>
1717 <td></td>
1718</tr>
1719<tr>
1720 <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120125.tar.gz'>miniupnpc-1.6.20120125.tar.gz</a></td>
1721 <td class="filesize">67354</td>
1722 <td class="filedate">25/01/2012 21:12:28 +0000</td>
1723 <td class="comment">MiniUPnP client source code</td>
1724 <td></td>
1725</tr>
1726<tr>
1727 <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120121.tar.gz'>miniupnpc-1.6.20120121.tar.gz</a></td>
1728 <td class="filesize">67347</td>
1729 <td class="filedate">21/01/2012 14:07:41 +0000</td>
1730 <td class="comment">MiniUPnP client source code</td>
1731 <td></td>
1732</tr>
1733<tr>
1734 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120121.tar.gz'>miniupnpd-1.6.20120121.tar.gz</a></td>
1735 <td class="filesize">126021</td>
1736 <td class="filedate">21/01/2012 14:07:33 +0000</td>
1737 <td class="comment">MiniUPnP daemon source code</td>
1738 <td></td>
1739</tr>
1740<tr>
1741 <td class="filename"><a href='download.php?file=minissdpd-1.1.20120121.tar.gz'>minissdpd-1.1.20120121.tar.gz</a></td>
1742 <td class="filesize">17762</td>
1743 <td class="filedate">21/01/2012 14:07:16 +0000</td>
1744 <td class="comment">MiniSSDPd source code</td>
1745 <td></td>
1746</tr>
1747<tr>
1748 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20120121.zip'>upnpc-exe-win32-20120121.zip</a></td>
1749 <td class="filesize">94575</td>
1750 <td class="filedate">21/01/2012 13:59:11 +0000</td>
1751 <td class="comment">Windows executable</td>
1752 <td></td>
1753</tr>
1754<tr>
1755 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20111212.zip'>upnpc-exe-win32-20111212.zip</a></td>
1756 <td class="filesize">94507</td>
1757 <td class="filedate">12/12/2011 12:33:48 +0000</td>
1758 <td class="comment">Windows executable</td>
1759 <td></td>
1760</tr>
1761<tr>
1762 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20111118.tar.gz'>miniupnpd-1.6.20111118.tar.gz</a></td>
1763 <td class="filesize">125683</td>
1764 <td class="filedate">18/11/2011 11:26:12 +0000</td>
1765 <td class="comment">MiniUPnP daemon source code</td>
1766 <td></td>
1767</tr>
1768<tr>
1769 <td class="filename"><a href='download.php?file=minissdpd-1.1.20111007.tar.gz'>minissdpd-1.1.20111007.tar.gz</a></td>
1770 <td class="filesize">17611</td>
1771 <td class="filedate">07/10/2011 09:47:51 +0000</td>
1772 <td class="comment">MiniSSDPd source code</td>
1773 <td></td>
1774</tr>
1775<tr>
1776 <td class="filename"><a href='download.php?file=xchat-upnp20110811.patch'>xchat-upnp20110811.patch</a></td>
1777 <td class="filesize">10329</td>
1778 <td class="filedate">11/08/2011 15:18:25 +0000</td>
1779 <td class="comment">Patch to add UPnP capabilities to xchat</td>
1780 <td></td>
1781</tr>
1782<tr>
1783 <td class="filename"><a href='download.php?file=xchat-upnp20110811-2.8.8.patch'>xchat-upnp20110811-2.8.8.patch</a></td>
1784 <td class="filesize">11529</td>
1785 <td class="filedate">11/08/2011 15:18:23 +0000</td>
1786 <td class="comment">Patch to add UPnP capabilities to xchat</td>
1787 <td></td>
1788</tr>
1789<tr>
1790 <td class="filename"><a href='download.php?file=libnatpmp-20110808.tar.gz'>libnatpmp-20110808.tar.gz</a></td>
1791 <td class="filesize">17762</td>
1792 <td class="filedate">08/08/2011 21:21:34 +0000</td>
1793 <td class="comment">libnatpmp source code</td>
1794 <td></td>
1795</tr>
1796<tr>
1797 <td class="filename"><a href='download.php?file=libnatpmp-20110730.tar.gz'>libnatpmp-20110730.tar.gz</a></td>
1798 <td class="filesize">17687</td>
1799 <td class="filedate">30/07/2011 13:19:31 +0000</td>
1800 <td class="comment">libnatpmp source code</td>
1801 <td></td>
1802</tr>
1803<tr>
1804 <td class="filename"><a href='download.php?file=minissdpd-1.1.tar.gz'>minissdpd-1.1.tar.gz</a></td>
1805 <td class="filesize">17481</td>
1806 <td class="filedate">30/07/2011 13:17:30 +0000</td>
1807 <td class="comment">MiniSSDPd release source code</td>
1808 <td></td>
1809</tr>
1810<tr>
1811 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20110730.tar.gz'>miniupnpd-1.6.20110730.tar.gz</a></td>
1812 <td class="filesize">125583</td>
1813 <td class="filedate">30/07/2011 13:17:09 +0000</td>
1814 <td class="comment">MiniUPnP daemon source code</td>
1815 <td></td>
1816</tr>
1817<tr>
1818 <td class="filename"><a href='download.php?file=minissdpd-1.0.20110729.tar.gz'>minissdpd-1.0.20110729.tar.gz</a></td>
1819 <td class="filesize">15898</td>
1820 <td class="filedate">29/07/2011 08:47:26 +0000</td>
1821 <td class="comment">MiniSSDPd source code</td>
1822 <td></td>
1823</tr>
1824<tr>
1825 <td class="filename"><a href='download.php?file=miniupnpc-1.6.tar.gz'>miniupnpc-1.6.tar.gz</a></td>
1826 <td class="filesize">66454</td>
1827 <td class="filedate">25/07/2011 18:03:09 +0000</td>
1828 <td class="comment">MiniUPnP client release source code</td>
1829 <td></td>
1830</tr>
1831<tr>
1832 <td class="filename"><a href='download.php?file=miniupnpd-1.6.tar.gz'>miniupnpd-1.6.tar.gz</a></td>
1833 <td class="filesize">124917</td>
1834 <td class="filedate">25/07/2011 16:37:57 +0000</td>
1835 <td class="comment">MiniUPnP daemon release source code</td>
1836 <td></td>
1837</tr>
1838<tr>
1839 <td class="filename"><a href='download.php?file=minidlna_1.0.21.minissdp1.patch'>minidlna_1.0.21.minissdp1.patch</a></td>
1840 <td class="filesize">7598</td>
1841 <td class="filedate">25/07/2011 14:57:50 +0000</td>
1842 <td class="comment">Patch for MiniDLNA to use miniSSDPD</td>
1843 <td></td>
1844</tr>
1845<tr>
1846 <td class="filename"><a href='download.php?file=libnatpmp-20110715.tar.gz'>libnatpmp-20110715.tar.gz</a></td>
1847 <td class="filesize">17943</td>
1848 <td class="filedate">15/07/2011 08:31:40 +0000</td>
1849 <td class="comment">libnatpmp source code</td>
1850 <td></td>
1851</tr>
1852<tr>
1853 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110715.tar.gz'>miniupnpd-1.5.20110715.tar.gz</a></td>
1854 <td class="filesize">124519</td>
1855 <td class="filedate">15/07/2011 07:55:17 +0000</td>
1856 <td class="comment">MiniUPnP daemon source code</td>
1857 <td></td>
1858</tr>
1859<tr>
1860 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20110714.zip'>upnpc-exe-win32-20110714.zip</a></td>
1861 <td class="filesize">94236</td>
1862 <td class="filedate">13/07/2011 23:16:01 +0000</td>
1863 <td class="comment">Windows executable</td>
1864 <td></td>
1865</tr>
1866<tr>
1867 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110623.tar.gz'>miniupnpd-1.5.20110623.tar.gz</a></td>
1868 <td class="filesize">123529</td>
1869 <td class="filedate">22/06/2011 22:29:15 +0000</td>
1870 <td class="comment">MiniUPnP daemon source code</td>
1871 <td></td>
1872</tr>
1873<tr>
1874 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110620.tar.gz'>miniupnpd-1.5.20110620.tar.gz</a></td>
1875 <td class="filesize">123221</td>
1876 <td class="filedate">20/06/2011 14:11:11 +0000</td>
1877 <td class="comment">MiniUPnP daemon source code</td>
1878 <td></td>
1879</tr>
1880<tr>
1881 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110618.tar.gz'>miniupnpd-1.5.20110618.tar.gz</a></td>
1882 <td class="filesize">123176</td>
1883 <td class="filedate">17/06/2011 23:29:18 +0000</td>
1884 <td class="comment">MiniUPnP daemon source code</td>
1885 <td></td>
1886</tr>
1887<tr>
1888 <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110618.tar.gz'>miniupnpc-1.5.20110618.tar.gz</a></td>
1889 <td class="filesize">66401</td>
1890 <td class="filedate">17/06/2011 23:29:17 +0000</td>
1891 <td class="comment">MiniUPnP client source code</td>
1892 <td></td>
1893</tr>
1894<tr>
1895 <td class="filename"><a href='download.php?file=libnatpmp-20110618.tar.gz'>libnatpmp-20110618.tar.gz</a></td>
1896 <td class="filesize">17901</td>
1897 <td class="filedate">17/06/2011 23:29:16 +0000</td>
1898 <td class="comment">libnatpmp source code</td>
1899 <td></td>
1900</tr>
1901<tr>
1902 <td class="filename"><a href='download.php?file=minissdpd-1.0.20110618.tar.gz'>minissdpd-1.0.20110618.tar.gz</a></td>
1903 <td class="filesize">15193</td>
1904 <td class="filedate">17/06/2011 23:29:16 +0000</td>
1905 <td class="comment">MiniSSDPd source code</td>
1906 <td></td>
1907</tr>
1908<tr>
1909 <td class="filename" colspan="2"><a href='download.php?file=minidlna_cvs20110529_minissdp1.patch'>minidlna_cvs20110529_minissdp1.patch</a></td>
1910 <td class="filedate">29/05/2011 21:19:09 +0000</td>
1911 <td class="comment">Patch for MiniDLNA to use miniSSDPD</td>
1912 <td></td>
1913</tr>
1914<tr>
1915 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110528.tar.gz'>miniupnpd-1.5.20110528.tar.gz</a></td>
1916 <td class="filesize">121985</td>
1917 <td class="filedate">28/05/2011 09:39:04 +0000</td>
1918 <td class="comment">MiniUPnP daemon source code</td>
1919 <td></td>
1920</tr>
1921<tr>
1922 <td class="filename"><a href='download.php?file=minidlna_1.0.19_minissdp1.patch'>minidlna_1.0.19_minissdp1.patch</a></td>
1923 <td class="filesize">9080</td>
1924 <td class="filedate">27/05/2011 09:55:04 +0000</td>
1925 <td class="comment">Patch for MiniDLNA to use miniSSDPD</td>
1926 <td></td>
1927</tr>
1928<tr>
1929 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110527.tar.gz'>miniupnpd-1.5.20110527.tar.gz</a></td>
1930 <td class="filesize">120896</td>
1931 <td class="filedate">27/05/2011 08:28:35 +0000</td>
1932 <td class="comment">MiniUPnP daemon source code</td>
1933 <td></td>
1934</tr>
1935<tr>
1936 <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110527.tar.gz'>miniupnpc-1.5.20110527.tar.gz</a></td>
1937 <td class="filesize">66279</td>
1938 <td class="filedate">27/05/2011 08:28:34 +0000</td>
1939 <td class="comment">MiniUPnP client source code</td>
1940 <td></td>
1941</tr>
1942<tr>
1943 <td class="filename"><a href='download.php?file=libnatpmp-20110527.tar.gz'>libnatpmp-20110527.tar.gz</a></td>
1944 <td class="filesize">17627</td>
1945 <td class="filedate">27/05/2011 08:28:33 +0000</td>
1946 <td class="comment">libnatpmp source code</td>
1947 <td></td>
1948</tr>
1949<tr>
1950 <td class="filename"><a href='download.php?file=minissdpd-1.0.20110523.tar.gz'>minissdpd-1.0.20110523.tar.gz</a></td>
1951 <td class="filesize">15024</td>
1952 <td class="filedate">23/05/2011 12:55:31 +0000</td>
1953 <td class="comment">MiniSSDPd source code</td>
1954 <td></td>
1955</tr>
1956<tr>
1957 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110520.tar.gz'>miniupnpd-1.5.20110520.tar.gz</a></td>
1958 <td class="filesize">119227</td>
1959 <td class="filedate">20/05/2011 18:00:41 +0000</td>
1960 <td class="comment">MiniUPnP daemon source code</td>
1961 <td></td>
1962</tr>
1963<tr>
1964 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110519.tar.gz'>miniupnpd-1.5.20110519.tar.gz</a></td>
1965 <td class="filesize">114735</td>
1966 <td class="filedate">18/05/2011 22:29:06 +0000</td>
1967 <td class="comment">MiniUPnP daemon source code</td>
1968 <td></td>
1969</tr>
1970<tr>
1971 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110516.tar.gz'>miniupnpd-1.5.20110516.tar.gz</a></td>
1972 <td class="filesize">113348</td>
1973 <td class="filedate">16/05/2011 09:32:51 +0000</td>
1974 <td class="comment">MiniUPnP daemon source code</td>
1975 <td></td>
1976</tr>
1977<tr>
1978 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110515.tar.gz'>miniupnpd-1.5.20110515.tar.gz</a></td>
1979 <td class="filesize">113135</td>
1980 <td class="filedate">15/05/2011 21:51:29 +0000</td>
1981 <td class="comment">MiniUPnP daemon source code</td>
1982 <td></td>
1983</tr>
1984<tr>
1985 <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110515.tar.gz'>miniupnpc-1.5.20110515.tar.gz</a></td>
1986 <td class="filesize">66112</td>
1987 <td class="filedate">15/05/2011 21:51:28 +0000</td>
1988 <td class="comment">MiniUPnP client source code</td>
1989 <td></td>
1990</tr>
1991<tr>
1992 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110513.tar.gz'>miniupnpd-1.5.20110513.tar.gz</a></td>
1993 <td class="filesize">111029</td>
1994 <td class="filedate">13/05/2011 14:03:12 +0000</td>
1995 <td class="comment">MiniUPnP daemon source code</td>
1996 <td></td>
1997</tr>
1998<tr>
1999 <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110506.tar.gz'>miniupnpc-1.5.20110506.tar.gz</a></td>
2000 <td class="filesize">65536</td>
2001 <td class="filedate">06/05/2011 16:35:38 +0000</td>
2002 <td class="comment">MiniUPnP client source code</td>
2003 <td></td>
2004</tr>
2005<tr>
2006 <td class="filename"><a href='download.php?file=miniupnpc-1.4-v6.20100505.zip'>miniupnpc-1.4-v6.20100505.zip</a></td>
2007 <td class="filesize">91833</td>
2008 <td class="filedate">18/04/2011 20:14:11 +0000</td>
2009 <td class="comment"></td>
2010 <td></td>
2011</tr>
2012<tr>
2013 <td class="filename"><a href='download.php?file=miniupnpd-1.4-v6.20100823.zip'>miniupnpd-1.4-v6.20100823.zip</a></td>
2014 <td class="filesize">222235</td>
2015 <td class="filedate">18/04/2011 20:14:07 +0000</td>
2016 <td class="comment"></td>
2017 <td></td>
2018</tr>
2019<tr>
2020 <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110418.tar.gz'>miniupnpc-1.5.20110418.tar.gz</a></td>
2021 <td class="filesize">61820</td>
2022 <td class="filedate">18/04/2011 20:09:22 +0000</td>
2023 <td class="comment">MiniUPnP client source code</td>
2024 <td></td>
2025</tr>
2026<tr>
2027 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20110418.zip'>upnpc-exe-win32-20110418.zip</a></td>
2028 <td class="filesize">94183</td>
2029 <td class="filedate">18/04/2011 17:53:26 +0000</td>
2030 <td class="comment">Windows executable</td>
2031 <td></td>
2032</tr>
2033<tr>
2034 <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110314.tar.gz'>miniupnpc-1.5.20110314.tar.gz</a></td>
2035 <td class="filesize">57210</td>
2036 <td class="filedate">14/03/2011 14:27:29 +0000</td>
2037 <td class="comment">MiniUPnP client source code</td>
2038 <td></td>
2039</tr>
2040<tr>
2041 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110309.tar.gz'>miniupnpd-1.5.20110309.tar.gz</a></td>
2042 <td class="filesize">100073</td>
2043 <td class="filedate">09/03/2011 15:36:12 +0000</td>
2044 <td class="comment">MiniUPnP daemon source code</td>
2045 <td></td>
2046</tr>
2047<tr>
2048 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110302.tar.gz'>miniupnpd-1.5.20110302.tar.gz</a></td>
2049 <td class="filesize">100756</td>
2050 <td class="filedate">02/03/2011 16:17:44 +0000</td>
2051 <td class="comment">MiniUPnP daemon source code</td>
2052 <td></td>
2053</tr>
2054<tr>
2055 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110221.tar.gz'>miniupnpd-1.5.20110221.tar.gz</a></td>
2056 <td class="filesize">100092</td>
2057 <td class="filedate">20/02/2011 23:48:17 +0000</td>
2058 <td class="comment">MiniUPnP daemon source code</td>
2059 <td></td>
2060</tr>
2061<tr>
2062 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20110215.zip'>upnpc-exe-win32-20110215.zip</a></td>
2063 <td class="filesize">55409</td>
2064 <td class="filedate">15/02/2011 23:05:00 +0000</td>
2065 <td class="comment">Windows executable</td>
2066 <td></td>
2067</tr>
2068<tr>
2069 <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110215.tar.gz'>miniupnpc-1.5.20110215.tar.gz</a></td>
2070 <td class="filesize">54880</td>
2071 <td class="filedate">15/02/2011 11:16:04 +0000</td>
2072 <td class="comment">MiniUPnP client source code</td>
2073 <td></td>
2074</tr>
2075<tr>
2076 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110214.tar.gz'>miniupnpd-1.5.20110214.tar.gz</a></td>
2077 <td class="filesize">99629</td>
2078 <td class="filedate">14/02/2011 18:00:43 +0000</td>
2079 <td class="comment">MiniUPnP daemon source code</td>
2080 <td></td>
2081</tr>
2082<tr>
2083 <td class="filename"><a href='download.php?file=minidlna_1.0.18_minissdp1.patch'>minidlna_1.0.18_minissdp1.patch</a></td>
2084 <td class="filesize">9747</td>
2085 <td class="filedate">02/02/2011 15:12:19 +0000</td>
2086 <td class="comment">Patch for MiniDLNA to use miniSSDPD</td>
2087 <td></td>
2088</tr>
2089<tr>
2090 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110127.tar.gz'>miniupnpd-1.5.20110127.tar.gz</a></td>
2091 <td class="filesize">97421</td>
2092 <td class="filedate">27/01/2011 17:51:25 +0000</td>
2093 <td class="comment">MiniUPnP daemon source code</td>
2094 <td></td>
2095</tr>
2096<tr>
2097 <td class="filename"><a href='download.php?file=miniupnpd-1.5.tar.gz'>miniupnpd-1.5.tar.gz</a></td>
2098 <td class="filesize">98993</td>
2099 <td class="filedate">04/01/2011 09:45:10 +0000</td>
2100 <td class="comment">MiniUPnP daemon release source code</td>
2101 <td></td>
2102</tr>
2103<tr>
2104 <td class="filename"><a href='download.php?file=miniupnpc-1.5.tar.gz'>miniupnpc-1.5.tar.gz</a></td>
2105 <td class="filesize">53309</td>
2106 <td class="filedate">04/01/2011 09:45:06 +0000</td>
2107 <td class="comment">MiniUPnP client release source code</td>
2108 <td></td>
2109</tr>
2110<tr>
2111 <td class="filename"><a href='download.php?file=libnatpmp-20110103.tar.gz'>libnatpmp-20110103.tar.gz</a></td>
2112 <td class="filesize">17529</td>
2113 <td class="filedate">03/01/2011 17:33:16 +0000</td>
2114 <td class="comment">libnatpmp source code</td>
2115 <td></td>
2116</tr>
2117<tr>
2118 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20101221.tar.gz'>miniupnpc-1.4.20101221.tar.gz</a></td>
2119 <td class="filesize">52342</td>
2120 <td class="filedate">21/12/2010 16:15:38 +0000</td>
2121 <td class="comment">MiniUPnP client source code</td>
2122 <td></td>
2123</tr>
2124<tr>
2125 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20101213.zip'>upnpc-exe-win32-20101213.zip</a></td>
2126 <td class="filesize">52359</td>
2127 <td class="filedate">12/12/2010 23:44:01 +0000</td>
2128 <td class="comment">Windows executable</td>
2129 <td></td>
2130</tr>
2131<tr>
2132 <td class="filename"><a href='download.php?file=libnatpmp-20101211.tar.gz'>libnatpmp-20101211.tar.gz</a></td>
2133 <td class="filesize">17324</td>
2134 <td class="filedate">11/12/2010 17:20:36 +0000</td>
2135 <td class="comment">libnatpmp source code</td>
2136 <td></td>
2137</tr>
2138<tr>
2139 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20101209.tar.gz'>miniupnpc-1.4.20101209.tar.gz</a></td>
2140 <td class="filesize">51900</td>
2141 <td class="filedate">09/12/2010 16:17:30 +0000</td>
2142 <td class="comment">MiniUPnP client source code</td>
2143 <td></td>
2144</tr>
2145<tr>
2146 <td class="filename"><a href='download.php?file=miniupnpd-1.4.20100921.tar.gz'>miniupnpd-1.4.20100921.tar.gz</a></td>
2147 <td class="filesize">95483</td>
2148 <td class="filedate">21/09/2010 15:50:00 +0000</td>
2149 <td class="comment">MiniUPnP daemon source code</td>
2150 <td></td>
2151</tr>
2152<tr>
2153 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20100825.zip'>upnpc-exe-win32-20100825.zip</a></td>
2154 <td class="filesize">50636</td>
2155 <td class="filedate">25/08/2010 08:42:59 +0000</td>
2156 <td class="comment">Windows executable</td>
2157 <td></td>
2158</tr>
2159<tr>
2160 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100609.tar.gz'>miniupnpc-1.4.20100609.tar.gz</a></td>
2161 <td class="filesize">50390</td>
2162 <td class="filedate">09/06/2010 11:03:11 +0000</td>
2163 <td class="comment">MiniUPnP client source code</td>
2164 <td></td>
2165</tr>
2166<tr>
2167 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20100513.zip'>upnpc-exe-win32-20100513.zip</a></td>
2168 <td class="filesize">50950</td>
2169 <td class="filedate">13/05/2010 16:54:33 +0000</td>
2170 <td class="comment">Windows executable</td>
2171 <td></td>
2172</tr>
2173<tr>
2174 <td class="filename"><a href='download.php?file=miniupnpd-1.4.20100511.tar.gz'>miniupnpd-1.4.20100511.tar.gz</a></td>
2175 <td class="filesize">93281</td>
2176 <td class="filedate">11/05/2010 16:22:33 +0000</td>
2177 <td class="comment">MiniUPnP daemon source code</td>
2178 <td></td>
2179</tr>
2180<tr>
2181 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20100418.zip'>upnpc-exe-win32-20100418.zip</a></td>
2182 <td class="filesize">40758</td>
2183 <td class="filedate">17/04/2010 23:00:37 +0000</td>
2184 <td class="comment">Windows executable</td>
2185 <td></td>
2186</tr>
2187<tr>
2188 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100418.tar.gz'>miniupnpc-1.4.20100418.tar.gz</a></td>
2189 <td class="filesize">50245</td>
2190 <td class="filedate">17/04/2010 22:18:31 +0000</td>
2191 <td class="comment">MiniUPnP client source code</td>
2192 <td></td>
2193</tr>
2194<tr>
2195 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100412.tar.gz'>miniupnpc-1.4.20100412.tar.gz</a></td>
2196 <td class="filesize">50145</td>
2197 <td class="filedate">12/04/2010 20:42:53 +0000</td>
2198 <td class="comment">MiniUPnP client source code</td>
2199 <td></td>
2200</tr>
2201<tr>
2202 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100407.tar.gz'>miniupnpc-1.4.20100407.tar.gz</a></td>
2203 <td class="filesize">49756</td>
2204 <td class="filedate">07/04/2010 10:05:08 +0000</td>
2205 <td class="comment">MiniUPnP client source code</td>
2206 <td></td>
2207</tr>
2208<tr>
2209 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100405.tar.gz'>miniupnpc-1.4.20100405.tar.gz</a></td>
2210 <td class="filesize">49549</td>
2211 <td class="filedate">05/04/2010 14:34:38 +0000</td>
2212 <td class="comment">MiniUPnP client source code</td>
2213 <td></td>
2214</tr>
2215<tr>
2216 <td class="filename"><a href='download.php?file=miniupnpd-1.4.20100308.tar.gz'>miniupnpd-1.4.20100308.tar.gz</a></td>
2217 <td class="filesize">92889</td>
2218 <td class="filedate">08/03/2010 17:18:00 +0000</td>
2219 <td class="comment">MiniUPnP daemon source code</td>
2220 <td></td>
2221</tr>
2222<tr>
2223 <td class="filename"><a href='download.php?file=libnatpmp-20100202.tar.gz'>libnatpmp-20100202.tar.gz</a></td>
2224 <td class="filesize">17231</td>
2225 <td class="filedate">02/02/2010 18:41:13 +0000</td>
2226 <td class="comment">libnatpmp source code</td>
2227 <td></td>
2228</tr>
2229<tr>
2230 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100202.tar.gz'>miniupnpc-1.4.20100202.tar.gz</a></td>
2231 <td class="filesize">46710</td>
2232 <td class="filedate">02/02/2010 18:41:13 +0000</td>
2233 <td class="comment">MiniUPnP client source code</td>
2234 <td></td>
2235</tr>
2236<tr>
2237 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100106.tar.gz'>miniupnpc-1.4.20100106.tar.gz</a></td>
2238 <td class="filesize">46659</td>
2239 <td class="filedate">06/01/2010 10:08:21 +0000</td>
2240 <td class="comment">MiniUPnP client source code</td>
2241 <td></td>
2242</tr>
2243<tr>
2244 <td class="filename"><a href='download.php?file=miniupnpd-1.4.20091222.tar.gz'>miniupnpd-1.4.20091222.tar.gz</a></td>
2245 <td class="filesize">90993</td>
2246 <td class="filedate">22/12/2009 17:23:48 +0000</td>
2247 <td class="comment">MiniUPnP daemon source code</td>
2248 <td></td>
2249</tr>
2250<tr>
2251 <td class="filename"><a href='download.php?file=libnatpmp-20091219.tar.gz'>libnatpmp-20091219.tar.gz</a></td>
2252 <td class="filesize">16839</td>
2253 <td class="filedate">19/12/2009 14:35:22 +0000</td>
2254 <td class="comment">libnatpmp source code</td>
2255 <td></td>
2256</tr>
2257<tr>
2258 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20091213.tar.gz'>miniupnpc-1.4.20091213.tar.gz</a></td>
2259 <td class="filesize">46510</td>
2260 <td class="filedate">12/12/2009 23:05:40 +0000</td>
2261 <td class="comment">MiniUPnP client source code</td>
2262 <td></td>
2263</tr>
2264<tr>
2265 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20091211.tar.gz'>miniupnpc-1.4.20091211.tar.gz</a></td>
2266 <td class="filesize">45852</td>
2267 <td class="filedate">11/12/2009 16:43:01 +0000</td>
2268 <td class="comment">MiniUPnP client source code</td>
2269 <td></td>
2270</tr>
2271<tr>
2272 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20091210.zip'>upnpc-exe-win32-20091210.zip</a></td>
2273 <td class="filesize">38666</td>
2274 <td class="filedate">10/12/2009 18:50:27 +0000</td>
2275 <td class="comment">Windows executable</td>
2276 <td></td>
2277</tr>
2278<tr>
2279 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20091208.tar.gz'>miniupnpc-1.4.20091208.tar.gz</a></td>
2280 <td class="filesize">43392</td>
2281 <td class="filedate">08/12/2009 10:58:26 +0000</td>
2282 <td class="comment">MiniUPnP client source code</td>
2283 <td></td>
2284</tr>
2285<tr>
2286 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20091203.tar.gz'>miniupnpc-1.4.20091203.tar.gz</a></td>
2287 <td class="filesize">42040</td>
2288 <td class="filedate">03/12/2009 13:56:28 +0000</td>
2289 <td class="comment">MiniUPnP client source code</td>
2290 <td></td>
2291</tr>
2292<tr>
2293 <td class="filename"><a href='download.php?file=miniupnpd-1.4.20091106.tar.gz'>miniupnpd-1.4.20091106.tar.gz</a></td>
2294 <td class="filesize">90787</td>
2295 <td class="filedate">06/11/2009 21:18:50 +0000</td>
2296 <td class="comment">MiniUPnP daemon source code</td>
2297 <td></td>
2298</tr>
2299<tr>
2300 <td class="filename"><a href='download.php?file=miniupnpd-1.4.tar.gz'>miniupnpd-1.4.tar.gz</a></td>
2301 <td class="filesize">90071</td>
2302 <td class="filedate">30/10/2009 09:20:05 +0000</td>
2303 <td class="comment">MiniUPnP daemon release source code</td>
2304 <td></td>
2305</tr>
2306<tr>
2307 <td class="filename"><a href='download.php?file=miniupnpc-1.4.tar.gz'>miniupnpc-1.4.tar.gz</a></td>
2308 <td class="filesize">41790</td>
2309 <td class="filedate">30/10/2009 09:20:04 +0000</td>
2310 <td class="comment">MiniUPnP client release source code</td>
2311 <td></td>
2312</tr>
2313<tr>
2314 <td class="filename"><a href='download.php?file=miniupnpc-20091016.tar.gz'>miniupnpc-20091016.tar.gz</a></td>
2315 <td class="filesize">41792</td>
2316 <td class="filedate">16/10/2009 09:04:35 +0000</td>
2317 <td class="comment">MiniUPnP client source code</td>
2318 <td></td>
2319</tr>
2320<tr>
2321 <td class="filename"><a href='download.php?file=miniupnpd-20091010.tar.gz'>miniupnpd-20091010.tar.gz</a></td>
2322 <td class="filesize">90043</td>
2323 <td class="filedate">10/10/2009 19:21:30 +0000</td>
2324 <td class="comment">MiniUPnP daemon source code</td>
2325 <td></td>
2326</tr>
2327<tr>
2328 <td class="filename"><a href='download.php?file=miniupnpc-20091010.tar.gz'>miniupnpc-20091010.tar.gz</a></td>
2329 <td class="filesize">41671</td>
2330 <td class="filedate">10/10/2009 19:21:28 +0000</td>
2331 <td class="comment">MiniUPnP client source code</td>
2332 <td></td>
2333</tr>
2334<tr>
2335 <td class="filename"><a href='download.php?file=miniupnpd-20090921.tar.gz'>miniupnpd-20090921.tar.gz</a></td>
2336 <td class="filesize">89476</td>
2337 <td class="filedate">21/09/2009 13:00:04 +0000</td>
2338 <td class="comment">MiniUPnP daemon source code</td>
2339 <td></td>
2340</tr>
2341<tr>
2342 <td class="filename"><a href='download.php?file=miniupnpc-20090921.tar.gz'>miniupnpc-20090921.tar.gz</a></td>
2343 <td class="filesize">41630</td>
2344 <td class="filedate">21/09/2009 13:00:03 +0000</td>
2345 <td class="comment">MiniUPnP client source code</td>
2346 <td></td>
2347</tr>
2348<tr>
2349 <td class="filename"><a href='download.php?file=miniupnpd-20090904.tar.gz'>miniupnpd-20090904.tar.gz</a></td>
2350 <td class="filesize">89344</td>
2351 <td class="filedate">04/09/2009 16:24:26 +0000</td>
2352 <td class="comment">MiniUPnP daemon source code</td>
2353 <td></td>
2354</tr>
2355<tr>
2356 <td class="filename"><a href='download.php?file=miniupnpd-20090820.tar.gz'>miniupnpd-20090820.tar.gz</a></td>
2357 <td class="filesize">89149</td>
2358 <td class="filedate">20/08/2009 09:35:58 +0000</td>
2359 <td class="comment">MiniUPnP daemon source code</td>
2360 <td></td>
2361</tr>
2362<tr>
2363 <td class="filename"><a href='download.php?file=miniupnpc-20090807.tar.gz'>miniupnpc-20090807.tar.gz</a></td>
2364 <td class="filesize">41288</td>
2365 <td class="filedate">07/08/2009 14:46:11 +0000</td>
2366 <td class="comment">MiniUPnP client source code</td>
2367 <td></td>
2368</tr>
2369<tr>
2370 <td class="filename"><a href='download.php?file=miniupnpc-20090729.tar.gz'>miniupnpc-20090729.tar.gz</a></td>
2371 <td class="filesize">40588</td>
2372 <td class="filedate">29/07/2009 08:47:43 +0000</td>
2373 <td class="comment">MiniUPnP client source code</td>
2374 <td></td>
2375</tr>
2376<tr>
2377 <td class="filename"><a href='download.php?file=xchat-upnp20061022.patch'>xchat-upnp20061022.patch</a></td>
2378 <td class="filesize">10258</td>
2379 <td class="filedate">17/07/2009 15:49:46 +0000</td>
2380 <td class="comment">Patch to add UPnP capabilities to xchat</td>
2381 <td></td>
2382</tr>
2383<tr>
2384 <td class="filename"><a href='download.php?file=miniupnpc-20090713.tar.gz'>miniupnpc-20090713.tar.gz</a></td>
2385 <td class="filesize">40206</td>
2386 <td class="filedate">13/07/2009 08:53:49 +0000</td>
2387 <td class="comment">MiniUPnP client source code</td>
2388 <td></td>
2389</tr>
2390<tr>
2391 <td class="filename"><a href='download.php?file=libnatpmp-20090713.tar.gz'>libnatpmp-20090713.tar.gz</a></td>
2392 <td class="filesize">14262</td>
2393 <td class="filedate">13/07/2009 08:53:49 +0000</td>
2394 <td class="comment">libnatpmp source code</td>
2395 <td></td>
2396</tr>
2397<tr>
2398 <td class="filename"><a href='download.php?file=miniupnpd-20090605.tar.gz'>miniupnpd-20090605.tar.gz</a></td>
2399 <td class="filesize">83774</td>
2400 <td class="filedate">04/06/2009 23:32:20 +0000</td>
2401 <td class="comment">MiniUPnP daemon source code</td>
2402 <td></td>
2403</tr>
2404<tr>
2405 <td class="filename"><a href='download.php?file=miniupnpc-20090605.tar.gz'>miniupnpc-20090605.tar.gz</a></td>
2406 <td class="filesize">40077</td>
2407 <td class="filedate">04/06/2009 23:32:16 +0000</td>
2408 <td class="comment">MiniUPnP client source code</td>
2409 <td></td>
2410</tr>
2411<tr>
2412 <td class="filename"><a href='download.php?file=libnatpmp-20090605.tar.gz'>libnatpmp-20090605.tar.gz</a></td>
2413 <td class="filesize">13817</td>
2414 <td class="filedate">04/06/2009 23:32:15 +0000</td>
2415 <td class="comment">libnatpmp source code</td>
2416 <td></td>
2417</tr>
2418<tr>
2419 <td class="filename"><a href='download.php?file=miniupnpd-20090516.tar.gz'>miniupnpd-20090516.tar.gz</a></td>
2420 <td class="filesize">83689</td>
2421 <td class="filedate">16/05/2009 08:47:31 +0000</td>
2422 <td class="comment">MiniUPnP daemon source code</td>
2423 <td></td>
2424</tr>
2425<tr>
2426 <td class="filename"><a href='download.php?file=miniupnpc-1.3.tar.gz'>miniupnpc-1.3.tar.gz</a></td>
2427 <td class="filesize">40058</td>
2428 <td class="filedate">17/04/2009 21:27:55 +0000</td>
2429 <td class="comment">MiniUPnP client release source code</td>
2430 <td></td>
2431</tr>
2432<tr>
2433 <td class="filename"><a href='download.php?file=miniupnpd-1.3.tar.gz'>miniupnpd-1.3.tar.gz</a></td>
2434 <td class="filesize">83464</td>
2435 <td class="filedate">17/04/2009 20:11:21 +0000</td>
2436 <td class="comment">MiniUPnP daemon release source code</td>
2437 <td></td>
2438</tr>
2439<tr>
2440 <td class="filename"><a href='download.php?file=libnatpmp-20090310.tar.gz'>libnatpmp-20090310.tar.gz</a></td>
2441 <td class="filesize">11847</td>
2442 <td class="filedate">10/03/2009 10:19:45 +0000</td>
2443 <td class="comment">libnatpmp source code</td>
2444 <td></td>
2445</tr>
2446<tr>
2447 <td class="filename"><a href='download.php?file=miniupnpd-20090214.tar.gz'>miniupnpd-20090214.tar.gz</a></td>
2448 <td class="filesize">82921</td>
2449 <td class="filedate">14/02/2009 11:27:03 +0000</td>
2450 <td class="comment">MiniUPnP daemon source code</td>
2451 <td></td>
2452</tr>
2453<tr>
2454 <td class="filename"><a href='download.php?file=miniupnpd-20090213.tar.gz'>miniupnpd-20090213.tar.gz</a></td>
2455 <td class="filesize">82594</td>
2456 <td class="filedate">13/02/2009 19:48:01 +0000</td>
2457 <td class="comment">MiniUPnP daemon source code</td>
2458 <td></td>
2459</tr>
2460<tr>
2461 <td class="filename"><a href='download.php?file=libnatpmp-20090129.tar.gz'>libnatpmp-20090129.tar.gz</a></td>
2462 <td class="filesize">11748</td>
2463 <td class="filedate">29/01/2009 21:50:31 +0000</td>
2464 <td class="comment">libnatpmp source code</td>
2465 <td></td>
2466</tr>
2467<tr>
2468 <td class="filename"><a href='download.php?file=miniupnpc-20090129.tar.gz'>miniupnpc-20090129.tar.gz</a></td>
2469 <td class="filesize">39976</td>
2470 <td class="filedate">29/01/2009 21:50:30 +0000</td>
2471 <td class="comment">MiniUPnP client source code</td>
2472 <td></td>
2473</tr>
2474<tr>
2475 <td class="filename"><a href='download.php?file=miniupnpd-20090129.tar.gz'>miniupnpd-20090129.tar.gz</a></td>
2476 <td class="filesize">82487</td>
2477 <td class="filedate">29/01/2009 21:50:27 +0000</td>
2478 <td class="comment">MiniUPnP daemon source code</td>
2479 <td></td>
2480</tr>
2481<tr>
2482 <td class="filename"><a href='download.php?file=miniupnpd-20081009.tar.gz'>miniupnpd-20081009.tar.gz</a></td>
2483 <td class="filesize">81732</td>
2484 <td class="filedate">09/10/2008 12:53:02 +0000</td>
2485 <td class="comment">MiniUPnP daemon source code</td>
2486 <td></td>
2487</tr>
2488<tr>
2489 <td class="filename"><a href='download.php?file=minissdpd-1.0.tar.gz'>minissdpd-1.0.tar.gz</a></td>
2490 <td class="filesize">12996</td>
2491 <td class="filedate">07/10/2008 14:03:49 +0000</td>
2492 <td class="comment">MiniSSDPd release source code</td>
2493 <td></td>
2494</tr>
2495<tr>
2496 <td class="filename"><a href='download.php?file=miniupnpc-1.2.tar.gz'>miniupnpc-1.2.tar.gz</a></td>
2497 <td class="filesize">38787</td>
2498 <td class="filedate">07/10/2008 14:03:47 +0000</td>
2499 <td class="comment">MiniUPnP client release source code</td>
2500 <td></td>
2501</tr>
2502<tr>
2503 <td class="filename"><a href='download.php?file=miniupnpd-1.2.tar.gz'>miniupnpd-1.2.tar.gz</a></td>
2504 <td class="filesize">81025</td>
2505 <td class="filedate">07/10/2008 14:03:45 +0000</td>
2506 <td class="comment">MiniUPnP daemon release source code</td>
2507 <td></td>
2508</tr>
2509<tr>
2510 <td class="filename"><a href='download.php?file=miniupnpd-20081006.tar.gz'>miniupnpd-20081006.tar.gz</a></td>
2511 <td class="filesize">80510</td>
2512 <td class="filedate">06/10/2008 15:50:34 +0000</td>
2513 <td class="comment">MiniUPnP daemon source code</td>
2514 <td></td>
2515</tr>
2516<tr>
2517 <td class="filename"><a href='download.php?file=minissdpd-20081006.tar.gz'>minissdpd-20081006.tar.gz</a></td>
2518 <td class="filesize">12230</td>
2519 <td class="filedate">06/10/2008 15:50:33 +0000</td>
2520 <td class="comment">MiniSSDPd source code</td>
2521 <td></td>
2522</tr>
2523<tr>
2524 <td class="filename"><a href='download.php?file=libnatpmp-20081006.tar.gz'>libnatpmp-20081006.tar.gz</a></td>
2525 <td class="filesize">11710</td>
2526 <td class="filedate">06/10/2008 15:50:31 +0000</td>
2527 <td class="comment">libnatpmp source code</td>
2528 <td></td>
2529</tr>
2530<tr>
2531 <td class="filename" colspan="2"><a href='download.php?file=mediatomb_minissdp-20081006.patch'>mediatomb_minissdp-20081006.patch</a></td>
2532 <td class="filedate">06/10/2008 15:48:18 +0000</td>
2533 <td class="comment"></td>
2534 <td></td>
2535</tr>
2536<tr>
2537 <td class="filename"><a href='download.php?file=miniupnpc-20081002.tar.gz'>miniupnpc-20081002.tar.gz</a></td>
2538 <td class="filesize">38291</td>
2539 <td class="filedate">02/10/2008 09:20:18 +0000</td>
2540 <td class="comment">MiniUPnP client source code</td>
2541 <td></td>
2542</tr>
2543<tr>
2544 <td class="filename"><a href='download.php?file=miniupnpd-20081001.tar.gz'>miniupnpd-20081001.tar.gz</a></td>
2545 <td class="filesize">79696</td>
2546 <td class="filedate">01/10/2008 13:11:20 +0000</td>
2547 <td class="comment">MiniUPnP daemon source code</td>
2548 <td></td>
2549</tr>
2550<tr>
2551 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20080925.zip'>upnpc-exe-win32-20080925.zip</a></td>
2552 <td class="filesize">36602</td>
2553 <td class="filedate">25/09/2008 06:59:33 +0000</td>
2554 <td class="comment">Windows executable</td>
2555 <td></td>
2556</tr>
2557<tr>
2558 <td class="filename"><a href='download.php?file=miniupnpd-20080710.tar.gz'>miniupnpd-20080710.tar.gz</a></td>
2559 <td class="filesize">78898</td>
2560 <td class="filedate">10/07/2008 09:38:41 +0000</td>
2561 <td class="comment">MiniUPnP daemon source code</td>
2562 <td></td>
2563</tr>
2564<tr>
2565 <td class="filename"><a href='download.php?file=libnatpmp-20080707.tar.gz'>libnatpmp-20080707.tar.gz</a></td>
2566 <td class="filesize">11679</td>
2567 <td class="filedate">06/07/2008 22:05:23 +0000</td>
2568 <td class="comment">libnatpmp source code</td>
2569 <td></td>
2570</tr>
2571<tr>
2572 <td class="filename"><a href='download.php?file=miniupnpc-1.1.tar.gz'>miniupnpc-1.1.tar.gz</a></td>
2573 <td class="filesize">38235</td>
2574 <td class="filedate">04/07/2008 16:45:24 +0000</td>
2575 <td class="comment">MiniUPnP client release source code</td>
2576 <td></td>
2577</tr>
2578<tr>
2579 <td class="filename"><a href='download.php?file=miniupnpc-20080703.tar.gz'>miniupnpc-20080703.tar.gz</a></td>
2580 <td class="filesize">38204</td>
2581 <td class="filedate">03/07/2008 15:47:37 +0000</td>
2582 <td class="comment">MiniUPnP client source code</td>
2583 <td></td>
2584</tr>
2585<tr>
2586 <td class="filename"><a href='download.php?file=libnatpmp-20080703.tar.gz'>libnatpmp-20080703.tar.gz</a></td>
2587 <td class="filesize">11570</td>
2588 <td class="filedate">03/07/2008 15:47:25 +0000</td>
2589 <td class="comment">libnatpmp source code</td>
2590 <td></td>
2591</tr>
2592<tr>
2593 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20080703.zip'>upnpc-exe-win32-20080703.zip</a></td>
2594 <td class="filesize">36137</td>
2595 <td class="filedate">02/07/2008 23:35:14 +0000</td>
2596 <td class="comment">Windows executable</td>
2597 <td></td>
2598</tr>
2599<tr>
2600 <td class="filename"><a href='download.php?file=libnatpmp-20080702.tar.gz'>libnatpmp-20080702.tar.gz</a></td>
2601 <td class="filesize">8873</td>
2602 <td class="filedate">02/07/2008 17:32:35 +0000</td>
2603 <td class="comment">libnatpmp source code</td>
2604 <td></td>
2605</tr>
2606<tr>
2607 <td class="filename"><a href='download.php?file=libnatpmp-20080630.tar.gz'>libnatpmp-20080630.tar.gz</a></td>
2608 <td class="filesize">8864</td>
2609 <td class="filedate">30/06/2008 14:20:16 +0000</td>
2610 <td class="comment">libnatpmp source code</td>
2611 <td></td>
2612</tr>
2613<tr>
2614 <td class="filename"><a href='download.php?file=libnatpmp-20080529.tar.gz'>libnatpmp-20080529.tar.gz</a></td>
2615 <td class="filesize">7397</td>
2616 <td class="filedate">29/05/2008 09:06:25 +0000</td>
2617 <td class="comment">libnatpmp source code</td>
2618 <td></td>
2619</tr>
2620<tr>
2621 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20080514.zip'>upnpc-exe-win32-20080514.zip</a></td>
2622 <td class="filesize">14227</td>
2623 <td class="filedate">14/05/2008 20:23:19 +0000</td>
2624 <td class="comment">Windows executable</td>
2625 <td></td>
2626</tr>
2627<tr>
2628 <td class="filename"><a href='download.php?file=libnatpmp-20080428.tar.gz'>libnatpmp-20080428.tar.gz</a></td>
2629 <td class="filesize">7295</td>
2630 <td class="filedate">28/04/2008 03:09:14 +0000</td>
2631 <td class="comment">libnatpmp source code</td>
2632 <td></td>
2633</tr>
2634<tr>
2635 <td class="filename"><a href='download.php?file=miniupnpd-20080427.tar.gz'>miniupnpd-20080427.tar.gz</a></td>
2636 <td class="filesize">78765</td>
2637 <td class="filedate">27/04/2008 18:16:36 +0000</td>
2638 <td class="comment">MiniUPnP daemon source code</td>
2639 <td></td>
2640</tr>
2641<tr>
2642 <td class="filename"><a href='download.php?file=miniupnpc-20080427.tar.gz'>miniupnpc-20080427.tar.gz</a></td>
2643 <td class="filesize">37610</td>
2644 <td class="filedate">27/04/2008 18:16:35 +0000</td>
2645 <td class="comment">MiniUPnP client source code</td>
2646 <td></td>
2647</tr>
2648<tr>
2649 <td class="filename"><a href='download.php?file=miniupnpd-1.1.tar.gz'>miniupnpd-1.1.tar.gz</a></td>
2650 <td class="filesize">78594</td>
2651 <td class="filedate">25/04/2008 17:38:05 +0000</td>
2652 <td class="comment">MiniUPnP daemon release source code</td>
2653 <td></td>
2654</tr>
2655<tr>
2656 <td class="filename"><a href='download.php?file=miniupnpc-20080423.tar.gz'>miniupnpc-20080423.tar.gz</a></td>
2657 <td class="filesize">36818</td>
2658 <td class="filedate">23/04/2008 11:57:36 +0000</td>
2659 <td class="comment">MiniUPnP client source code</td>
2660 <td></td>
2661</tr>
2662<tr>
2663 <td class="filename"><a href='download.php?file=miniupnpd-20080308.tar.gz'>miniupnpd-20080308.tar.gz</a></td>
2664 <td class="filesize">75679</td>
2665 <td class="filedate">08/03/2008 11:13:29 +0000</td>
2666 <td class="comment">MiniUPnP daemon source code</td>
2667 <td></td>
2668</tr>
2669<tr>
2670 <td class="filename"><a href='download.php?file=miniupnpd-20080303.tar.gz'>miniupnpd-20080303.tar.gz</a></td>
2671 <td class="filesize">74202</td>
2672 <td class="filedate">03/03/2008 01:43:16 +0000</td>
2673 <td class="comment">MiniUPnP daemon source code</td>
2674 <td></td>
2675</tr>
2676<tr>
2677 <td class="filename"><a href='download.php?file=miniupnpd-20080224.tar.gz'>miniupnpd-20080224.tar.gz</a></td>
2678 <td class="filesize">72773</td>
2679 <td class="filedate">24/02/2008 11:23:17 +0000</td>
2680 <td class="comment">MiniUPnP daemon source code</td>
2681 <td></td>
2682</tr>
2683<tr>
2684 <td class="filename"><a href='download.php?file=miniupnpc-1.0.tar.gz'>miniupnpc-1.0.tar.gz</a></td>
2685 <td class="filesize">36223</td>
2686 <td class="filedate">21/02/2008 13:26:46 +0000</td>
2687 <td class="comment">MiniUPnP client release source code</td>
2688 <td></td>
2689</tr>
2690<tr>
2691 <td class="filename"><a href='download.php?file=miniupnpd-20080221.tar.gz'>miniupnpd-20080221.tar.gz</a></td>
2692 <td class="filesize">70823</td>
2693 <td class="filedate">21/02/2008 10:23:46 +0000</td>
2694 <td class="comment">MiniUPnP daemon source code</td>
2695 <td></td>
2696</tr>
2697<tr>
2698 <td class="filename"><a href='download.php?file=miniupnpc-20080217.tar.gz'>miniupnpc-20080217.tar.gz</a></td>
2699 <td class="filesize">35243</td>
2700 <td class="filedate">16/02/2008 23:47:59 +0000</td>
2701 <td class="comment">MiniUPnP client source code</td>
2702 <td></td>
2703</tr>
2704<tr>
2705 <td class="filename"><a href='download.php?file=miniupnpd-20080207.tar.gz'>miniupnpd-20080207.tar.gz</a></td>
2706 <td class="filesize">70647</td>
2707 <td class="filedate">07/02/2008 21:21:00 +0000</td>
2708 <td class="comment">MiniUPnP daemon source code</td>
2709 <td></td>
2710</tr>
2711<tr>
2712 <td class="filename"><a href='download.php?file=miniupnpc-20080203.tar.gz'>miniupnpc-20080203.tar.gz</a></td>
2713 <td class="filesize">34921</td>
2714 <td class="filedate">03/02/2008 22:28:11 +0000</td>
2715 <td class="comment">MiniUPnP client source code</td>
2716 <td></td>
2717</tr>
2718<tr>
2719 <td class="filename"><a href='download.php?file=miniupnpd-1.0.tar.gz'>miniupnpd-1.0.tar.gz</a></td>
2720 <td class="filesize">69427</td>
2721 <td class="filedate">27/01/2008 22:41:25 +0000</td>
2722 <td class="comment">MiniUPnP daemon release source code</td>
2723 <td></td>
2724</tr>
2725<tr>
2726 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20080118.zip'>upnpc-exe-win32-20080118.zip</a></td>
2727 <td class="filesize">13582</td>
2728 <td class="filedate">18/01/2008 11:42:16 +0000</td>
2729 <td class="comment">Windows executable</td>
2730 <td></td>
2731</tr>
2732<tr>
2733 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC13.tar.gz'>miniupnpd-1.0-RC13.tar.gz</a></td>
2734 <td class="filesize">67892</td>
2735 <td class="filedate">03/01/2008 16:50:21 +0000</td>
2736 <td class="comment">MiniUPnP daemon release source code</td>
2737 <td></td>
2738</tr>
2739<tr>
2740 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC13.tar.gz'>miniupnpc-1.0-RC13.tar.gz</a></td>
2741 <td class="filesize">34820</td>
2742 <td class="filedate">03/01/2008 16:50:20 +0000</td>
2743 <td class="comment">MiniUPnP client release source code</td>
2744 <td></td>
2745</tr>
2746<tr>
2747 <td class="filename"><a href='download.php?file=miniupnpd-20071220.tar.gz'>miniupnpd-20071220.tar.gz</a></td>
2748 <td class="filesize">67211</td>
2749 <td class="filedate">20/12/2007 12:08:34 +0000</td>
2750 <td class="comment">MiniUPnP daemon source code</td>
2751 <td></td>
2752</tr>
2753<tr>
2754 <td class="filename"><a href='download.php?file=miniupnpc-20071219.tar.gz'>miniupnpc-20071219.tar.gz</a></td>
2755 <td class="filesize">34290</td>
2756 <td class="filedate">19/12/2007 18:31:47 +0000</td>
2757 <td class="comment">MiniUPnP client source code</td>
2758 <td></td>
2759</tr>
2760<tr>
2761 <td class="filename"><a href='download.php?file=minissdpd-1.0-RC12.tar.gz'>minissdpd-1.0-RC12.tar.gz</a></td>
2762 <td class="filesize">9956</td>
2763 <td class="filedate">19/12/2007 18:30:12 +0000</td>
2764 <td class="comment">MiniSSDPd release source code</td>
2765 <td></td>
2766</tr>
2767<tr>
2768 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC12.tar.gz'>miniupnpd-1.0-RC12.tar.gz</a></td>
2769 <td class="filesize">66911</td>
2770 <td class="filedate">14/12/2007 17:39:20 +0000</td>
2771 <td class="comment">MiniUPnP daemon release source code</td>
2772 <td></td>
2773</tr>
2774<tr>
2775 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC12.tar.gz'>miniupnpc-1.0-RC12.tar.gz</a></td>
2776 <td class="filesize">32543</td>
2777 <td class="filedate">14/12/2007 17:39:19 +0000</td>
2778 <td class="comment">MiniUPnP client release source code</td>
2779 <td></td>
2780</tr>
2781<tr>
2782 <td class="filename"><a href='download.php?file=miniupnpc-20071213.tar.gz'>miniupnpc-20071213.tar.gz</a></td>
2783 <td class="filesize">32541</td>
2784 <td class="filedate">13/12/2007 17:09:51 +0000</td>
2785 <td class="comment">MiniUPnP client source code</td>
2786 <td></td>
2787</tr>
2788<tr>
2789 <td class="filename"><a href='download.php?file=miniupnpd-20071213.tar.gz'>miniupnpd-20071213.tar.gz</a></td>
2790 <td class="filesize">66826</td>
2791 <td class="filedate">13/12/2007 16:42:50 +0000</td>
2792 <td class="comment">MiniUPnP daemon source code</td>
2793 <td></td>
2794</tr>
2795<tr>
2796 <td class="filename"><a href='download.php?file=libnatpmp-20071213.tar.gz'>libnatpmp-20071213.tar.gz</a></td>
2797 <td class="filesize">5997</td>
2798 <td class="filedate">13/12/2007 14:56:30 +0000</td>
2799 <td class="comment">libnatpmp source code</td>
2800 <td></td>
2801</tr>
2802<tr>
2803 <td class="filename"><a href='download.php?file=libnatpmp-20071202.tar.gz'>libnatpmp-20071202.tar.gz</a></td>
2804 <td class="filesize">5664</td>
2805 <td class="filedate">02/12/2007 00:15:28 +0000</td>
2806 <td class="comment">libnatpmp source code</td>
2807 <td></td>
2808</tr>
2809<tr>
2810 <td class="filename"><a href='download.php?file=miniupnpd-20071103.tar.gz'>miniupnpd-20071103.tar.gz</a></td>
2811 <td class="filesize">65740</td>
2812 <td class="filedate">02/11/2007 23:58:38 +0000</td>
2813 <td class="comment">MiniUPnP daemon source code</td>
2814 <td></td>
2815</tr>
2816<tr>
2817 <td class="filename"><a href='download.php?file=miniupnpd-20071102.tar.gz'>miniupnpd-20071102.tar.gz</a></td>
2818 <td class="filesize">65733</td>
2819 <td class="filedate">02/11/2007 23:05:44 +0000</td>
2820 <td class="comment">MiniUPnP daemon source code</td>
2821 <td></td>
2822</tr>
2823<tr>
2824 <td class="filename"><a href='download.php?file=miniupnpc-20071103.tar.gz'>miniupnpc-20071103.tar.gz</a></td>
2825 <td class="filesize">32239</td>
2826 <td class="filedate">02/11/2007 23:05:34 +0000</td>
2827 <td class="comment">MiniUPnP client source code</td>
2828 <td></td>
2829</tr>
2830<tr>
2831 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC11.tar.gz'>miniupnpd-1.0-RC11.tar.gz</a></td>
2832 <td class="filesize">64828</td>
2833 <td class="filedate">25/10/2007 13:27:18 +0000</td>
2834 <td class="comment">MiniUPnP daemon release source code</td>
2835 <td></td>
2836</tr>
2837<tr>
2838 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC11.tar.gz'>miniupnpc-1.0-RC11.tar.gz</a></td>
2839 <td class="filesize">32161</td>
2840 <td class="filedate">25/10/2007 13:27:17 +0000</td>
2841 <td class="comment">MiniUPnP client release source code</td>
2842 <td></td>
2843</tr>
2844<tr>
2845 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20071025.zip'>upnpc-exe-win32-20071025.zip</a></td>
2846 <td class="filesize">12809</td>
2847 <td class="filedate">24/10/2007 23:15:55 +0000</td>
2848 <td class="comment">Windows executable</td>
2849 <td></td>
2850</tr>
2851<tr>
2852 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC10.tar.gz'>miniupnpd-1.0-RC10.tar.gz</a></td>
2853 <td class="filesize">62674</td>
2854 <td class="filedate">12/10/2007 08:38:33 +0000</td>
2855 <td class="comment">MiniUPnP daemon release source code</td>
2856 <td></td>
2857</tr>
2858<tr>
2859 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC10.tar.gz'>miniupnpc-1.0-RC10.tar.gz</a></td>
2860 <td class="filesize">31962</td>
2861 <td class="filedate">12/10/2007 08:38:31 +0000</td>
2862 <td class="comment">MiniUPnP client release source code</td>
2863 <td></td>
2864</tr>
2865<tr>
2866 <td class="filename"><a href='download.php?file=minissdpd-1.0-RC10.tar.gz'>minissdpd-1.0-RC10.tar.gz</a></td>
2867 <td class="filesize">9517</td>
2868 <td class="filedate">12/10/2007 08:38:30 +0000</td>
2869 <td class="comment">MiniSSDPd release source code</td>
2870 <td></td>
2871</tr>
2872<tr>
2873 <td class="filename"><a href='download.php?file=miniupnpc-20071003.tar.gz'>miniupnpc-20071003.tar.gz</a></td>
2874 <td class="filesize">31199</td>
2875 <td class="filedate">03/10/2007 15:30:13 +0000</td>
2876 <td class="comment">MiniUPnP client source code</td>
2877 <td></td>
2878</tr>
2879<tr>
2880 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20071001.zip'>upnpc-exe-win32-20071001.zip</a></td>
2881 <td class="filesize">12604</td>
2882 <td class="filedate">01/10/2007 17:09:22 +0000</td>
2883 <td class="comment">Windows executable</td>
2884 <td></td>
2885</tr>
2886<tr>
2887 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC9.tar.gz'>miniupnpd-1.0-RC9.tar.gz</a></td>
2888 <td class="filesize">54778</td>
2889 <td class="filedate">27/09/2007 19:38:36 +0000</td>
2890 <td class="comment">MiniUPnP daemon release source code</td>
2891 <td></td>
2892</tr>
2893<tr>
2894 <td class="filename"><a href='download.php?file=minissdpd-1.0-RC9.tar.gz'>minissdpd-1.0-RC9.tar.gz</a></td>
2895 <td class="filesize">9163</td>
2896 <td class="filedate">27/09/2007 17:00:03 +0000</td>
2897 <td class="comment">MiniSSDPd release source code</td>
2898 <td></td>
2899</tr>
2900<tr>
2901 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC9.tar.gz'>miniupnpc-1.0-RC9.tar.gz</a></td>
2902 <td class="filesize">30538</td>
2903 <td class="filedate">27/09/2007 17:00:03 +0000</td>
2904 <td class="comment">MiniUPnP client release source code</td>
2905 <td></td>
2906</tr>
2907<tr>
2908 <td class="filename"><a href='download.php?file=miniupnpd-20070924.tar.gz'>miniupnpd-20070924.tar.gz</a></td>
2909 <td class="filesize">52338</td>
2910 <td class="filedate">24/09/2007 20:26:05 +0000</td>
2911 <td class="comment">MiniUPnP daemon source code</td>
2912 <td></td>
2913</tr>
2914<tr>
2915 <td class="filename"><a href='download.php?file=miniupnpd-20070923.tar.gz'>miniupnpd-20070923.tar.gz</a></td>
2916 <td class="filesize">51060</td>
2917 <td class="filedate">23/09/2007 21:13:34 +0000</td>
2918 <td class="comment">MiniUPnP daemon source code</td>
2919 <td></td>
2920</tr>
2921<tr>
2922 <td class="filename"><a href='download.php?file=miniupnpc-20070923.tar.gz'>miniupnpc-20070923.tar.gz</a></td>
2923 <td class="filesize">30246</td>
2924 <td class="filedate">23/09/2007 21:13:33 +0000</td>
2925 <td class="comment">MiniUPnP client source code</td>
2926 <td></td>
2927</tr>
2928<tr>
2929 <td class="filename"><a href='download.php?file=minissdpd-20070923.tar.gz'>minissdpd-20070923.tar.gz</a></td>
2930 <td class="filesize">8978</td>
2931 <td class="filedate">23/09/2007 21:13:32 +0000</td>
2932 <td class="comment">MiniSSDPd source code</td>
2933 <td></td>
2934</tr>
2935<tr>
2936 <td class="filename"><a href='download.php?file=miniupnpc-20070902.tar.gz'>miniupnpc-20070902.tar.gz</a></td>
2937 <td class="filesize">30205</td>
2938 <td class="filedate">01/09/2007 23:47:23 +0000</td>
2939 <td class="comment">MiniUPnP client source code</td>
2940 <td></td>
2941</tr>
2942<tr>
2943 <td class="filename"><a href='download.php?file=minissdpd-20070902.tar.gz'>minissdpd-20070902.tar.gz</a></td>
2944 <td class="filesize">6539</td>
2945 <td class="filedate">01/09/2007 23:47:20 +0000</td>
2946 <td class="comment">MiniSSDPd source code</td>
2947 <td></td>
2948</tr>
2949<tr>
2950 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC8.tar.gz'>miniupnpd-1.0-RC8.tar.gz</a></td>
2951 <td class="filesize">50952</td>
2952 <td class="filedate">29/08/2007 10:56:09 +0000</td>
2953 <td class="comment">MiniUPnP daemon release source code</td>
2954 <td></td>
2955</tr>
2956<tr>
2957 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC8.tar.gz'>miniupnpc-1.0-RC8.tar.gz</a></td>
2958 <td class="filesize">29312</td>
2959 <td class="filedate">29/08/2007 10:56:08 +0000</td>
2960 <td class="comment">MiniUPnP client release source code</td>
2961 <td></td>
2962</tr>
2963<tr>
2964 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC7.tar.gz'>miniupnpd-1.0-RC7.tar.gz</a></td>
2965 <td class="filesize">50613</td>
2966 <td class="filedate">20/07/2007 00:15:45 +0000</td>
2967 <td class="comment">MiniUPnP daemon release source code</td>
2968 <td></td>
2969</tr>
2970<tr>
2971 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC6.tar.gz'>miniupnpd-1.0-RC6.tar.gz</a></td>
2972 <td class="filesize">49986</td>
2973 <td class="filedate">12/06/2007 17:12:07 +0000</td>
2974 <td class="comment">MiniUPnP daemon release source code</td>
2975 <td></td>
2976</tr>
2977<tr>
2978 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC6.tar.gz'>miniupnpc-1.0-RC6.tar.gz</a></td>
2979 <td class="filesize">29032</td>
2980 <td class="filedate">12/06/2007 17:12:06 +0000</td>
2981 <td class="comment">MiniUPnP client release source code</td>
2982 <td></td>
2983</tr>
2984<tr>
2985 <td class="filename"><a href='download.php?file=miniupnpd-20070607.tar.gz'>miniupnpd-20070607.tar.gz</a></td>
2986 <td class="filesize">49768</td>
2987 <td class="filedate">06/06/2007 23:12:00 +0000</td>
2988 <td class="comment">MiniUPnP daemon source code</td>
2989 <td></td>
2990</tr>
2991<tr>
2992 <td class="filename"><a href='download.php?file=miniupnpd-20070605.tar.gz'>miniupnpd-20070605.tar.gz</a></td>
2993 <td class="filesize">49710</td>
2994 <td class="filedate">05/06/2007 21:01:53 +0000</td>
2995 <td class="comment">MiniUPnP daemon source code</td>
2996 <td></td>
2997</tr>
2998<tr>
2999 <td class="filename"><a href='download.php?file=miniupnpd-20070521.tar.gz'>miniupnpd-20070521.tar.gz</a></td>
3000 <td class="filesize">48374</td>
3001 <td class="filedate">21/05/2007 13:07:43 +0000</td>
3002 <td class="comment">MiniUPnP daemon source code</td>
3003 <td></td>
3004</tr>
3005<tr>
3006 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20070519.zip'>upnpc-exe-win32-20070519.zip</a></td>
3007 <td class="filesize">10836</td>
3008 <td class="filedate">19/05/2007 13:14:15 +0000</td>
3009 <td class="comment">Windows executable</td>
3010 <td></td>
3011</tr>
3012<tr>
3013 <td class="filename"><a href='download.php?file=miniupnpc-20070515.tar.gz'>miniupnpc-20070515.tar.gz</a></td>
3014 <td class="filesize">25802</td>
3015 <td class="filedate">15/05/2007 18:15:25 +0000</td>
3016 <td class="comment">MiniUPnP client source code</td>
3017 <td></td>
3018</tr>
3019<tr>
3020 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC5.tar.gz'>miniupnpd-1.0-RC5.tar.gz</a></td>
3021 <td class="filesize">48064</td>
3022 <td class="filedate">10/05/2007 20:22:48 +0000</td>
3023 <td class="comment">MiniUPnP daemon release source code</td>
3024 <td></td>
3025</tr>
3026<tr>
3027 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC5.tar.gz'>miniupnpc-1.0-RC5.tar.gz</a></td>
3028 <td class="filesize">25242</td>
3029 <td class="filedate">10/05/2007 20:22:46 +0000</td>
3030 <td class="comment">MiniUPnP client release source code</td>
3031 <td></td>
3032</tr>
3033<tr>
3034 <td class="filename"><a href='download.php?file=miniupnpd-20070412.tar.gz'>miniupnpd-20070412.tar.gz</a></td>
3035 <td class="filesize">47807</td>
3036 <td class="filedate">12/04/2007 20:21:48 +0000</td>
3037 <td class="comment">MiniUPnP daemon source code</td>
3038 <td></td>
3039</tr>
3040<tr>
3041 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC4.tar.gz'>miniupnpd-1.0-RC4.tar.gz</a></td>
3042 <td class="filesize">47687</td>
3043 <td class="filedate">17/03/2007 11:43:13 +0000</td>
3044 <td class="comment">MiniUPnP daemon release source code</td>
3045 <td></td>
3046</tr>
3047<tr>
3048 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC4.tar.gz'>miniupnpc-1.0-RC4.tar.gz</a></td>
3049 <td class="filesize">25085</td>
3050 <td class="filedate">17/03/2007 11:43:10 +0000</td>
3051 <td class="comment">MiniUPnP client release source code</td>
3052 <td></td>
3053</tr>
3054<tr>
3055 <td class="filename"><a href='download.php?file=miniupnpd-20070311.tar.gz'>miniupnpd-20070311.tar.gz</a></td>
3056 <td class="filesize">47599</td>
3057 <td class="filedate">11/03/2007 00:25:26 +0000</td>
3058 <td class="comment">MiniUPnP daemon source code</td>
3059 <td></td>
3060</tr>
3061<tr>
3062 <td class="filename"><a href='download.php?file=miniupnpd-20070208.tar.gz'>miniupnpd-20070208.tar.gz</a></td>
3063 <td class="filesize">45084</td>
3064 <td class="filedate">07/02/2007 23:04:06 +0000</td>
3065 <td class="comment">MiniUPnP daemon source code</td>
3066 <td></td>
3067</tr>
3068<tr>
3069 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC3.tar.gz'>miniupnpd-1.0-RC3.tar.gz</a></td>
3070 <td class="filesize">44683</td>
3071 <td class="filedate">30/01/2007 23:00:44 +0000</td>
3072 <td class="comment">MiniUPnP daemon release source code</td>
3073 <td></td>
3074</tr>
3075<tr>
3076 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC3.tar.gz'>miniupnpc-1.0-RC3.tar.gz</a></td>
3077 <td class="filesize">25055</td>
3078 <td class="filedate">30/01/2007 23:00:42 +0000</td>
3079 <td class="comment">MiniUPnP client release source code</td>
3080 <td></td>
3081</tr>
3082<tr>
3083 <td class="filename"><a href='download.php?file=miniupnpd-20070130.tar.gz'>miniupnpd-20070130.tar.gz</a></td>
3084 <td class="filesize">43735</td>
3085 <td class="filedate">29/01/2007 23:26:16 +0000</td>
3086 <td class="comment">MiniUPnP daemon source code</td>
3087 <td></td>
3088</tr>
3089<tr>
3090 <td class="filename"><a href='download.php?file=miniupnpc-20070130.tar.gz'>miniupnpc-20070130.tar.gz</a></td>
3091 <td class="filesize">24466</td>
3092 <td class="filedate">29/01/2007 23:26:13 +0000</td>
3093 <td class="comment">MiniUPnP client source code</td>
3094 <td></td>
3095</tr>
3096<tr>
3097 <td class="filename"><a href='download.php?file=miniupnpd-20070127.tar.gz'>miniupnpd-20070127.tar.gz</a></td>
3098 <td class="filesize">42643</td>
3099 <td class="filedate">27/01/2007 16:02:35 +0000</td>
3100 <td class="comment">MiniUPnP daemon source code</td>
3101 <td></td>
3102</tr>
3103<tr>
3104 <td class="filename"><a href='download.php?file=miniupnpc-20070127.tar.gz'>miniupnpc-20070127.tar.gz</a></td>
3105 <td class="filesize">24241</td>
3106 <td class="filedate">27/01/2007 16:02:33 +0000</td>
3107 <td class="comment">MiniUPnP client source code</td>
3108 <td></td>
3109</tr>
3110<tr>
3111 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC2.tar.gz'>miniupnpd-1.0-RC2.tar.gz</a></td>
3112 <td class="filesize">40424</td>
3113 <td class="filedate">17/01/2007 16:13:05 +0000</td>
3114 <td class="comment">MiniUPnP daemon release source code</td>
3115 <td></td>
3116</tr>
3117<tr>
3118 <td class="filename"><a href='download.php?file=miniupnpd-20070112.tar.gz'>miniupnpd-20070112.tar.gz</a></td>
3119 <td class="filesize">40708</td>
3120 <td class="filedate">12/01/2007 13:40:50 +0000</td>
3121 <td class="comment">MiniUPnP daemon source code</td>
3122 <td></td>
3123</tr>
3124<tr>
3125 <td class="filename"><a href='download.php?file=miniupnpd-20070111.tar.gz'>miniupnpd-20070111.tar.gz</a></td>
3126 <td class="filesize">40651</td>
3127 <td class="filedate">11/01/2007 18:50:21 +0000</td>
3128 <td class="comment">MiniUPnP daemon source code</td>
3129 <td></td>
3130</tr>
3131<tr>
3132 <td class="filename"><a href='download.php?file=miniupnpd-20070108.tar.gz'>miniupnpd-20070108.tar.gz</a></td>
3133 <td class="filesize">40025</td>
3134 <td class="filedate">08/01/2007 10:02:14 +0000</td>
3135 <td class="comment">MiniUPnP daemon source code</td>
3136 <td></td>
3137</tr>
3138<tr>
3139 <td class="filename"><a href='download.php?file=miniupnpd-20070103.tar.gz'>miniupnpd-20070103.tar.gz</a></td>
3140 <td class="filesize">40065</td>
3141 <td class="filedate">03/01/2007 14:39:11 +0000</td>
3142 <td class="comment">MiniUPnP daemon source code</td>
3143 <td></td>
3144</tr>
3145<tr>
3146 <td class="filename"><a href='download.php?file=miniupnpc-20061214.tar.gz'>miniupnpc-20061214.tar.gz</a></td>
3147 <td class="filesize">24106</td>
3148 <td class="filedate">14/12/2006 15:43:54 +0000</td>
3149 <td class="comment">MiniUPnP client source code</td>
3150 <td></td>
3151</tr>
3152<tr>
3153 <td class="filename"><a href='download.php?file=miniupnpd-20061214.tar.gz'>miniupnpd-20061214.tar.gz</a></td>
3154 <td class="filesize">39750</td>
3155 <td class="filedate">14/12/2006 13:44:51 +0000</td>
3156 <td class="comment">MiniUPnP daemon source code</td>
3157 <td></td>
3158</tr>
3159<tr>
3160 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC1.tar.gz'>miniupnpd-1.0-RC1.tar.gz</a></td>
3161 <td class="filesize">39572</td>
3162 <td class="filedate">07/12/2006 10:55:31 +0000</td>
3163 <td class="comment">MiniUPnP daemon release source code</td>
3164 <td></td>
3165</tr>
3166<tr>
3167 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC1.tar.gz'>miniupnpc-1.0-RC1.tar.gz</a></td>
3168 <td class="filesize">23582</td>
3169 <td class="filedate">07/12/2006 10:55:30 +0000</td>
3170 <td class="comment">MiniUPnP client release source code</td>
3171 <td></td>
3172</tr>
3173<tr>
3174 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20061201.zip'>upnpc-exe-win32-20061201.zip</a></td>
3175 <td class="filesize">10378</td>
3176 <td class="filedate">01/12/2006 00:33:08 +0000</td>
3177 <td class="comment">Windows executable</td>
3178 <td></td>
3179</tr>
3180<tr>
3181 <td class="filename"><a href='download.php?file=miniupnpd20061130.tar.gz'>miniupnpd20061130.tar.gz</a></td>
3182 <td class="filesize">37184</td>
3183 <td class="filedate">30/11/2006 12:25:25 +0000</td>
3184 <td class="comment">MiniUPnP daemon source code</td>
3185 <td></td>
3186</tr>
3187<tr>
3188 <td class="filename"><a href='download.php?file=miniupnpd20061129.tar.gz'>miniupnpd20061129.tar.gz</a></td>
3189 <td class="filesize">36045</td>
3190 <td class="filedate">29/11/2006 00:10:49 +0000</td>
3191 <td class="comment">MiniUPnP daemon source code</td>
3192 <td></td>
3193</tr>
3194<tr>
3195 <td class="filename"><a href='download.php?file=miniupnpd20061127.tar.gz'>miniupnpd20061127.tar.gz</a></td>
3196 <td class="filesize">34155</td>
3197 <td class="filedate">26/11/2006 23:15:28 +0000</td>
3198 <td class="comment">MiniUPnP daemon source code</td>
3199 <td></td>
3200</tr>
3201<tr>
3202 <td class="filename"><a href='download.php?file=miniupnpc20061123.tar.gz'>miniupnpc20061123.tar.gz</a></td>
3203 <td class="filesize">21004</td>
3204 <td class="filedate">23/11/2006 22:41:46 +0000</td>
3205 <td class="comment">MiniUPnP client source code</td>
3206 <td></td>
3207</tr>
3208<tr>
3209 <td class="filename" colspan="2"><a href='download.php?file=miniupnpd-bin-openwrt20061123.tar.gz'>miniupnpd-bin-openwrt20061123.tar.gz</a></td>
3210 <td class="filedate">23/11/2006 22:41:44 +0000</td>
3211 <td class="comment">Precompiled binaries for openwrt</td>
3212 <td></td>
3213</tr>
3214<tr>
3215 <td class="filename"><a href='download.php?file=miniupnpd20061123.tar.gz'>miniupnpd20061123.tar.gz</a></td>
3216 <td class="filesize">33809</td>
3217 <td class="filedate">23/11/2006 22:28:29 +0000</td>
3218 <td class="comment">MiniUPnP daemon source code</td>
3219 <td></td>
3220</tr>
3221<tr>
3222 <td class="filename"><a href='download.php?file=miniupnpc20061119.tar.gz'>miniupnpc20061119.tar.gz</a></td>
3223 <td class="filesize">20897</td>
3224 <td class="filedate">19/11/2006 22:50:37 +0000</td>
3225 <td class="comment">MiniUPnP client source code</td>
3226 <td></td>
3227</tr>
3228<tr>
3229 <td class="filename"><a href='download.php?file=miniupnpd20061119.tar.gz'>miniupnpd20061119.tar.gz</a></td>
3230 <td class="filesize">32580</td>
3231 <td class="filedate">19/11/2006 22:50:36 +0000</td>
3232 <td class="comment">MiniUPnP daemon source code</td>
3233 <td></td>
3234</tr>
3235<tr>
3236 <td class="filename"><a href='download.php?file=miniupnpd20061117.tar.gz'>miniupnpd20061117.tar.gz</a></td>
3237 <td class="filesize">32646</td>
3238 <td class="filedate">17/11/2006 13:29:33 +0000</td>
3239 <td class="comment">MiniUPnP daemon source code</td>
3240 <td></td>
3241</tr>
3242<tr>
3243 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20061112.zip'>upnpc-exe-win32-20061112.zip</a></td>
3244 <td class="filesize">10262</td>
3245 <td class="filedate">12/11/2006 22:41:25 +0000</td>
3246 <td class="comment">Windows executable</td>
3247 <td></td>
3248</tr>
3249<tr>
3250 <td class="filename"><a href='download.php?file=miniupnpd20061112.tar.gz'>miniupnpd20061112.tar.gz</a></td>
3251 <td class="filesize">32023</td>
3252 <td class="filedate">12/11/2006 21:30:32 +0000</td>
3253 <td class="comment">MiniUPnP daemon source code</td>
3254 <td></td>
3255</tr>
3256<tr>
3257 <td class="filename"><a href='download.php?file=miniupnpc20061112.tar.gz'>miniupnpc20061112.tar.gz</a></td>
3258 <td class="filesize">21047</td>
3259 <td class="filedate">12/11/2006 21:30:31 +0000</td>
3260 <td class="comment">MiniUPnP client source code</td>
3261 <td></td>
3262</tr>
3263<tr>
3264 <td class="filename"><a href='download.php?file=miniupnpd20061110.tar.gz'>miniupnpd20061110.tar.gz</a></td>
3265 <td class="filesize">27926</td>
3266 <td class="filedate">09/11/2006 23:35:02 +0000</td>
3267 <td class="comment">MiniUPnP daemon source code</td>
3268 <td></td>
3269</tr>
3270<tr>
3271 <td class="filename"><a href='download.php?file=miniupnpc20061110.tar.gz'>miniupnpc20061110.tar.gz</a></td>
3272 <td class="filesize">21009</td>
3273 <td class="filedate">09/11/2006 23:32:19 +0000</td>
3274 <td class="comment">MiniUPnP client source code</td>
3275 <td></td>
3276</tr>
3277<tr>
3278 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20061101.zip'>upnpc-exe-win32-20061101.zip</a></td>
3279 <td class="filesize">10089</td>
3280 <td class="filedate">08/11/2006 20:35:09 +0000</td>
3281 <td class="comment">Windows executable</td>
3282 <td></td>
3283</tr>
3284<tr>
3285 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20061020.zip'>upnpc-exe-win32-20061020.zip</a></td>
3286 <td class="filesize">9183</td>
3287 <td class="filedate">08/11/2006 20:35:08 +0000</td>
3288 <td class="comment">Windows executable</td>
3289 <td></td>
3290</tr>
3291<tr>
3292 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20060909.zip'>upnpc-exe-win32-20060909.zip</a></td>
3293 <td class="filesize">9994</td>
3294 <td class="filedate">08/11/2006 20:35:07 +0000</td>
3295 <td class="comment">Windows executable</td>
3296 <td></td>
3297</tr>
3298<tr>
3299 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20060801.zip'>upnpc-exe-win32-20060801.zip</a></td>
3300 <td class="filesize">10002</td>
3301 <td class="filedate">08/11/2006 20:35:06 +0000</td>
3302 <td class="comment">Windows executable</td>
3303 <td></td>
3304</tr>
3305<tr>
3306 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20060711.zip'>upnpc-exe-win32-20060711.zip</a></td>
3307 <td class="filesize">13733</td>
3308 <td class="filedate">08/11/2006 20:35:05 +0000</td>
3309 <td class="comment">Windows executable</td>
3310 <td></td>
3311</tr>
3312<tr>
3313 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20060709.zip'>upnpc-exe-win32-20060709.zip</a></td>
3314 <td class="filesize">13713</td>
3315 <td class="filedate">08/11/2006 20:35:04 +0000</td>
3316 <td class="comment">Windows executable</td>
3317 <td></td>
3318</tr>
3319<tr>
3320 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20060704.zip'>upnpc-exe-win32-20060704.zip</a></td>
3321 <td class="filesize">13297</td>
3322 <td class="filedate">08/11/2006 20:35:03 +0000</td>
3323 <td class="comment">Windows executable</td>
3324 <td></td>
3325</tr>
3326<tr>
3327 <td class="filename"><a href='download.php?file=miniupnpc20061107.tar.gz'>miniupnpc20061107.tar.gz</a></td>
3328 <td class="filesize">20708</td>
3329 <td class="filedate">06/11/2006 23:36:57 +0000</td>
3330 <td class="comment">MiniUPnP client source code</td>
3331 <td></td>
3332</tr>
3333<tr>
3334 <td class="filename"><a href='download.php?file=miniupnpd20061107.tar.gz'>miniupnpd20061107.tar.gz</a></td>
3335 <td class="filesize">26992</td>
3336 <td class="filedate">06/11/2006 23:35:06 +0000</td>
3337 <td class="comment">MiniUPnP daemon source code</td>
3338 <td></td>
3339</tr>
3340<tr>
3341 <td class="filename"><a href='download.php?file=miniupnpc20061106.tar.gz'>miniupnpc20061106.tar.gz</a></td>
3342 <td class="filesize">20575</td>
3343 <td class="filedate">06/11/2006 17:02:15 +0000</td>
3344 <td class="comment">MiniUPnP client source code</td>
3345 <td></td>
3346</tr>
3347<tr>
3348 <td class="filename"><a href='download.php?file=miniupnpd20061106.tar.gz'>miniupnpd20061106.tar.gz</a></td>
3349 <td class="filesize">26597</td>
3350 <td class="filedate">06/11/2006 15:39:10 +0000</td>
3351 <td class="comment">MiniUPnP daemon source code</td>
3352 <td></td>
3353</tr>
3354<tr>
3355 <td class="filename"><a href='download.php?file=miniupnpc20061101.tar.gz'>miniupnpc20061101.tar.gz</a></td>
3356 <td class="filesize">20395</td>
3357 <td class="filedate">04/11/2006 18:16:15 +0000</td>
3358 <td class="comment">MiniUPnP client source code</td>
3359 <td></td>
3360</tr>
3361<tr>
3362 <td class="filename"><a href='download.php?file=miniupnpc20061031.tar.gz'>miniupnpc20061031.tar.gz</a></td>
3363 <td class="filesize">20396</td>
3364 <td class="filedate">04/11/2006 18:16:13 +0000</td>
3365 <td class="comment">MiniUPnP client source code</td>
3366 <td></td>
3367</tr>
3368<tr>
3369 <td class="filename"><a href='download.php?file=miniupnpc20061023.tar.gz'>miniupnpc20061023.tar.gz</a></td>
3370 <td class="filesize">20109</td>
3371 <td class="filedate">04/11/2006 18:16:12 +0000</td>
3372 <td class="comment">MiniUPnP client source code</td>
3373 <td></td>
3374</tr>
3375<tr>
3376 <td class="filename"><a href='download.php?file=miniupnpc20061020.tar.gz'>miniupnpc20061020.tar.gz</a></td>
3377 <td class="filesize">19739</td>
3378 <td class="filedate">04/11/2006 18:16:10 +0000</td>
3379 <td class="comment">MiniUPnP client source code</td>
3380 <td></td>
3381</tr>
3382<tr>
3383 <td class="filename"><a href='download.php?file=miniupnpc20060909.tar.gz'>miniupnpc20060909.tar.gz</a></td>
3384 <td class="filesize">19285</td>
3385 <td class="filedate">04/11/2006 18:16:09 +0000</td>
3386 <td class="comment">MiniUPnP client source code</td>
3387 <td></td>
3388</tr>
3389<tr>
3390 <td class="filename"><a href='download.php?file=miniupnpc20060731.tar.gz'>miniupnpc20060731.tar.gz</a></td>
3391 <td class="filesize">19032</td>
3392 <td class="filedate">04/11/2006 18:16:07 +0000</td>
3393 <td class="comment">MiniUPnP client source code</td>
3394 <td></td>
3395</tr>
3396<tr>
3397 <td class="filename"><a href='download.php?file=miniupnpc20060711.tar.gz'>miniupnpc20060711.tar.gz</a></td>
3398 <td class="filesize">19151</td>
3399 <td class="filedate">04/11/2006 18:16:06 +0000</td>
3400 <td class="comment">MiniUPnP client source code</td>
3401 <td></td>
3402</tr>
3403<tr>
3404 <td class="filename"><a href='download.php?file=miniupnpc20060709.tar.gz'>miniupnpc20060709.tar.gz</a></td>
3405 <td class="filesize">19080</td>
3406 <td class="filedate">04/11/2006 18:16:04 +0000</td>
3407 <td class="comment">MiniUPnP client source code</td>
3408 <td></td>
3409</tr>
3410<tr>
3411 <td class="filename"><a href='download.php?file=miniupnpc20060703.tar.gz'>miniupnpc20060703.tar.gz</a></td>
3412 <td class="filesize">17906</td>
3413 <td class="filedate">04/11/2006 18:16:03 +0000</td>
3414 <td class="comment">MiniUPnP client source code</td>
3415 <td></td>
3416</tr>
3417<tr>
3418 <td class="filename"><a href='download.php?file=miniupnpc-new20060630.tar.gz'>miniupnpc-new20060630.tar.gz</a></td>
3419 <td class="filesize">14840</td>
3420 <td class="filedate">04/11/2006 18:16:01 +0000</td>
3421 <td class="comment">Jo&atilde;o Paulo Barraca version of the upnp client</td>
3422 <td></td>
3423</tr>
3424<tr>
3425 <td class="filename"><a href='download.php?file=miniupnpd20061029.tar.gz'>miniupnpd20061029.tar.gz</a></td>
3426 <td class="filesize">24197</td>
3427 <td class="filedate">03/11/2006 13:40:30 +0000</td>
3428 <td class="comment">MiniUPnP daemon source code</td>
3429 <td></td>
3430</tr>
3431<tr>
3432 <td class="filename"><a href='download.php?file=miniupnpd20061027.tar.gz'>miniupnpd20061027.tar.gz</a></td>
3433 <td class="filesize">23904</td>
3434 <td class="filedate">03/11/2006 13:40:29 +0000</td>
3435 <td class="comment">MiniUPnP daemon source code</td>
3436 <td></td>
3437</tr>
3438<tr>
3439 <td class="filename"><a href='download.php?file=miniupnpd20061028.tar.gz'>miniupnpd20061028.tar.gz</a></td>
3440 <td class="filesize">24383</td>
3441 <td class="filedate">03/11/2006 13:40:29 +0000</td>
3442 <td class="comment">MiniUPnP daemon source code</td>
3443 <td></td>
3444</tr>
3445<tr>
3446 <td class="filename"><a href='download.php?file=miniupnpd20061018.tar.gz'>miniupnpd20061018.tar.gz</a></td>
3447 <td class="filesize">23051</td>
3448 <td class="filedate">03/11/2006 13:40:28 +0000</td>
3449 <td class="comment">MiniUPnP daemon source code</td>
3450 <td></td>
3451</tr>
3452<tr>
3453 <td class="filename"><a href='download.php?file=miniupnpd20061023.tar.gz'>miniupnpd20061023.tar.gz</a></td>
3454 <td class="filesize">23478</td>
3455 <td class="filedate">03/11/2006 13:40:28 +0000</td>
3456 <td class="comment">MiniUPnP daemon source code</td>
3457 <td></td>
3458</tr>
3459<tr>
3460 <td class="filename"><a href='download.php?file=miniupnpd20060930.tar.gz'>miniupnpd20060930.tar.gz</a></td>
3461 <td class="filesize">22832</td>
3462 <td class="filedate">03/11/2006 13:40:28 +0000</td>
3463 <td class="comment">MiniUPnP daemon source code</td>
3464 <td></td>
3465</tr>
3466<tr>
3467 <td class="filename"><a href='download.php?file=miniupnpd20060924.tar.gz'>miniupnpd20060924.tar.gz</a></td>
3468 <td class="filesize">22038</td>
3469 <td class="filedate">03/11/2006 13:40:27 +0000</td>
3470 <td class="comment">MiniUPnP daemon source code</td>
3471 <td></td>
3472</tr>
3473<tr>
3474 <td class="filename"><a href='download.php?file=miniupnpd20060919.tar.gz'>miniupnpd20060919.tar.gz</a></td>
3475 <td class="filesize">21566</td>
3476 <td class="filedate">03/11/2006 13:40:27 +0000</td>
3477 <td class="comment">MiniUPnP daemon source code</td>
3478 <td></td>
3479</tr>
3480<tr>
3481 <td class="filename"><a href='download.php?file=miniupnpd20060729.tar.gz'>miniupnpd20060729.tar.gz</a></td>
3482 <td class="filesize">19202</td>
3483 <td class="filedate">03/11/2006 13:40:26 +0000</td>
3484 <td class="comment">MiniUPnP daemon source code</td>
3485 <td></td>
3486</tr>
3487<tr>
3488 <td class="filename"><a href='download.php?file=miniupnpd20060909.tar.gz'>miniupnpd20060909.tar.gz</a></td>
3489 <td class="filesize">19952</td>
3490 <td class="filedate">03/11/2006 13:40:26 +0000</td>
3491 <td class="comment">MiniUPnP daemon source code</td>
3492 <td></td>
3493</tr>
3494</table>
3495
3496<p><a href="..">Home</a></p>
3497<p>Contact: miniupnp _AT_ free _DOT_ fr</p>
3498<p align="center">
3499<a href="https://validator.w3.org/check?uri=referer"><img src="https://www.w3.org/Icons/valid-xhtml10" alt="Valid XHTML 1.0 Transitional" height="31" width="88" /></a>
3500<a href="https://jigsaw.w3.org/css-validator/check/referer"><img style="border:0;width:88px;height:31px" src="https://jigsaw.w3.org/css-validator/images/vcss" alt="Valid CSS!" /></a>
3501<!--
3502 <a href="https://freshmeat.net/projects/miniupnp"><img src="https://s3.amazonaws.com/entp-tender-production/assets/bc5be96f147ec8db3c10fc017f1f53889904ef5b/fm_logo_white_150_normal.png" border="0" alt="freshmeat.net" /></a>
3503-->
3504<!-- https://futuresimple.github.com/images/github_logo.png -->
3505<!-- <a href="https://github.com/miniupnp/miniupnp"><img src="https://assets-cdn.github.com/images/modules/logos_page/GitHub-Logo.png" alt="github.com" height="31" /></a> -->
3506<a href="https://github.com/miniupnp/miniupnp"><img style="position: absolute; top: 0; left: 0; border: 0;" src="https://github.blog/wp-content/uploads/2008/12/forkme_left_green_007200.png" alt="Fork me on GitHub" /></a>
3507</p>
3508
3509<script type="text/javascript">
3510var gaJsHost = (("https:" == document.location.protocol) ? "https://ssl." : "http://www.");
3511document.write(unescape("%3Cscript src='" + gaJsHost + "google-analytics.com/ga.js' type='text/javascript'%3E%3C/script%3E"));
3512</script>
3513<script type="text/javascript">
3514try {
3515 var ua = 'UA-10295521';
3516 if(window.location.hostname == 'miniupnp.free.fr')
3517 ua += '-1';
3518 else if(window.location.hostname == 'miniupnp.tuxfamily.org')
3519 ua += '-2';
3520 else ua = '';
3521 if(ua != '') {
3522 var pageTracker = _gat._getTracker(ua);
3523 pageTracker._trackPageview();
3524 }
3525} catch(err) {}</script>
3526</body>
3527</html>
3528
diff --git a/bitbake/lib/bb/tests/fetch.py b/bitbake/lib/bb/tests/fetch.py
index 85c1f79ff3..077472b8b3 100644
--- a/bitbake/lib/bb/tests/fetch.py
+++ b/bitbake/lib/bb/tests/fetch.py
@@ -7,7 +7,10 @@
7# 7#
8 8
9import contextlib 9import contextlib
10import shutil
10import unittest 11import unittest
12import unittest.mock
13import urllib.parse
11import hashlib 14import hashlib
12import tempfile 15import tempfile
13import collections 16import collections
@@ -17,6 +20,7 @@ import tarfile
17from bb.fetch2 import URI 20from bb.fetch2 import URI
18from bb.fetch2 import FetchMethod 21from bb.fetch2 import FetchMethod
19import bb 22import bb
23import bb.utils
20from bb.tests.support.httpserver import HTTPService 24from bb.tests.support.httpserver import HTTPService
21 25
22def skipIfNoNetwork(): 26def skipIfNoNetwork():
@@ -24,6 +28,18 @@ def skipIfNoNetwork():
24 return unittest.skip("network test") 28 return unittest.skip("network test")
25 return lambda f: f 29 return lambda f: f
26 30
31
32@contextlib.contextmanager
33def hide_directory(directory):
34 """Hide the given directory and restore it after the context is left"""
35 temp_name = directory + ".bak"
36 os.rename(directory, temp_name)
37 try:
38 yield
39 finally:
40 os.rename(temp_name, directory)
41
42
27class TestTimeout(Exception): 43class TestTimeout(Exception):
28 # Indicate to pytest that this is not a test suite 44 # Indicate to pytest that this is not a test suite
29 __test__ = False 45 __test__ = False
@@ -323,6 +339,21 @@ class URITest(unittest.TestCase):
323 'params': {"downloadfilename" : "EGPL-T101.zip"}, 339 'params': {"downloadfilename" : "EGPL-T101.zip"},
324 'query': {"9BE0BF6657": None}, 340 'query': {"9BE0BF6657": None},
325 'relative': False 341 'relative': False
342 },
343 "file://example@.service": {
344 'uri': 'file:example%40.service',
345 'scheme': 'file',
346 'hostname': '',
347 'port': None,
348 'hostport': '',
349 'path': 'example@.service',
 350 'userinfo': '',
352 'username': '',
353 'password': '',
354 'params': {},
355 'query': {},
356 'relative': True
326 } 357 }
327 358
328 } 359 }
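The test case added above checks that an '@' in a file path survives parsing and is percent-encoded as %40 when the URI is re-serialised. For illustration only, the same encoding can be reproduced with urllib.parse (which this patch also imports):

    import urllib.parse

    # '@' is outside quote()'s default safe set, '.' is inside it,
    # so only the '@' gets escaped
    print(urllib.parse.quote("example@.service"))  # example%40.service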
@@ -459,16 +490,16 @@ class FetcherTest(unittest.TestCase):
459class MirrorUriTest(FetcherTest): 490class MirrorUriTest(FetcherTest):
460 491
461 replaceuris = { 492 replaceuris = {
462 ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "http://somewhere.org/somedir/") 493 ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/.*", "http://somewhere.org/somedir/")
463 : "http://somewhere.org/somedir/git2_git.invalid.infradead.org.mtd-utils.git.tar.gz", 494 : "http://somewhere.org/somedir/git2_git.invalid.infradead.org.mtd-utils.git.tar.gz",
464 ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http") 495 ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http")
465 : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", 496 : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
466 ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http") 497 ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http")
467 : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", 498 : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
468 ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/\\2;protocol=http") 499 ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/\\2;protocol=http")
469 : "git://somewhere.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", 500 : "git://somewhere.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
470 ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890", "git://someserver.org/bitbake", "git://git.openembedded.org/bitbake") 501 ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master", "git://someserver.org/bitbake", "git://git.openembedded.org/bitbake")
471 : "git://git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890", 502 : "git://git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master",
472 ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache") 503 ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache")
473 : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz", 504 : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz",
474 ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache/") 505 ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache/")
@@ -481,12 +512,12 @@ class MirrorUriTest(FetcherTest):
481 : "http://archive.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", 512 : "http://archive.apache.org/dist/subversion/subversion-1.7.1.tar.bz2",
482 ("http://www.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", "http://.*/.*", "file:///somepath/downloads/") 513 ("http://www.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", "http://.*/.*", "file:///somepath/downloads/")
483 : "file:///somepath/downloads/subversion-1.7.1.tar.bz2", 514 : "file:///somepath/downloads/subversion-1.7.1.tar.bz2",
484 ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http") 515 ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http")
485 : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", 516 : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
486 ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http") 517 ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http")
487 : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", 518 : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
488 ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/MIRRORNAME;protocol=http") 519 ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/.*", "git://somewhere.org/somedir/MIRRORNAME;protocol=http")
489 : "git://somewhere.org/somedir/git.invalid.infradead.org.foo.mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", 520 : "git://somewhere.org/somedir/git.invalid.infradead.org.foo.mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
490 ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org") 521 ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org")
491 : "http://somewhere2.org/somefile_1.2.3.tar.gz", 522 : "http://somewhere2.org/somefile_1.2.3.tar.gz",
492 ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org/") 523 ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org/")
@@ -502,6 +533,10 @@ class MirrorUriTest(FetcherTest):
502 : "file:///mirror/example/1.0.0/some-example-1.0.0.tgz;downloadfilename=some-example-1.0.0.tgz", 533 : "file:///mirror/example/1.0.0/some-example-1.0.0.tgz;downloadfilename=some-example-1.0.0.tgz",
503 ("https://somewhere.org/example-1.0.0.tgz;downloadfilename=some-example-1.0.0.tgz", "https://.*/.*", "file:///mirror/some-example-1.0.0.tgz") 534 ("https://somewhere.org/example-1.0.0.tgz;downloadfilename=some-example-1.0.0.tgz", "https://.*/.*", "file:///mirror/some-example-1.0.0.tgz")
504 : "file:///mirror/some-example-1.0.0.tgz;downloadfilename=some-example-1.0.0.tgz", 535 : "file:///mirror/some-example-1.0.0.tgz;downloadfilename=some-example-1.0.0.tgz",
536 ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", r"git://(?!internal\.git\.server).*/.*", "http://somewhere.org/somedir/")
537 : "http://somewhere.org/somedir/git2_git.invalid.infradead.org.mtd-utils.git.tar.gz",
538 ("git://internal.git.server.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", r"git://(?!internal\.git\.server).*/.*", "http://somewhere.org/somedir/")
539 : None,
505 540
506 #Renaming files doesn't work 541 #Renaming files doesn't work
507 #("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz") : "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz" 542 #("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz") : "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz"
@@ -510,8 +545,8 @@ class MirrorUriTest(FetcherTest):
510 545
511 mirrorvar = "http://.*/.* file:///somepath/downloads/ " \ 546 mirrorvar = "http://.*/.* file:///somepath/downloads/ " \
512 "git://someserver.org/bitbake git://git.openembedded.org/bitbake " \ 547 "git://someserver.org/bitbake git://git.openembedded.org/bitbake " \
513 "https://.*/.* file:///someotherpath/downloads/ " \ 548 "https?://.*/.* file:///someotherpath/downloads/ " \
514 "http://.*/.* file:///someotherpath/downloads/" 549 "svn://svn.server1.com/ svn://svn.server2.com/"
515 550
516 def test_urireplace(self): 551 def test_urireplace(self):
517 self.d.setVar("FILESPATH", ".") 552 self.d.setVar("FILESPATH", ".")
@@ -520,7 +555,7 @@ class MirrorUriTest(FetcherTest):
520 ud.setup_localpath(self.d) 555 ud.setup_localpath(self.d)
521 mirrors = bb.fetch2.mirror_from_string("%s %s" % (k[1], k[2])) 556 mirrors = bb.fetch2.mirror_from_string("%s %s" % (k[1], k[2]))
522 newuris, uds = bb.fetch2.build_mirroruris(ud, mirrors, self.d) 557 newuris, uds = bb.fetch2.build_mirroruris(ud, mirrors, self.d)
523 self.assertEqual([v], newuris) 558 self.assertEqual([v] if v else [], newuris)
524 559
525 def test_urilist1(self): 560 def test_urilist1(self):
526 fetcher = bb.fetch.FetchData("http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d) 561 fetcher = bb.fetch.FetchData("http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d)
@@ -535,6 +570,13 @@ class MirrorUriTest(FetcherTest):
535 uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d) 570 uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
536 self.assertEqual(uris, ['file:///someotherpath/downloads/bitbake-1.0.tar.gz']) 571 self.assertEqual(uris, ['file:///someotherpath/downloads/bitbake-1.0.tar.gz'])
537 572
573 def test_urilistsvn(self):
574 # Catch svn:// -> svn:// bug
575 fetcher = bb.fetch.FetchData("svn://svn.server1.com/isource/svnroot/reponame/tags/tagname;module=path_in_tagnamefolder;protocol=https;rev=2", self.d)
576 mirrors = bb.fetch2.mirror_from_string(self.mirrorvar)
577 uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
578 self.assertEqual(uris, ['svn://svn.server2.com/isource/svnroot/reponame/tags/tagname;module=path_in_tagnamefolder;protocol=https;rev=2'])
579
538 def test_mirror_of_mirror(self): 580 def test_mirror_of_mirror(self):
539 # Test if mirror of a mirror works 581 # Test if mirror of a mirror works
540 mirrorvar = self.mirrorvar + " http://.*/.* http://otherdownloads.yoctoproject.org/downloads/" 582 mirrorvar = self.mirrorvar + " http://.*/.* http://otherdownloads.yoctoproject.org/downloads/"
@@ -547,16 +589,16 @@ class MirrorUriTest(FetcherTest):
547 'http://otherdownloads.yoctoproject.org/downloads/bitbake-1.0.tar.gz', 589 'http://otherdownloads.yoctoproject.org/downloads/bitbake-1.0.tar.gz',
548 'http://downloads2.yoctoproject.org/downloads/bitbake-1.0.tar.gz']) 590 'http://downloads2.yoctoproject.org/downloads/bitbake-1.0.tar.gz'])
549 591
550 recmirrorvar = "https://.*/[^/]* http://AAAA/A/A/A/ " \ 592 recmirrorvar = "https://.*/[^/]* http://aaaa/A/A/A/ " \
551 "https://.*/[^/]* https://BBBB/B/B/B/" 593 "https://.*/[^/]* https://bbbb/B/B/B/"
552 594
553 def test_recursive(self): 595 def test_recursive(self):
554 fetcher = bb.fetch.FetchData("https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d) 596 fetcher = bb.fetch.FetchData("https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d)
555 mirrors = bb.fetch2.mirror_from_string(self.recmirrorvar) 597 mirrors = bb.fetch2.mirror_from_string(self.recmirrorvar)
556 uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d) 598 uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
557 self.assertEqual(uris, ['http://AAAA/A/A/A/bitbake/bitbake-1.0.tar.gz', 599 self.assertEqual(uris, ['http://aaaa/A/A/A/bitbake/bitbake-1.0.tar.gz',
558 'https://BBBB/B/B/B/bitbake/bitbake-1.0.tar.gz', 600 'https://bbbb/B/B/B/bitbake/bitbake-1.0.tar.gz',
559 'http://AAAA/A/A/A/B/B/bitbake/bitbake-1.0.tar.gz']) 601 'http://aaaa/A/A/A/B/B/bitbake/bitbake-1.0.tar.gz'])
560 602
561 603
562class GitDownloadDirectoryNamingTest(FetcherTest): 604class GitDownloadDirectoryNamingTest(FetcherTest):
@@ -679,7 +721,7 @@ class GitShallowTarballNamingTest(FetcherTest):
679class CleanTarballTest(FetcherTest): 721class CleanTarballTest(FetcherTest):
680 def setUp(self): 722 def setUp(self):
681 super(CleanTarballTest, self).setUp() 723 super(CleanTarballTest, self).setUp()
682 self.recipe_url = "git://git.openembedded.org/bitbake;protocol=https" 724 self.recipe_url = "git://git.openembedded.org/bitbake;protocol=https;branch=master"
683 self.recipe_tarball = "git2_git.openembedded.org.bitbake.tar.gz" 725 self.recipe_tarball = "git2_git.openembedded.org.bitbake.tar.gz"
684 726
685 self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '1') 727 self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '1')
@@ -720,6 +762,7 @@ class FetcherLocalTest(FetcherTest):
720 os.makedirs(self.localsrcdir) 762 os.makedirs(self.localsrcdir)
721 touch(os.path.join(self.localsrcdir, 'a')) 763 touch(os.path.join(self.localsrcdir, 'a'))
722 touch(os.path.join(self.localsrcdir, 'b')) 764 touch(os.path.join(self.localsrcdir, 'b'))
765 touch(os.path.join(self.localsrcdir, 'c@d'))
723 os.makedirs(os.path.join(self.localsrcdir, 'dir')) 766 os.makedirs(os.path.join(self.localsrcdir, 'dir'))
724 touch(os.path.join(self.localsrcdir, 'dir', 'c')) 767 touch(os.path.join(self.localsrcdir, 'dir', 'c'))
725 touch(os.path.join(self.localsrcdir, 'dir', 'd')) 768 touch(os.path.join(self.localsrcdir, 'dir', 'd'))
@@ -751,6 +794,10 @@ class FetcherLocalTest(FetcherTest):
751 tree = self.fetchUnpack(['file://a', 'file://dir/c']) 794 tree = self.fetchUnpack(['file://a', 'file://dir/c'])
752 self.assertEqual(tree, ['a', 'dir/c']) 795 self.assertEqual(tree, ['a', 'dir/c'])
753 796
797 def test_local_at(self):
798 tree = self.fetchUnpack(['file://c@d'])
799 self.assertEqual(tree, ['c@d'])
800
754 def test_local_backslash(self): 801 def test_local_backslash(self):
755 tree = self.fetchUnpack([r'file://backslash\x2dsystemd-unit.device']) 802 tree = self.fetchUnpack([r'file://backslash\x2dsystemd-unit.device'])
756 self.assertEqual(tree, [r'backslash\x2dsystemd-unit.device']) 803 self.assertEqual(tree, [r'backslash\x2dsystemd-unit.device'])
@@ -1056,12 +1103,6 @@ class FetcherNetworkTest(FetcherTest):
1056 self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2) 1103 self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2)
1057 1104
1058 @skipIfNoNetwork() 1105 @skipIfNoNetwork()
1059 def test_gitfetch_tagandrev(self):
1060 # SRCREV is set but does not match rev= parameter
1061 url1 = url2 = "git://git.openembedded.org/bitbake;rev=270a05b0b4ba0959fe0624d2a4885d7b70426da5;tag=270a05b0b4ba0959fe0624d2a4885d7b70426da5;protocol=https"
1062 self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2)
1063
1064 @skipIfNoNetwork()
1065 def test_gitfetch_usehead(self): 1106 def test_gitfetch_usehead(self):
1066 # Since self.gitfetcher() sets SRCREV we expect this to override 1107 # Since self.gitfetcher() sets SRCREV we expect this to override
1067 # `usehead=1' and instead fetch the specified SRCREV. See 1108 # `usehead=1' and instead fetch the specified SRCREV. See
@@ -1095,7 +1136,7 @@ class FetcherNetworkTest(FetcherTest):
1095 @skipIfNoNetwork() 1136 @skipIfNoNetwork()
1096 def test_gitfetch_finds_local_repository_when_premirror_rewrites_the_recipe_url(self): 1137 def test_gitfetch_finds_local_repository_when_premirror_rewrites_the_recipe_url(self):
1097 realurl = "https://git.openembedded.org/bitbake" 1138 realurl = "https://git.openembedded.org/bitbake"
1098 recipeurl = "git://someserver.org/bitbake;protocol=https" 1139 recipeurl = "git://someserver.org/bitbake;protocol=https;branch=master"
1099 self.sourcedir = self.unpackdir.replace("unpacked", "sourcemirror.git") 1140 self.sourcedir = self.unpackdir.replace("unpacked", "sourcemirror.git")
1100 os.chdir(self.tempdir) 1141 os.chdir(self.tempdir)
1101 self.git(['clone', realurl, self.sourcedir], cwd=self.tempdir) 1142 self.git(['clone', realurl, self.sourcedir], cwd=self.tempdir)
@@ -1250,7 +1291,6 @@ class FetcherNetworkTest(FetcherTest):
1250 1291
1251class SVNTest(FetcherTest): 1292class SVNTest(FetcherTest):
1252 def skipIfNoSvn(): 1293 def skipIfNoSvn():
1253 import shutil
1254 if not shutil.which("svn"): 1294 if not shutil.which("svn"):
1255 return unittest.skip("svn not installed, tests being skipped") 1295 return unittest.skip("svn not installed, tests being skipped")
1256 1296
@@ -1373,15 +1413,17 @@ class TrustedNetworksTest(FetcherTest):
1373 self.assertFalse(bb.fetch.trusted_network(self.d, url)) 1413 self.assertFalse(bb.fetch.trusted_network(self.d, url))
1374 1414
1375class URLHandle(unittest.TestCase): 1415class URLHandle(unittest.TestCase):
1376 1416 # Quote password as per RFC3986
1417 password = urllib.parse.quote(r"!#$%^&*()-_={}[]\|:?,.<>~`", r"!$&'/()*+,;=")
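              # A sketch of the quoting behaviour with a hypothetical value: given the
              # safe set above, urllib.parse.quote("p@ss word", r"!$&'/()*+,;=") returns
              # "p%40ss%20word" -- '@' and ' ' are percent-encoded, while characters in
              # the safe argument pass through unchanged.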
1377 datatable = { 1418 datatable = {
1378 "http://www.google.com/index.html" : ('http', 'www.google.com', '/index.html', '', '', {}), 1419 "http://www.google.com/index.html" : ('http', 'www.google.com', '/index.html', '', '', {}),
1379 "cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'}), 1420 "cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'}),
1380 "cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', collections.OrderedDict([('tag', 'V0-99-81'), ('module', 'familiar/dist/ipkg')])), 1421 "cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', collections.OrderedDict([('tag', 'V0-99-81'), ('module', 'familiar/dist/ipkg')])),
1381 "git://git.openembedded.org/bitbake;branch=@foo;protocol=https" : ('git', 'git.openembedded.org', '/bitbake', '', '', {'branch': '@foo', 'protocol' : 'https'}), 1422 "git://git.openembedded.org/bitbake;branch=@foo;protocol=https" : ('git', 'git.openembedded.org', '/bitbake', '', '', {'branch': '@foo', 'protocol' : 'https'}),
1382 "file://somelocation;someparam=1": ('file', '', 'somelocation', '', '', {'someparam': '1'}), 1423 "file://somelocation;someparam=1": ('file', '', 'somelocation', '', '', {'someparam': '1'}),
1424 "file://example@.service": ('file', '', 'example@.service', '', '', {}),
1383 "https://somesite.com/somerepo.git;user=anyUser:idtoken=1234" : ('https', 'somesite.com', '/somerepo.git', '', '', {'user': 'anyUser:idtoken=1234'}), 1425 "https://somesite.com/somerepo.git;user=anyUser:idtoken=1234" : ('https', 'somesite.com', '/somerepo.git', '', '', {'user': 'anyUser:idtoken=1234'}),
1384 r'git://s.o-me_ONE:!#$%^&*()-_={}[]\|:?,.<>~`@git.openembedded.org/bitbake;branch=main;protocol=https': ('git', 'git.openembedded.org', '/bitbake', 's.o-me_ONE', r'!#$%^&*()-_={}[]\|:?,.<>~`', {'branch': 'main', 'protocol' : 'https'}), 1426 'git://s.o-me_ONE:%s@git.openembedded.org/bitbake;branch=main;protocol=https' % password: ('git', 'git.openembedded.org', '/bitbake', 's.o-me_ONE', password, {'branch': 'main', 'protocol' : 'https'}),
1385 } 1427 }
1386 # we require a pathname to encodeurl but users can still pass such urls to 1428 # we require a pathname to encodeurl but users can still pass such urls to
1387 # decodeurl and we need to handle them 1429 # decodeurl and we need to handle them
@@ -1399,6 +1441,8 @@ class URLHandle(unittest.TestCase):
1399 def test_encodeurl(self): 1441 def test_encodeurl(self):
1400 for k, v in self.datatable.items(): 1442 for k, v in self.datatable.items():
1401 result = bb.fetch.encodeurl(v) 1443 result = bb.fetch.encodeurl(v)
1444 if result.startswith("file:"):
1445 result = urllib.parse.unquote(result)
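              # encodeurl() percent-encodes characters such as '@' in file: paths,
              # while the datatable keys are stored unencoded, hence the unquote
              # above before comparing.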
1402 self.assertEqual(result, k) 1446 self.assertEqual(result, k)
1403 1447
1404class FetchLatestVersionTest(FetcherTest): 1448class FetchLatestVersionTest(FetcherTest):
@@ -1419,12 +1463,12 @@ class FetchLatestVersionTest(FetcherTest):
1419 ("dtc", "git://git.yoctoproject.org/bbfetchtests-dtc.git;branch=master;protocol=https", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "", "") 1463 ("dtc", "git://git.yoctoproject.org/bbfetchtests-dtc.git;branch=master;protocol=https", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "", "")
1420 : "1.4.0", 1464 : "1.4.0",
1421 # combination version pattern 1465 # combination version pattern
1422 ("sysprof", "git://gitlab.gnome.org/GNOME/sysprof.git;protocol=https;branch=master", "cd44ee6644c3641507fb53b8a2a69137f2971219", "", "") 1466 ("sysprof", "git://git.yoctoproject.org/sysprof.git;protocol=https;branch=master", "cd44ee6644c3641507fb53b8a2a69137f2971219", "", "")
1423 : "1.2.0", 1467 : "1.2.0",
1424 ("u-boot-mkimage", "git://git.denx.de/u-boot.git;branch=master;protocol=git", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "", "") 1468 ("u-boot-mkimage", "git://source.denx.de/u-boot/u-boot.git;branch=master;protocol=https", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "", "")
1425 : "2014.01", 1469 : "2014.01",
1426 # version pattern "yyyymmdd" 1470 # version pattern "yyyymmdd"
1427 ("mobile-broadband-provider-info", "git://gitlab.gnome.org/GNOME/mobile-broadband-provider-info.git;protocol=https;branch=master", "4ed19e11c2975105b71b956440acdb25d46a347d", "", "") 1471 ("mobile-broadband-provider-info", "git://git.yoctoproject.org/mobile-broadband-provider-info.git;protocol=https;branch=master", "4ed19e11c2975105b71b956440acdb25d46a347d", "", "")
1428 : "20120614", 1472 : "20120614",
1429 # packages with a valid UPSTREAM_CHECK_GITTAGREGEX 1473 # packages with a valid UPSTREAM_CHECK_GITTAGREGEX
1430 # mirror of git://anongit.freedesktop.org/xorg/driver/xf86-video-omap since network issues interfered with testing 1474 # mirror of git://anongit.freedesktop.org/xorg/driver/xf86-video-omap since network issues interfered with testing
@@ -1440,98 +1484,126 @@ class FetchLatestVersionTest(FetcherTest):
1440 : "0.28.0", 1484 : "0.28.0",
1441 } 1485 }
1442 1486
1487 WgetTestData = collections.namedtuple("WgetTestData", ["pn", "path", "pv", "check_uri", "check_regex"], defaults=[None, None, None])
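              # The namedtuple defaults bind to the rightmost fields, so pv, check_uri
              # and check_regex are optional; e.g. a hypothetical
              # WgetTestData("foo", "/foo-1.0.tar.gz") expands to
              # pv=None, check_uri=None, check_regex=None.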
1443 test_wget_uris = { 1488 test_wget_uris = {
1444 # 1489 #
1445 # packages with versions inside directory name 1490 # packages with versions inside directory name
1446 # 1491 #
1447 # http://kernel.org/pub/linux/utils/util-linux/v2.23/util-linux-2.24.2.tar.bz2 1492 # http://kernel.org/pub/linux/utils/util-linux/v2.23/util-linux-2.24.2.tar.bz2
1448 ("util-linux", "/pub/linux/utils/util-linux/v2.23/util-linux-2.24.2.tar.bz2", "", "") 1493 WgetTestData("util-linux", "/pub/linux/utils/util-linux/v2.23/util-linux-2.24.2.tar.bz2")
1449 : "2.24.2", 1494 : "2.24.2",
1450 # http://www.abisource.com/downloads/enchant/1.6.0/enchant-1.6.0.tar.gz 1495 # http://www.abisource.com/downloads/enchant/1.6.0/enchant-1.6.0.tar.gz
1451 ("enchant", "/downloads/enchant/1.6.0/enchant-1.6.0.tar.gz", "", "") 1496 WgetTestData("enchant", "/downloads/enchant/1.6.0/enchant-1.6.0.tar.gz")
1452 : "1.6.0", 1497 : "1.6.0",
1453 # http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz 1498 # http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz
1454 ("cmake", "/files/v2.8/cmake-2.8.12.1.tar.gz", "", "") 1499 WgetTestData("cmake", "/files/v2.8/cmake-2.8.12.1.tar.gz")
1455 : "2.8.12.1", 1500 : "2.8.12.1",
1456 # https://download.gnome.org/sources/libxml2/2.9/libxml2-2.9.14.tar.xz 1501 # https://download.gnome.org/sources/libxml2/2.9/libxml2-2.9.14.tar.xz
1457 ("libxml2", "/software/libxml2/2.9/libxml2-2.9.14.tar.xz", "", "") 1502 WgetTestData("libxml2", "/software/libxml2/2.9/libxml2-2.9.14.tar.xz")
1458 : "2.10.3", 1503 : "2.10.3",
1459 # 1504 #
1460 # packages with versions only in current directory 1505 # packages with versions only in current directory
1461 # 1506 #
1462 # https://downloads.yoctoproject.org/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2 1507 # https://downloads.yoctoproject.org/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2
1463 ("eglic", "/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2", "", "") 1508 WgetTestData("eglic", "/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2")
1464 : "2.19", 1509 : "2.19",
1465 # https://downloads.yoctoproject.org/releases/gnu-config/gnu-config-20120814.tar.bz2 1510 # https://downloads.yoctoproject.org/releases/gnu-config/gnu-config-20120814.tar.bz2
1466 ("gnu-config", "/releases/gnu-config/gnu-config-20120814.tar.bz2", "", "") 1511 WgetTestData("gnu-config", "/releases/gnu-config/gnu-config-20120814.tar.bz2")
1467 : "20120814", 1512 : "20120814",
1468 # 1513 #
1469 # packages with "99" in the name of possible version 1514 # packages with "99" in the name of possible version
1470 # 1515 #
1471 # http://freedesktop.org/software/pulseaudio/releases/pulseaudio-4.0.tar.xz 1516 # http://freedesktop.org/software/pulseaudio/releases/pulseaudio-4.0.tar.xz
1472 ("pulseaudio", "/software/pulseaudio/releases/pulseaudio-4.0.tar.xz", "", "") 1517 WgetTestData("pulseaudio", "/software/pulseaudio/releases/pulseaudio-4.0.tar.xz")
1473 : "5.0", 1518 : "5.0",
1474 # http://xorg.freedesktop.org/releases/individual/xserver/xorg-server-1.15.1.tar.bz2 1519 # http://xorg.freedesktop.org/releases/individual/xserver/xorg-server-1.15.1.tar.bz2
1475 ("xserver-xorg", "/releases/individual/xserver/xorg-server-1.15.1.tar.bz2", "", "") 1520 WgetTestData("xserver-xorg", "/releases/individual/xserver/xorg-server-1.15.1.tar.bz2")
1476 : "1.15.1", 1521 : "1.15.1",
1477 # 1522 #
1478 # packages with valid UPSTREAM_CHECK_URI and UPSTREAM_CHECK_REGEX 1523 # packages with valid UPSTREAM_CHECK_URI and UPSTREAM_CHECK_REGEX
1479 # 1524 #
1480 # http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2 1525 # http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2
1481 # https://github.com/apple/cups/releases 1526 # https://github.com/apple/cups/releases
1482 ("cups", "/software/1.7.2/cups-1.7.2-source.tar.bz2", "/apple/cups/releases", r"(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz") 1527 WgetTestData("cups", "/software/1.7.2/cups-1.7.2-source.tar.bz2", check_uri="/apple/cups/releases", check_regex=r"(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz")
1483 : "2.0.0", 1528 : "2.0.0",
1484 # http://download.oracle.com/berkeley-db/db-5.3.21.tar.gz 1529 # http://download.oracle.com/berkeley-db/db-5.3.21.tar.gz
1485 # http://ftp.debian.org/debian/pool/main/d/db5.3/ 1530 # http://ftp.debian.org/debian/pool/main/d/db5.3/
1486 ("db", "/berkeley-db/db-5.3.21.tar.gz", "/debian/pool/main/d/db5.3/", r"(?P<name>db5\.3_)(?P<pver>\d+(\.\d+)+).+\.orig\.tar\.xz") 1531 WgetTestData("db", "/berkeley-db/db-5.3.21.tar.gz", check_uri="/debian/pool/main/d/db5.3/", check_regex=r"(?P<name>db5\.3_)(?P<pver>\d+(\.\d+)+).+\.orig\.tar\.xz")
1487 : "5.3.10", 1532 : "5.3.10",
1488 # 1533 #
1489 # packages where the tarball compression changed in the new version 1534 # packages where the tarball compression changed in the new version
1490 # 1535 #
1491 # http://ftp.debian.org/debian/pool/main/m/minicom/minicom_2.7.1.orig.tar.gz 1536 # http://ftp.debian.org/debian/pool/main/m/minicom/minicom_2.7.1.orig.tar.gz
1492 ("minicom", "/debian/pool/main/m/minicom/minicom_2.7.1.orig.tar.gz", "", "") 1537 WgetTestData("minicom", "/debian/pool/main/m/minicom/minicom_2.7.1.orig.tar.gz")
1493 : "2.8", 1538 : "2.8",
1539
1540 #
1541 # packages where the path doesn't actually contain the filename, so downloadfilename should be respected
1542 #
1543 WgetTestData("miniupnpd", "/software/miniupnp/download.php?file=miniupnpd_2.1.20191006.tar.gz;downloadfilename=miniupnpd_2.1.20191006.tar.gz", pv="2.1.20191006", check_uri="/software/miniupnp/download.php", check_regex=r"miniupnpd-(?P<pver>\d+(\.\d+)+)\.tar")
1544 : "2.3.7",
1494 } 1545 }
1495 1546
1547 test_crate_uris = {
1548 # basic example; version pattern "A.B.C+cargo-D.E.F"
1549 ("cargo-c", "crate://crates.io/cargo-c/0.9.18+cargo-0.69")
1550 : "0.9.29"
1551 }
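              # crate:// URLs take the form crate://<registry>/<name>/<version>; the
              # expected value only needs to stay <= the live crates.io version, which
              # the vercmp_string check in the test below allows for.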
1552
1496 @skipIfNoNetwork() 1553 @skipIfNoNetwork()
1497 def test_git_latest_versionstring(self): 1554 def test_git_latest_versionstring(self):
1498 for k, v in self.test_git_uris.items(): 1555 for k, v in self.test_git_uris.items():
1499 self.d.setVar("PN", k[0]) 1556 with self.subTest(pn=k[0]):
1500 self.d.setVar("SRCREV", k[2]) 1557 self.d.setVar("PN", k[0])
1501 self.d.setVar("UPSTREAM_CHECK_GITTAGREGEX", k[3]) 1558 self.d.setVar("SRCREV", k[2])
1502 ud = bb.fetch2.FetchData(k[1], self.d) 1559 self.d.setVar("UPSTREAM_CHECK_GITTAGREGEX", k[3])
1503 pupver= ud.method.latest_versionstring(ud, self.d) 1560 ud = bb.fetch2.FetchData(k[1], self.d)
1504 verstring = pupver[0] 1561 pupver= ud.method.latest_versionstring(ud, self.d)
1505 self.assertTrue(verstring, msg="Could not find upstream version for %s" % k[0]) 1562 verstring = pupver[0]
1506 r = bb.utils.vercmp_string(v, verstring) 1563 self.assertTrue(verstring, msg="Could not find upstream version for %s" % k[0])
1507 self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring)) 1564 r = bb.utils.vercmp_string(v, verstring)
1508 if k[4]: 1565 self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))
1509 r = bb.utils.vercmp_string(verstring, k[4]) 1566 if k[4]:
1510 self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], verstring, k[4])) 1567 r = bb.utils.vercmp_string(verstring, k[4])
1568 self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], verstring, k[4]))
1511 1569
1512 def test_wget_latest_versionstring(self): 1570 def test_wget_latest_versionstring(self):
1513 testdata = os.path.dirname(os.path.abspath(__file__)) + "/fetch-testdata" 1571 testdata = os.path.dirname(os.path.abspath(__file__)) + "/fetch-testdata"
1514 server = HTTPService(testdata) 1572 server = HTTPService(testdata, host="127.0.0.1")
1515 server.start() 1573 server.start()
1516 port = server.port 1574 port = server.port
1517 try: 1575 try:
1518 for k, v in self.test_wget_uris.items(): 1576 for data, v in self.test_wget_uris.items():
1577 with self.subTest(pn=data.pn):
1578 self.d.setVar("PN", data.pn)
1579 self.d.setVar("PV", data.pv)
1580 if data.check_uri:
1581 checkuri = "http://127.0.0.1:%s/%s" % (port, data.check_uri)
1582 self.d.setVar("UPSTREAM_CHECK_URI", checkuri)
1583 if data.check_regex:
1584 self.d.setVar("UPSTREAM_CHECK_REGEX", data.check_regex)
1585
1586 url = "http://127.0.0.1:%s/%s" % (port, data.path)
1587 ud = bb.fetch2.FetchData(url, self.d)
1588 pupver = ud.method.latest_versionstring(ud, self.d)
1589 verstring = pupver[0]
1590 self.assertTrue(verstring, msg="Could not find upstream version for %s" % data.pn)
1591 r = bb.utils.vercmp_string(v, verstring)
1592 self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (data.pn, v, verstring))
1593 finally:
1594 server.stop()
1595
1596 @skipIfNoNetwork()
1597 def test_crate_latest_versionstring(self):
1598 for k, v in self.test_crate_uris.items():
1599 with self.subTest(pn=k[0]):
1519 self.d.setVar("PN", k[0]) 1600 self.d.setVar("PN", k[0])
1520 checkuri = "" 1601 ud = bb.fetch2.FetchData(k[1], self.d)
1521 if k[2]:
1522 checkuri = "http://localhost:%s/" % port + k[2]
1523 self.d.setVar("UPSTREAM_CHECK_URI", checkuri)
1524 self.d.setVar("UPSTREAM_CHECK_REGEX", k[3])
1525 url = "http://localhost:%s/" % port + k[1]
1526 ud = bb.fetch2.FetchData(url, self.d)
1527 pupver = ud.method.latest_versionstring(ud, self.d) 1602 pupver = ud.method.latest_versionstring(ud, self.d)
1528 verstring = pupver[0] 1603 verstring = pupver[0]
1529 self.assertTrue(verstring, msg="Could not find upstream version for %s" % k[0]) 1604 self.assertTrue(verstring, msg="Could not find upstream version for %s" % k[0])
1530 r = bb.utils.vercmp_string(v, verstring) 1605 r = bb.utils.vercmp_string(v, verstring)
1531 self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring)) 1606 self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))
1532 finally:
1533 server.stop()
1534
1535 1607
1536class FetchCheckStatusTest(FetcherTest): 1608class FetchCheckStatusTest(FetcherTest):
1537 test_wget_uris = ["https://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz", 1609 test_wget_uris = ["https://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz",
@@ -1715,6 +1787,8 @@ class GitShallowTest(FetcherTest):
1715 if cwd is None: 1787 if cwd is None:
1716 cwd = self.gitdir 1788 cwd = self.gitdir
1717 actual_refs = self.git(['for-each-ref', '--format=%(refname)'], cwd=cwd).splitlines() 1789 actual_refs = self.git(['for-each-ref', '--format=%(refname)'], cwd=cwd).splitlines()
 1790 # Resolve references into the same format as the comparison (needed by git 2.48 onwards)
1791 actual_refs = self.git(['rev-parse', '--symbolic-full-name'] + actual_refs, cwd=cwd).splitlines()
1718 full_expected = self.git(['rev-parse', '--symbolic-full-name'] + expected_refs, cwd=cwd).splitlines() 1792 full_expected = self.git(['rev-parse', '--symbolic-full-name'] + expected_refs, cwd=cwd).splitlines()
1719 self.assertEqual(sorted(set(full_expected)), sorted(set(actual_refs))) 1793 self.assertEqual(sorted(set(full_expected)), sorted(set(actual_refs)))
1720 1794
@@ -1761,7 +1835,6 @@ class GitShallowTest(FetcherTest):
1761 def fetch_shallow(self, uri=None, disabled=False, keepclone=False): 1835 def fetch_shallow(self, uri=None, disabled=False, keepclone=False):
1762 """Fetch a uri, generating a shallow tarball, then unpack using it""" 1836 """Fetch a uri, generating a shallow tarball, then unpack using it"""
1763 fetcher, ud = self.fetch_and_unpack(uri) 1837 fetcher, ud = self.fetch_and_unpack(uri)
1764 assert os.path.exists(ud.clonedir), 'Git clone in DLDIR (%s) does not exist for uri %s' % (ud.clonedir, uri)
1765 1838
1766 # Confirm that the unpacked repo is unshallow 1839 # Confirm that the unpacked repo is unshallow
1767 if not disabled: 1840 if not disabled:
@@ -1769,9 +1842,10 @@ class GitShallowTest(FetcherTest):
1769 1842
1770 # fetch and unpack, from the shallow tarball 1843 # fetch and unpack, from the shallow tarball
1771 bb.utils.remove(self.gitdir, recurse=True) 1844 bb.utils.remove(self.gitdir, recurse=True)
1772 bb.process.run('chmod u+w -R "%s"' % ud.clonedir) 1845 if os.path.exists(ud.clonedir):
1773 bb.utils.remove(ud.clonedir, recurse=True) 1846 bb.process.run('chmod u+w -R "%s"' % ud.clonedir)
1774 bb.utils.remove(ud.clonedir.replace('gitsource', 'gitsubmodule'), recurse=True) 1847 bb.utils.remove(ud.clonedir, recurse=True)
1848 bb.utils.remove(ud.clonedir.replace('gitsource', 'gitsubmodule'), recurse=True)
1775 1849
1776 # confirm that the unpacked repo is used when no git clone or git 1850 # confirm that the unpacked repo is used when no git clone or git
1777 # mirror tarball is available 1851 # mirror tarball is available
@@ -1854,7 +1928,12 @@ class GitShallowTest(FetcherTest):
1854 self.add_empty_file('c') 1928 self.add_empty_file('c')
1855 self.assertRevCount(3, cwd=self.srcdir) 1929 self.assertRevCount(3, cwd=self.srcdir)
1856 1930
1931 # Clone without tarball
1932 self.d.setVar('BB_GIT_SHALLOW', '0')
1933 fetcher, ud = self.fetch()
1934
1857 # Clone and generate mirror tarball 1935 # Clone and generate mirror tarball
1936 self.d.setVar('BB_GIT_SHALLOW', '1')
1858 fetcher, ud = self.fetch() 1937 fetcher, ud = self.fetch()
1859 1938
1860 # Ensure we have a current mirror tarball, but an out of date clone 1939 # Ensure we have a current mirror tarball, but an out of date clone
@@ -1866,6 +1945,7 @@ class GitShallowTest(FetcherTest):
1866 fetcher, ud = self.fetch() 1945 fetcher, ud = self.fetch()
1867 fetcher.unpack(self.d.getVar('WORKDIR')) 1946 fetcher.unpack(self.d.getVar('WORKDIR'))
1868 self.assertRevCount(1) 1947 self.assertRevCount(1)
1948 assert os.path.exists(os.path.join(self.d.getVar('WORKDIR'), 'git', 'c'))
1869 1949
1870 def test_shallow_single_branch_no_merge(self): 1950 def test_shallow_single_branch_no_merge(self):
1871 self.add_empty_file('a') 1951 self.add_empty_file('a')
@@ -1963,7 +2043,7 @@ class GitShallowTest(FetcherTest):
1963 self.git('submodule update', cwd=self.srcdir) 2043 self.git('submodule update', cwd=self.srcdir)
1964 self.git('commit -m submodule -a', cwd=self.srcdir) 2044 self.git('commit -m submodule -a', cwd=self.srcdir)
1965 2045
1966 uri = 'gitsm://%s;protocol=file;subdir=${S}' % self.srcdir 2046 uri = 'gitsm://%s;protocol=file;subdir=${S};branch=master' % self.srcdir
1967 2047
1968 # Fetch once to generate the shallow tarball 2048 # Fetch once to generate the shallow tarball
1969 fetcher, ud = self.fetch(uri) 2049 fetcher, ud = self.fetch(uri)
@@ -2004,70 +2084,17 @@ class GitShallowTest(FetcherTest):
2004 assert './.git/annex/' in bb.process.run('tar -tzf %s' % os.path.join(self.dldir, ud.mirrortarballs[0]))[0] 2084 assert './.git/annex/' in bb.process.run('tar -tzf %s' % os.path.join(self.dldir, ud.mirrortarballs[0]))[0]
2005 assert os.path.exists(os.path.join(self.gitdir, 'c')) 2085 assert os.path.exists(os.path.join(self.gitdir, 'c'))
2006 2086
2007 def test_shallow_multi_one_uri(self):
2008 # Create initial git repo
2009 self.add_empty_file('a')
2010 self.add_empty_file('b')
2011 self.git('checkout -b a_branch', cwd=self.srcdir)
2012 self.add_empty_file('c')
2013 self.add_empty_file('d')
2014 self.git('checkout master', cwd=self.srcdir)
2015 self.git('tag v0.0 a_branch', cwd=self.srcdir)
2016 self.add_empty_file('e')
2017 self.git('merge --no-ff --no-edit a_branch', cwd=self.srcdir)
2018 self.add_empty_file('f')
2019 self.assertRevCount(7, cwd=self.srcdir)
2020
2021 uri = self.d.getVar('SRC_URI').split()[0]
2022 uri = '%s;branch=master,a_branch;name=master,a_branch' % uri
2023
2024 self.d.setVar('BB_GIT_SHALLOW_DEPTH', '0')
2025 self.d.setVar('BB_GIT_SHALLOW_REVS', 'v0.0')
2026 self.d.setVar('SRCREV_master', '${AUTOREV}')
2027 self.d.setVar('SRCREV_a_branch', '${AUTOREV}')
2028
2029 self.fetch_shallow(uri)
2030
2031 self.assertRevCount(5)
2032 self.assertRefs(['master', 'origin/master', 'origin/a_branch'])
2033
2034 def test_shallow_multi_one_uri_depths(self):
2035 # Create initial git repo
2036 self.add_empty_file('a')
2037 self.add_empty_file('b')
2038 self.git('checkout -b a_branch', cwd=self.srcdir)
2039 self.add_empty_file('c')
2040 self.add_empty_file('d')
2041 self.git('checkout master', cwd=self.srcdir)
2042 self.add_empty_file('e')
2043 self.git('merge --no-ff --no-edit a_branch', cwd=self.srcdir)
2044 self.add_empty_file('f')
2045 self.assertRevCount(7, cwd=self.srcdir)
2046
2047 uri = self.d.getVar('SRC_URI').split()[0]
2048 uri = '%s;branch=master,a_branch;name=master,a_branch' % uri
2049
2050 self.d.setVar('BB_GIT_SHALLOW_DEPTH', '0')
2051 self.d.setVar('BB_GIT_SHALLOW_DEPTH_master', '3')
2052 self.d.setVar('BB_GIT_SHALLOW_DEPTH_a_branch', '1')
2053 self.d.setVar('SRCREV_master', '${AUTOREV}')
2054 self.d.setVar('SRCREV_a_branch', '${AUTOREV}')
2055
2056 self.fetch_shallow(uri)
2057
2058 self.assertRevCount(4, ['--all'])
2059 self.assertRefs(['master', 'origin/master', 'origin/a_branch'])
2060
2061 def test_shallow_clone_preferred_over_shallow(self): 2087 def test_shallow_clone_preferred_over_shallow(self):
2062 self.add_empty_file('a') 2088 self.add_empty_file('a')
2063 self.add_empty_file('b') 2089 self.add_empty_file('b')
2064 2090
2065 # Fetch once to generate the shallow tarball 2091 # Fetch once to generate the shallow tarball
2092 self.d.setVar('BB_GIT_SHALLOW', '0')
2066 fetcher, ud = self.fetch() 2093 fetcher, ud = self.fetch()
2067 assert os.path.exists(os.path.join(self.dldir, ud.mirrortarballs[0]))
2068 2094
2069 # Fetch and unpack with both the clonedir and shallow tarball available 2095 # Fetch and unpack with both the clonedir and shallow tarball available
2070 bb.utils.remove(self.gitdir, recurse=True) 2096 bb.utils.remove(self.gitdir, recurse=True)
2097 self.d.setVar('BB_GIT_SHALLOW', '1')
2071 fetcher, ud = self.fetch_and_unpack() 2098 fetcher, ud = self.fetch_and_unpack()
2072 2099
2073 # The unpacked tree should *not* be shallow 2100 # The unpacked tree should *not* be shallow
@@ -2175,7 +2202,7 @@ class GitShallowTest(FetcherTest):
2175 2202
2176 self.fetch_shallow() 2203 self.fetch_shallow()
2177 2204
2178 self.assertRevCount(5) 2205 self.assertRevCount(2)
2179 2206
2180 def test_shallow_invalid_revs(self): 2207 def test_shallow_invalid_revs(self):
2181 self.add_empty_file('a') 2208 self.add_empty_file('a')
@@ -2194,7 +2221,10 @@ class GitShallowTest(FetcherTest):
2194 self.git('tag v0.0 master', cwd=self.srcdir) 2221 self.git('tag v0.0 master', cwd=self.srcdir)
2195 self.d.setVar('BB_GIT_SHALLOW_DEPTH', '0') 2222 self.d.setVar('BB_GIT_SHALLOW_DEPTH', '0')
2196 self.d.setVar('BB_GIT_SHALLOW_REVS', 'v0.0') 2223 self.d.setVar('BB_GIT_SHALLOW_REVS', 'v0.0')
2197 self.fetch_shallow() 2224
2225 with self.assertRaises(bb.fetch2.FetchError), self.assertLogs("BitBake.Fetcher", level="ERROR") as cm:
2226 self.fetch_shallow()
2227 self.assertIn("fatal: no commits selected for shallow requests", cm.output[0])
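              # assertLogs() records entries as "LEVEL:logger:message" strings in
              # cm.output, so the first ERROR record is expected to carry the git
              # error text checked above.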
2198 2228
2199 def test_shallow_fetch_missing_revs_fails(self): 2229 def test_shallow_fetch_missing_revs_fails(self):
2200 self.add_empty_file('a') 2230 self.add_empty_file('a')
@@ -2208,6 +2238,33 @@ class GitShallowTest(FetcherTest):
2208 self.assertIn("Unable to find revision v0.0 even from upstream", cm.output[0]) 2238 self.assertIn("Unable to find revision v0.0 even from upstream", cm.output[0])
2209 2239
2210 @skipIfNoNetwork() 2240 @skipIfNoNetwork()
2241 def test_git_shallow_fetch_premirrors(self):
2242 url = "git://git.openembedded.org/bitbake;branch=master;protocol=https"
2243
2244 # Create a separate premirror directory within tempdir
2245 premirror = os.path.join(self.tempdir, "premirror")
2246 os.mkdir(premirror)
2247
2248 # Fetch a non-shallow clone into the premirror subdir
2249 self.d.setVar('BB_GIT_SHALLOW', '0')
2250 self.d.setVar("DL_DIR", premirror)
2251 fetcher, ud = self.fetch(url)
2252
2253 # Fetch a shallow clone from the premirror subdir with unpacking
2254 # using the original recipe URL and the premirror mapping
2255 self.d.setVar('BB_GIT_SHALLOW', '1')
2256 self.d.setVar("DL_DIR", self.dldir)
2257 self.d.setVar('BB_FETCH_PREMIRRORONLY', '1')
2258 self.d.setVar('BB_NO_NETWORK', '1')
2259 self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '0')
2260 self.d.setVar("PREMIRRORS", "git://.*/.* git://{0};protocol=file".format(premirror + "/git2/" + ud.host + ud.path.replace("/", ".")))
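              # With ud.host = "git.openembedded.org" and ud.path = "/bitbake", the
              # mapping above expands to
              # git://<premirror>/git2/git.openembedded.org.bitbake;protocol=file,
              # i.e. the bare clone created by the non-shallow fetch above.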
2261 fetcher = self.fetch_and_unpack(url)
2262
2263 # Verify that the unpacked sources are shallow clones
2264 self.assertRevCount(1)
2265 assert os.path.exists(os.path.join(self.gitdir, '.git', 'shallow'))
2266
2267 @skipIfNoNetwork()
2211 def test_bitbake(self): 2268 def test_bitbake(self):
2212 self.git('remote add --mirror=fetch origin https://github.com/openembedded/bitbake', cwd=self.srcdir) 2269 self.git('remote add --mirror=fetch origin https://github.com/openembedded/bitbake', cwd=self.srcdir)
2213 self.git('config core.bare true', cwd=self.srcdir) 2270 self.git('config core.bare true', cwd=self.srcdir)
@@ -2225,7 +2282,7 @@ class GitShallowTest(FetcherTest):
2225 revs = len(self.git('rev-list master').splitlines()) 2282 revs = len(self.git('rev-list master').splitlines())
2226 self.assertNotEqual(orig_revs, revs) 2283 self.assertNotEqual(orig_revs, revs)
2227 self.assertRefs(['master', 'origin/master']) 2284 self.assertRefs(['master', 'origin/master'])
2228 self.assertRevCount(orig_revs - 1758) 2285 self.assertRevCount(orig_revs - 1760)
2229 2286
2230 def test_that_unpack_throws_an_error_when_the_git_clone_nor_shallow_tarball_exist(self): 2287 def test_that_unpack_throws_an_error_when_the_git_clone_nor_shallow_tarball_exist(self):
2231 self.add_empty_file('a') 2288 self.add_empty_file('a')
@@ -2239,23 +2296,33 @@ class GitShallowTest(FetcherTest):
2239 self.assertIn("No up to date source found", context.exception.msg) 2296 self.assertIn("No up to date source found", context.exception.msg)
2240 self.assertIn("clone directory not available or not up to date", context.exception.msg) 2297 self.assertIn("clone directory not available or not up to date", context.exception.msg)
2241 2298
2242 @skipIfNoNetwork() 2299 def test_shallow_check_is_shallow(self):
2243 def test_that_unpack_does_work_when_using_git_shallow_tarball_but_tarball_is_not_available(self): 2300 self.add_empty_file('a')
2244 self.d.setVar('SRCREV', 'e5939ff608b95cdd4d0ab0e1935781ab9a276ac0') 2301 self.add_empty_file('b')
2245 self.d.setVar('BB_GIT_SHALLOW', '1')
2246 self.d.setVar('BB_GENERATE_SHALLOW_TARBALLS', '1')
2247 fetcher = bb.fetch.Fetch(["git://git.yoctoproject.org/fstests;branch=master;protocol=https"], self.d)
2248 fetcher.download()
2249 2302
2250 bb.utils.remove(self.dldir + "/*.tar.gz") 2303 # Fetch and unpack without the clonedir and *only* shallow tarball available
2251 fetcher.unpack(self.unpackdir) 2304 bb.utils.remove(self.gitdir, recurse=True)
2305 fetcher, ud = self.fetch_and_unpack()
2252 2306
2253 dir = os.listdir(self.unpackdir + "/git/") 2307 # The unpacked tree *should* be shallow
2254 self.assertIn("fstests.doap", dir) 2308 self.assertRevCount(1)
2309 assert os.path.exists(os.path.join(self.gitdir, '.git', 'shallow'))
2310
2311 def test_shallow_succeeds_with_tag_containing_slash(self):
2312 self.add_empty_file('a')
2313 self.add_empty_file('b')
2314 self.git('tag t1/t2/t3', cwd=self.srcdir)
2315 self.assertRevCount(2, cwd=self.srcdir)
2316
2317 srcrev = self.git('rev-parse HEAD', cwd=self.srcdir).strip()
2318 self.d.setVar('SRCREV', srcrev)
2319 uri = self.d.getVar('SRC_URI').split()[0]
2320 uri = '%s;tag=t1/t2/t3' % uri
2321 self.fetch_shallow(uri)
2322 self.assertRevCount(1)
2255 2323
2256class GitLfsTest(FetcherTest): 2324class GitLfsTest(FetcherTest):
2257 def skipIfNoGitLFS(): 2325 def skipIfNoGitLFS():
2258 import shutil
2259 if not shutil.which('git-lfs'): 2326 if not shutil.which('git-lfs'):
2260 return unittest.skip('git-lfs not installed') 2327 return unittest.skip('git-lfs not installed')
2261 return lambda f: f 2328 return lambda f: f
@@ -2279,12 +2346,18 @@ class GitLfsTest(FetcherTest):
2279 self.git_init(cwd=self.srcdir) 2346 self.git_init(cwd=self.srcdir)
2280 self.commit_file('.gitattributes', '*.mp3 filter=lfs -text') 2347 self.commit_file('.gitattributes', '*.mp3 filter=lfs -text')
2281 2348
2282 def commit_file(self, filename, content): 2349 def commit(self, *, cwd=None):
2283 with open(os.path.join(self.srcdir, filename), "w") as f: 2350 cwd = cwd or self.srcdir
2351 self.git(["commit", "-m", "Change"], cwd=cwd)
2352 return self.git(["rev-parse", "HEAD"], cwd=cwd).strip()
2353
2354 def commit_file(self, filename, content, *, cwd=None):
2355 cwd = cwd or self.srcdir
2356
2357 with open(os.path.join(cwd, filename), "w") as f:
2284 f.write(content) 2358 f.write(content)
2285 self.git(["add", filename], cwd=self.srcdir) 2359 self.git(["add", filename], cwd=cwd)
2286 self.git(["commit", "-m", "Change"], cwd=self.srcdir) 2360 return self.commit(cwd=cwd)
2287 return self.git(["rev-parse", "HEAD"], cwd=self.srcdir).strip()
2288 2361
2289 def fetch(self, uri=None, download=True): 2362 def fetch(self, uri=None, download=True):
2290 uris = self.d.getVar('SRC_URI').split() 2363 uris = self.d.getVar('SRC_URI').split()
@@ -2305,25 +2378,112 @@ class GitLfsTest(FetcherTest):
2305 return unpacked_lfs_file 2378 return unpacked_lfs_file
2306 2379
2307 @skipIfNoGitLFS() 2380 @skipIfNoGitLFS()
2381 def test_gitsm_lfs(self):
2382 """Test that the gitsm fetcher caches objects stored via LFS"""
2383 self.git(["lfs", "install", "--local"], cwd=self.srcdir)
2384
2385 def fetch_and_verify(revision, filename, content):
2386 self.d.setVar('SRCREV', revision)
2387 fetcher, ud = self.fetch()
2388
2389 with hide_directory(submoduledir), hide_directory(self.srcdir):
2390 workdir = self.d.getVar('WORKDIR')
2391 fetcher.unpack(workdir)
2392
2393 with open(os.path.join(workdir, "git", filename)) as f:
2394 self.assertEqual(f.read(), content)
2395
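              # hide_directory() is assumed to be a module-level helper equivalent to
              # the rename-away/rename-back context manager this file previously used
              # inline; a minimal sketch under that assumption:
              #
              #   @contextlib.contextmanager
              #   def hide_directory(directory):
              #       """Rename a directory away so git/git-lfs cannot reach it."""
              #       temp_name = directory + ".bak"
              #       os.rename(directory, temp_name)
              #       try:
              #           yield
              #       finally:
              #           os.rename(temp_name, directory)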
2396 # Create the git repository that will later be used as a submodule
2397 submoduledir = self.tempdir + "/submodule"
2398 bb.utils.mkdirhier(submoduledir)
2399 self.git_init(submoduledir)
2400 self.git(["lfs", "install", "--local"], cwd=submoduledir)
2401 self.commit_file('.gitattributes', '*.mp3 filter=lfs -text', cwd=submoduledir)
2402
2403 submodule_commit_1 = self.commit_file("a.mp3", "submodule version 1", cwd=submoduledir)
2404 _ = self.commit_file("a.mp3", "submodule version 2", cwd=submoduledir)
2405
2406 # Add the submodule to the repository at its current HEAD revision
2407 self.git(["-c", "protocol.file.allow=always", "submodule", "add", submoduledir, "submodule"],
2408 cwd=self.srcdir)
2409 base_commit_1 = self.commit()
2410
2411 # Let the submodule point at a different revision
2412 self.git(["checkout", submodule_commit_1], self.srcdir + "/submodule")
2413 self.git(["add", "submodule"], cwd=self.srcdir)
2414 base_commit_2 = self.commit()
2415
2416 # Add a LFS file to the repository
2417 base_commit_3 = self.commit_file("a.mp3", "version 1")
2418 # Update the added LFS file
2419 base_commit_4 = self.commit_file("a.mp3", "version 2")
2420
2421 self.d.setVar('SRC_URI', "gitsm://%s;protocol=file;lfs=1;branch=master" % self.srcdir)
2422
2423 # Verify that LFS objects referenced from submodules are fetched and checked out
2424 fetch_and_verify(base_commit_1, "submodule/a.mp3", "submodule version 2")
 2425 # Verify that the repository inside the download cache of a submodule is extended with any
2426 # additional LFS objects needed when checking out a different revision.
2427 fetch_and_verify(base_commit_2, "submodule/a.mp3", "submodule version 1")
2428 # Verify that LFS objects referenced from the base repository are fetched and checked out
2429 fetch_and_verify(base_commit_3, "a.mp3", "version 1")
2430 # Verify that the cached repository is extended with any additional LFS objects required
2431 # when checking out a different revision.
2432 fetch_and_verify(base_commit_4, "a.mp3", "version 2")
2433
2434 @skipIfNoGitLFS()
2435 def test_gitsm_lfs_disabled(self):
2436 """Test that the gitsm fetcher does not use LFS when explicitly disabled"""
2437 self.git(["lfs", "install", "--local"], cwd=self.srcdir)
2438
2439 def fetch_and_verify(revision, filename, content):
2440 self.d.setVar('SRCREV', revision)
2441 fetcher, ud = self.fetch()
2442
2443 with hide_directory(submoduledir), hide_directory(self.srcdir):
2444 workdir = self.d.getVar('WORKDIR')
2445 fetcher.unpack(workdir)
2446
2447 with open(os.path.join(workdir, "git", filename)) as f:
2448 # Assume that LFS did not perform smudging when the expected content is
2449 # missing.
2450 self.assertNotEqual(f.read(), content)
2451
2452 # Create the git repository that will later be used as a submodule
2453 submoduledir = self.tempdir + "/submodule"
2454 bb.utils.mkdirhier(submoduledir)
2455 self.git_init(submoduledir)
2456 self.git(["lfs", "install", "--local"], cwd=submoduledir)
2457 self.commit_file('.gitattributes', '*.mp3 filter=lfs -text', cwd=submoduledir)
2458
2459 submodule_commit_1 = self.commit_file("a.mp3", "submodule version 1", cwd=submoduledir)
2460
2461 # Add the submodule to the repository at its current HEAD revision
2462 self.git(["-c", "protocol.file.allow=always", "submodule", "add", submoduledir, "submodule"],
2463 cwd=self.srcdir)
2464 base_commit_1 = self.commit()
2465
2466 # Add a LFS file to the repository
2467 base_commit_2 = self.commit_file("a.mp3", "version 1")
2468
 2469 self.d.setVar('SRC_URI', "gitsm://%s;protocol=file;branch=master;lfs=0" % self.srcdir)
2470
2471 # Verify that LFS objects referenced from submodules are not fetched nor checked out
2472 fetch_and_verify(base_commit_1, "submodule/a.mp3", "submodule version 1")
2473 # Verify that the LFS objects referenced from the base repository are not fetched nor
2474 # checked out
2475 fetch_and_verify(base_commit_2, "a.mp3", "version 1")
2476
2477 @skipIfNoGitLFS()
2308 def test_fetch_lfs_on_srcrev_change(self): 2478 def test_fetch_lfs_on_srcrev_change(self):
2309 """Test if fetch downloads missing LFS objects when a different revision within an existing repository is requested""" 2479 """Test if fetch downloads missing LFS objects when a different revision within an existing repository is requested"""
2310 self.git(["lfs", "install", "--local"], cwd=self.srcdir) 2480 self.git(["lfs", "install", "--local"], cwd=self.srcdir)
2311 2481
2312 @contextlib.contextmanager
2313 def hide_upstream_repository():
2314 """Hide the upstream repository to make sure that git lfs cannot pull from it"""
2315 temp_name = self.srcdir + ".bak"
2316 os.rename(self.srcdir, temp_name)
2317 try:
2318 yield
2319 finally:
2320 os.rename(temp_name, self.srcdir)
2321
2322 def fetch_and_verify(revision, filename, content): 2482 def fetch_and_verify(revision, filename, content):
2323 self.d.setVar('SRCREV', revision) 2483 self.d.setVar('SRCREV', revision)
2324 fetcher, ud = self.fetch() 2484 fetcher, ud = self.fetch()
2325 2485
2326 with hide_upstream_repository(): 2486 with hide_directory(self.srcdir):
2327 workdir = self.d.getVar('WORKDIR') 2487 workdir = self.d.getVar('WORKDIR')
2328 fetcher.unpack(workdir) 2488 fetcher.unpack(workdir)
2329 2489
@@ -2375,8 +2535,6 @@ class GitLfsTest(FetcherTest):
2375 2535
2376 @skipIfNoGitLFS() 2536 @skipIfNoGitLFS()
2377 def test_lfs_enabled(self): 2537 def test_lfs_enabled(self):
2378 import shutil
2379
2380 uri = 'git://%s;protocol=file;lfs=1;branch=master' % self.srcdir 2538 uri = 'git://%s;protocol=file;lfs=1;branch=master' % self.srcdir
2381 self.d.setVar('SRC_URI', uri) 2539 self.d.setVar('SRC_URI', uri)
2382 2540
@@ -2387,8 +2545,6 @@ class GitLfsTest(FetcherTest):
2387 2545
2388 @skipIfNoGitLFS() 2546 @skipIfNoGitLFS()
2389 def test_lfs_disabled(self): 2547 def test_lfs_disabled(self):
2390 import shutil
2391
2392 uri = 'git://%s;protocol=file;lfs=0;branch=master' % self.srcdir 2548 uri = 'git://%s;protocol=file;lfs=0;branch=master' % self.srcdir
2393 self.d.setVar('SRC_URI', uri) 2549 self.d.setVar('SRC_URI', uri)
2394 2550
@@ -2397,58 +2553,76 @@ class GitLfsTest(FetcherTest):
2397 fetcher, ud = self.fetch() 2553 fetcher, ud = self.fetch()
2398 fetcher.unpack(self.d.getVar('WORKDIR')) 2554 fetcher.unpack(self.d.getVar('WORKDIR'))
2399 2555
2400 def test_lfs_enabled_not_installed(self): 2556 @skipIfNoGitLFS()
2401 import shutil 2557 def test_lfs_enabled_not_installed_during_unpack(self):
2558 uri = 'git://%s;protocol=file;lfs=1;branch=master' % self.srcdir
2559 self.d.setVar('SRC_URI', uri)
2560
2561 # Careful: suppress initial attempt at downloading
2562 fetcher, ud = self.fetch(uri=None, download=False)
2563
2564 fetcher.download()
2565 # If git-lfs cannot be found, the unpack should throw an error
2566 with self.assertRaises(bb.fetch2.FetchError):
2567 with unittest.mock.patch("shutil.which", return_value=None):
2568 shutil.rmtree(self.gitdir, ignore_errors=True)
2569 fetcher.unpack(self.d.getVar('WORKDIR'))
2402 2570
2571 def test_lfs_enabled_not_installed(self):
2403 uri = 'git://%s;protocol=file;lfs=1;branch=master' % self.srcdir 2572 uri = 'git://%s;protocol=file;lfs=1;branch=master' % self.srcdir
2404 self.d.setVar('SRC_URI', uri) 2573 self.d.setVar('SRC_URI', uri)
2405 2574
2406 # Careful: suppress initial attempt at downloading 2575 # Careful: suppress initial attempt at downloading
2407 fetcher, ud = self.fetch(uri=None, download=False) 2576 fetcher, ud = self.fetch(uri=None, download=False)
2408 2577
2409 # Artificially assert that git-lfs is not installed, so 2578 # If git-lfs cannot be found, the download should throw an error
2410 # we can verify a failure to unpack in it's absence. 2579 with unittest.mock.patch("shutil.which", return_value=None):
2411 old_find_git_lfs = ud.method._find_git_lfs
2412 try:
2413 # If git-lfs cannot be found, the unpack should throw an error
2414 with self.assertRaises(bb.fetch2.FetchError): 2580 with self.assertRaises(bb.fetch2.FetchError):
2415 fetcher.download() 2581 fetcher.download()
2416 ud.method._find_git_lfs = lambda d: False
2417 shutil.rmtree(self.gitdir, ignore_errors=True)
2418 fetcher.unpack(self.d.getVar('WORKDIR'))
2419 finally:
2420 ud.method._find_git_lfs = old_find_git_lfs
2421 2582
2422 def test_lfs_disabled_not_installed(self): 2583 def test_lfs_disabled_not_installed(self):
2423 import shutil
2424
2425 uri = 'git://%s;protocol=file;lfs=0;branch=master' % self.srcdir 2584 uri = 'git://%s;protocol=file;lfs=0;branch=master' % self.srcdir
2426 self.d.setVar('SRC_URI', uri) 2585 self.d.setVar('SRC_URI', uri)
2427 2586
2428 # Careful: suppress initial attempt at downloading 2587 # Careful: suppress initial attempt at downloading
2429 fetcher, ud = self.fetch(uri=None, download=False) 2588 fetcher, ud = self.fetch(uri=None, download=False)
2430 2589
2431 # Artificially assert that git-lfs is not installed, so 2590 # Even if git-lfs cannot be found, the download / unpack should be successful
2432 # we can verify a failure to unpack in it's absence. 2591 with unittest.mock.patch("shutil.which", return_value=None):
2433 old_find_git_lfs = ud.method._find_git_lfs 2592 fetcher.download()
2434 try: 2593 shutil.rmtree(self.gitdir, ignore_errors=True)
2435 # Even if git-lfs cannot be found, the unpack should be successful 2594 fetcher.unpack(self.d.getVar('WORKDIR'))
2595
2596 def test_lfs_enabled_not_installed_but_not_needed(self):
2597 srcdir = os.path.join(self.tempdir, "emptygit")
2598 bb.utils.mkdirhier(srcdir)
2599 self.git_init(srcdir)
2600 self.commit_file("test", "test content", cwd=srcdir)
2601
2602 uri = 'git://%s;protocol=file;lfs=1;branch=master' % srcdir
2603 self.d.setVar('SRC_URI', uri)
2604
2605 # Careful: suppress initial attempt at downloading
2606 fetcher, ud = self.fetch(uri=None, download=False)
2607
 2608 # It shouldn't matter that git-lfs cannot be found, as the repository configuration does not
2609 # specify any LFS filters.
2610 with unittest.mock.patch("shutil.which", return_value=None):
2436 fetcher.download() 2611 fetcher.download()
2437 ud.method._find_git_lfs = lambda d: False
2438 shutil.rmtree(self.gitdir, ignore_errors=True) 2612 shutil.rmtree(self.gitdir, ignore_errors=True)
2439 fetcher.unpack(self.d.getVar('WORKDIR')) 2613 fetcher.unpack(self.d.getVar('WORKDIR'))
2440 finally:
2441 ud.method._find_git_lfs = old_find_git_lfs
2442 2614
2443class GitURLWithSpacesTest(FetcherTest): 2615class GitURLWithSpacesTest(FetcherTest):
2444 test_git_urls = { 2616 test_git_urls = {
2445 "git://tfs-example.org:22/tfs/example%20path/example.git;branch=master" : { 2617 "git://tfs-example.org:22/tfs/example%20path/example.git;branch=master" : {
2446 'url': 'git://tfs-example.org:22/tfs/example%20path/example.git;branch=master', 2618 'url': 'git://tfs-example.org:22/tfs/example%20path/example.git;branch=master',
2619 'repo_url': 'git://tfs-example.org:22/tfs/example%20path/example.git',
2447 'gitsrcname': 'tfs-example.org.22.tfs.example_path.example.git', 2620 'gitsrcname': 'tfs-example.org.22.tfs.example_path.example.git',
2448 'path': '/tfs/example path/example.git' 2621 'path': '/tfs/example path/example.git'
2449 }, 2622 },
2450 "git://tfs-example.org:22/tfs/example%20path/example%20repo.git;branch=master" : { 2623 "git://tfs-example.org:22/tfs/example%20path/example%20repo.git;branch=master" : {
2451 'url': 'git://tfs-example.org:22/tfs/example%20path/example%20repo.git;branch=master', 2624 'url': 'git://tfs-example.org:22/tfs/example%20path/example%20repo.git;branch=master',
2625 'repo_url': 'git://tfs-example.org:22/tfs/example%20path/example%20repo.git',
2452 'gitsrcname': 'tfs-example.org.22.tfs.example_path.example_repo.git', 2626 'gitsrcname': 'tfs-example.org.22.tfs.example_path.example_repo.git',
2453 'path': '/tfs/example path/example repo.git' 2627 'path': '/tfs/example path/example repo.git'
2454 } 2628 }
@@ -2471,6 +2645,7 @@ class GitURLWithSpacesTest(FetcherTest):
2471 self.assertEqual(ud.lockfile, os.path.join(self.dldir, "git2", ref['gitsrcname'] + '.lock')) 2645 self.assertEqual(ud.lockfile, os.path.join(self.dldir, "git2", ref['gitsrcname'] + '.lock'))
2472 self.assertEqual(ud.clonedir, os.path.join(self.dldir, "git2", ref['gitsrcname'])) 2646 self.assertEqual(ud.clonedir, os.path.join(self.dldir, "git2", ref['gitsrcname']))
2473 self.assertEqual(ud.fullmirror, os.path.join(self.dldir, "git2_" + ref['gitsrcname'] + '.tar.gz')) 2647 self.assertEqual(ud.fullmirror, os.path.join(self.dldir, "git2_" + ref['gitsrcname'] + '.tar.gz'))
2648 self.assertEqual(ud.method._get_repo_url(ud), ref['repo_url'])
2474 2649
2475class CrateTest(FetcherTest): 2650class CrateTest(FetcherTest):
2476 @skipIfNoNetwork() 2651 @skipIfNoNetwork()
@@ -2592,7 +2767,6 @@ class CrateTest(FetcherTest):
2592 2767
2593class NPMTest(FetcherTest): 2768class NPMTest(FetcherTest):
2594 def skipIfNoNpm(): 2769 def skipIfNoNpm():
2595 import shutil
2596 if not shutil.which('npm'): 2770 if not shutil.which('npm'):
2597 return unittest.skip('npm not installed') 2771 return unittest.skip('npm not installed')
2598 return lambda f: f 2772 return lambda f: f
@@ -2600,8 +2774,8 @@ class NPMTest(FetcherTest):
2600 @skipIfNoNpm() 2774 @skipIfNoNpm()
2601 @skipIfNoNetwork() 2775 @skipIfNoNetwork()
2602 def test_npm(self): 2776 def test_npm(self):
2603 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0' 2777 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0']
2604 fetcher = bb.fetch.Fetch([url], self.d) 2778 fetcher = bb.fetch.Fetch(urls, self.d)
2605 ud = fetcher.ud[fetcher.urls[0]] 2779 ud = fetcher.ud[fetcher.urls[0]]
2606 fetcher.download() 2780 fetcher.download()
2607 self.assertTrue(os.path.exists(ud.localpath)) 2781 self.assertTrue(os.path.exists(ud.localpath))
@@ -2614,9 +2788,9 @@ class NPMTest(FetcherTest):
2614 @skipIfNoNpm() 2788 @skipIfNoNpm()
2615 @skipIfNoNetwork() 2789 @skipIfNoNetwork()
2616 def test_npm_bad_checksum(self): 2790 def test_npm_bad_checksum(self):
2617 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0' 2791 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0']
2618 # Fetch once to get a tarball 2792 # Fetch once to get a tarball
2619 fetcher = bb.fetch.Fetch([url], self.d) 2793 fetcher = bb.fetch.Fetch(urls, self.d)
2620 ud = fetcher.ud[fetcher.urls[0]] 2794 ud = fetcher.ud[fetcher.urls[0]]
2621 fetcher.download() 2795 fetcher.download()
2622 self.assertTrue(os.path.exists(ud.localpath)) 2796 self.assertTrue(os.path.exists(ud.localpath))
@@ -2633,9 +2807,9 @@ class NPMTest(FetcherTest):
2633 @skipIfNoNpm() 2807 @skipIfNoNpm()
2634 @skipIfNoNetwork() 2808 @skipIfNoNetwork()
2635 def test_npm_premirrors(self): 2809 def test_npm_premirrors(self):
2636 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0' 2810 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0']
2637 # Fetch once to get a tarball 2811 # Fetch once to get a tarball
2638 fetcher = bb.fetch.Fetch([url], self.d) 2812 fetcher = bb.fetch.Fetch(urls, self.d)
2639 ud = fetcher.ud[fetcher.urls[0]] 2813 ud = fetcher.ud[fetcher.urls[0]]
2640 fetcher.download() 2814 fetcher.download()
2641 self.assertTrue(os.path.exists(ud.localpath)) 2815 self.assertTrue(os.path.exists(ud.localpath))
@@ -2655,7 +2829,7 @@ class NPMTest(FetcherTest):
2655 # while the fetcher object exists, which it does when we rename the 2829 # while the fetcher object exists, which it does when we rename the
2656 # download directory to "mirror" above. Thus we need a new fetcher to go 2830 # download directory to "mirror" above. Thus we need a new fetcher to go
2657 # with the now empty download directory. 2831 # with the now empty download directory.
2658 fetcher = bb.fetch.Fetch([url], self.d) 2832 fetcher = bb.fetch.Fetch(urls, self.d)
2659 ud = fetcher.ud[fetcher.urls[0]] 2833 ud = fetcher.ud[fetcher.urls[0]]
2660 fetcher.download() 2834 fetcher.download()
2661 self.assertTrue(os.path.exists(ud.localpath)) 2835 self.assertTrue(os.path.exists(ud.localpath))
@@ -2663,9 +2837,9 @@ class NPMTest(FetcherTest):
2663 @skipIfNoNpm() 2837 @skipIfNoNpm()
2664 @skipIfNoNetwork() 2838 @skipIfNoNetwork()
2665 def test_npm_premirrors_with_specified_filename(self): 2839 def test_npm_premirrors_with_specified_filename(self):
2666 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0' 2840 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0']
2667 # Fetch once to get a tarball 2841 # Fetch once to get a tarball
2668 fetcher = bb.fetch.Fetch([url], self.d) 2842 fetcher = bb.fetch.Fetch(urls, self.d)
2669 ud = fetcher.ud[fetcher.urls[0]] 2843 ud = fetcher.ud[fetcher.urls[0]]
2670 fetcher.download() 2844 fetcher.download()
2671 self.assertTrue(os.path.exists(ud.localpath)) 2845 self.assertTrue(os.path.exists(ud.localpath))
@@ -2685,8 +2859,8 @@ class NPMTest(FetcherTest):
2685 @skipIfNoNetwork() 2859 @skipIfNoNetwork()
2686 def test_npm_mirrors(self): 2860 def test_npm_mirrors(self):
2687 # Fetch once to get a tarball 2861 # Fetch once to get a tarball
2688 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0' 2862 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0']
2689 fetcher = bb.fetch.Fetch([url], self.d) 2863 fetcher = bb.fetch.Fetch(urls, self.d)
2690 ud = fetcher.ud[fetcher.urls[0]] 2864 ud = fetcher.ud[fetcher.urls[0]]
2691 fetcher.download() 2865 fetcher.download()
2692 self.assertTrue(os.path.exists(ud.localpath)) 2866 self.assertTrue(os.path.exists(ud.localpath))
@@ -2710,8 +2884,8 @@ class NPMTest(FetcherTest):
2710 @skipIfNoNpm() 2884 @skipIfNoNpm()
2711 @skipIfNoNetwork() 2885 @skipIfNoNetwork()
2712 def test_npm_destsuffix_downloadfilename(self): 2886 def test_npm_destsuffix_downloadfilename(self):
2713 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0;destsuffix=foo/bar;downloadfilename=foo-bar.tgz' 2887 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0;destsuffix=foo/bar;downloadfilename=foo-bar.tgz']
2714 fetcher = bb.fetch.Fetch([url], self.d) 2888 fetcher = bb.fetch.Fetch(urls, self.d)
2715 fetcher.download() 2889 fetcher.download()
2716 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'foo-bar.tgz'))) 2890 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'foo-bar.tgz')))
2717 fetcher.unpack(self.unpackdir) 2891 fetcher.unpack(self.unpackdir)
@@ -2719,18 +2893,18 @@ class NPMTest(FetcherTest):
2719 self.assertTrue(os.path.exists(os.path.join(unpackdir, 'package.json'))) 2893 self.assertTrue(os.path.exists(os.path.join(unpackdir, 'package.json')))
2720 2894
2721 def test_npm_no_network_no_tarball(self): 2895 def test_npm_no_network_no_tarball(self):
2722 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0' 2896 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0']
2723 self.d.setVar('BB_NO_NETWORK', '1') 2897 self.d.setVar('BB_NO_NETWORK', '1')
2724 fetcher = bb.fetch.Fetch([url], self.d) 2898 fetcher = bb.fetch.Fetch(urls, self.d)
2725 with self.assertRaises(bb.fetch2.NetworkAccess): 2899 with self.assertRaises(bb.fetch2.NetworkAccess):
2726 fetcher.download() 2900 fetcher.download()
2727 2901
2728 @skipIfNoNpm() 2902 @skipIfNoNpm()
2729 @skipIfNoNetwork() 2903 @skipIfNoNetwork()
2730 def test_npm_no_network_with_tarball(self): 2904 def test_npm_no_network_with_tarball(self):
2731 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0' 2905 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0']
2732 # Fetch once to get a tarball 2906 # Fetch once to get a tarball
2733 fetcher = bb.fetch.Fetch([url], self.d) 2907 fetcher = bb.fetch.Fetch(urls, self.d)
2734 fetcher.download() 2908 fetcher.download()
2735 # Disable network access 2909 # Disable network access
2736 self.d.setVar('BB_NO_NETWORK', '1') 2910 self.d.setVar('BB_NO_NETWORK', '1')
@@ -2743,8 +2917,8 @@ class NPMTest(FetcherTest):
2743 @skipIfNoNpm() 2917 @skipIfNoNpm()
2744 @skipIfNoNetwork() 2918 @skipIfNoNetwork()
2745 def test_npm_registry_alternate(self): 2919 def test_npm_registry_alternate(self):
2746 url = 'npm://skimdb.npmjs.com;package=@savoirfairelinux/node-server-example;version=1.0.0' 2920 urls = ['npm://skimdb.npmjs.com;package=@savoirfairelinux/node-server-example;version=1.0.0']
2747 fetcher = bb.fetch.Fetch([url], self.d) 2921 fetcher = bb.fetch.Fetch(urls, self.d)
2748 fetcher.download() 2922 fetcher.download()
2749 fetcher.unpack(self.unpackdir) 2923 fetcher.unpack(self.unpackdir)
2750 unpackdir = os.path.join(self.unpackdir, 'npm') 2924 unpackdir = os.path.join(self.unpackdir, 'npm')
@@ -2753,8 +2927,8 @@ class NPMTest(FetcherTest):
2753 @skipIfNoNpm() 2927 @skipIfNoNpm()
2754 @skipIfNoNetwork() 2928 @skipIfNoNetwork()
2755 def test_npm_version_latest(self): 2929 def test_npm_version_latest(self):
2756 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=latest' 2930 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=latest']
2757 fetcher = bb.fetch.Fetch([url], self.d) 2931 fetcher = bb.fetch.Fetch(urls, self.d)
2758 fetcher.download() 2932 fetcher.download()
2759 fetcher.unpack(self.unpackdir) 2933 fetcher.unpack(self.unpackdir)
2760 unpackdir = os.path.join(self.unpackdir, 'npm') 2934 unpackdir = os.path.join(self.unpackdir, 'npm')
@@ -2763,46 +2937,46 @@ class NPMTest(FetcherTest):
2763 @skipIfNoNpm() 2937 @skipIfNoNpm()
2764 @skipIfNoNetwork() 2938 @skipIfNoNetwork()
2765 def test_npm_registry_invalid(self): 2939 def test_npm_registry_invalid(self):
2766 url = 'npm://registry.invalid.org;package=@savoirfairelinux/node-server-example;version=1.0.0' 2940 urls = ['npm://registry.invalid.org;package=@savoirfairelinux/node-server-example;version=1.0.0']
2767 fetcher = bb.fetch.Fetch([url], self.d) 2941 fetcher = bb.fetch.Fetch(urls, self.d)
2768 with self.assertRaises(bb.fetch2.FetchError): 2942 with self.assertRaises(bb.fetch2.FetchError):
2769 fetcher.download() 2943 fetcher.download()
2770 2944
2771 @skipIfNoNpm() 2945 @skipIfNoNpm()
2772 @skipIfNoNetwork() 2946 @skipIfNoNetwork()
2773 def test_npm_package_invalid(self): 2947 def test_npm_package_invalid(self):
2774 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/invalid;version=1.0.0' 2948 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/invalid;version=1.0.0']
2775 fetcher = bb.fetch.Fetch([url], self.d) 2949 fetcher = bb.fetch.Fetch(urls, self.d)
2776 with self.assertRaises(bb.fetch2.FetchError): 2950 with self.assertRaises(bb.fetch2.FetchError):
2777 fetcher.download() 2951 fetcher.download()
2778 2952
2779 @skipIfNoNpm() 2953 @skipIfNoNpm()
2780 @skipIfNoNetwork() 2954 @skipIfNoNetwork()
2781 def test_npm_version_invalid(self): 2955 def test_npm_version_invalid(self):
2782 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=invalid' 2956 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=invalid']
2783 with self.assertRaises(bb.fetch2.ParameterError): 2957 with self.assertRaises(bb.fetch2.ParameterError):
2784 fetcher = bb.fetch.Fetch([url], self.d) 2958 fetcher = bb.fetch.Fetch(urls, self.d)
2785 2959
2786 @skipIfNoNpm() 2960 @skipIfNoNpm()
2787 @skipIfNoNetwork() 2961 @skipIfNoNetwork()
2788 def test_npm_registry_none(self): 2962 def test_npm_registry_none(self):
2789 url = 'npm://;package=@savoirfairelinux/node-server-example;version=1.0.0' 2963 urls = ['npm://;package=@savoirfairelinux/node-server-example;version=1.0.0']
2790 with self.assertRaises(bb.fetch2.MalformedUrl): 2964 with self.assertRaises(bb.fetch2.MalformedUrl):
2791 fetcher = bb.fetch.Fetch([url], self.d) 2965 fetcher = bb.fetch.Fetch(urls, self.d)
2792 2966
2793 @skipIfNoNpm() 2967 @skipIfNoNpm()
2794 @skipIfNoNetwork() 2968 @skipIfNoNetwork()
2795 def test_npm_package_none(self): 2969 def test_npm_package_none(self):
2796 url = 'npm://registry.npmjs.org;version=1.0.0' 2970 urls = ['npm://registry.npmjs.org;version=1.0.0']
2797 with self.assertRaises(bb.fetch2.MissingParameterError): 2971 with self.assertRaises(bb.fetch2.MissingParameterError):
2798 fetcher = bb.fetch.Fetch([url], self.d) 2972 fetcher = bb.fetch.Fetch(urls, self.d)
2799 2973
2800 @skipIfNoNpm() 2974 @skipIfNoNpm()
2801 @skipIfNoNetwork() 2975 @skipIfNoNetwork()
2802 def test_npm_version_none(self): 2976 def test_npm_version_none(self):
2803 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example' 2977 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example']
2804 with self.assertRaises(bb.fetch2.MissingParameterError): 2978 with self.assertRaises(bb.fetch2.MissingParameterError):
2805 fetcher = bb.fetch.Fetch([url], self.d) 2979 fetcher = bb.fetch.Fetch(urls, self.d)
2806 2980
2807 def create_shrinkwrap_file(self, data): 2981 def create_shrinkwrap_file(self, data):
2808 import json 2982 import json
@@ -2811,32 +2985,30 @@ class NPMTest(FetcherTest):
2811 bb.utils.mkdirhier(datadir) 2985 bb.utils.mkdirhier(datadir)
2812 with open(swfile, 'w') as f: 2986 with open(swfile, 'w') as f:
2813 json.dump(data, f) 2987 json.dump(data, f)
2814 # Also configure the S directory
2815 self.sdir = os.path.join(self.unpackdir, 'S')
2816 self.d.setVar('S', self.sdir)
2817 return swfile 2988 return swfile
2818 2989
2819 @skipIfNoNpm()
2820 @skipIfNoNetwork() 2990 @skipIfNoNetwork()
2821 def test_npmsw(self): 2991 def test_npmsw(self):
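        # The shrinkwrap data below uses the npm lockfileVersion 2/3 'packages' layout, keyed by node_modules path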
2822 swfile = self.create_shrinkwrap_file({ 2992 swfile = self.create_shrinkwrap_file({
2823 'dependencies': { 2993 'packages': {
2824 'array-flatten': { 2994 'node_modules/array-flatten': {
2825 'version': '1.1.1', 2995 'version': '1.1.1',
2826 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', 2996 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
2827 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=', 2997 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=',
2828 'dependencies': { 2998 'dependencies': {
2829 'content-type': { 2999 'content-type': "1.0.4"
2830 'version': 'https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz', 3000 }
2831 'integrity': 'sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==', 3001 },
2832 'dependencies': { 3002 'node_modules/array-flatten/node_modules/content-type': {
2833 'cookie': { 3003 'version': '1.0.4',
2834 'version': 'git+https://github.com/jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09', 3004 'resolved': 'https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz',
2835 'from': 'git+https://github.com/jshttp/cookie.git' 3005 'integrity': 'sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==',
2836 } 3006 'dependencies': {
2837 } 3007 'cookie': 'git+https://github.com/jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09'
2838 }
2839 } 3008 }
3009 },
3010 'node_modules/array-flatten/node_modules/content-type/node_modules/cookie': {
3011 'resolved': 'git+https://github.com/jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09'
2840 } 3012 }
2841 } 3013 }
2842 }) 3014 })
@@ -2846,31 +3018,17 @@ class NPMTest(FetcherTest):
2846 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'content-type-1.0.4.tgz'))) 3018 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'content-type-1.0.4.tgz')))
2847 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'github.com.jshttp.cookie.git'))) 3019 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'github.com.jshttp.cookie.git')))
2848 fetcher.unpack(self.unpackdir) 3020 fetcher.unpack(self.unpackdir)
2849 self.assertTrue(os.path.exists(os.path.join(self.sdir, 'npm-shrinkwrap.json'))) 3021 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'npm-shrinkwrap.json')))
2850 self.assertTrue(os.path.exists(os.path.join(self.sdir, 'node_modules', 'array-flatten', 'package.json'))) 3022 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'node_modules', 'array-flatten', 'package.json')))
2851 self.assertTrue(os.path.exists(os.path.join(self.sdir, 'node_modules', 'array-flatten', 'node_modules', 'content-type', 'package.json'))) 3023 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'node_modules', 'array-flatten', 'node_modules', 'content-type', 'package.json')))
2852 self.assertTrue(os.path.exists(os.path.join(self.sdir, 'node_modules', 'array-flatten', 'node_modules', 'content-type', 'node_modules', 'cookie', 'package.json'))) 3024 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'node_modules', 'array-flatten', 'node_modules', 'content-type', 'node_modules', 'cookie', 'package.json')))
2853 3025
2854 @skipIfNoNpm()
2855 @skipIfNoNetwork() 3026 @skipIfNoNetwork()
2856 def test_npmsw_git(self): 3027 def test_npmsw_git(self):
2857 swfile = self.create_shrinkwrap_file({ 3028 swfile = self.create_shrinkwrap_file({
2858 'dependencies': { 3029 'packages': {
2859 'cookie': { 3030 'node_modules/cookie': {
2860 'version': 'github:jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09', 3031 'resolved': 'git+https://github.com/jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09'
2861 'from': 'github:jshttp/cookie.git'
2862 }
2863 }
2864 })
2865 fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d)
2866 fetcher.download()
2867 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'github.com.jshttp.cookie.git')))
2868
2869 swfile = self.create_shrinkwrap_file({
2870 'dependencies': {
2871 'cookie': {
2872 'version': 'jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09',
2873 'from': 'jshttp/cookie.git'
2874 } 3032 }
2875 } 3033 }
2876 }) 3034 })
@@ -2878,29 +3036,16 @@ class NPMTest(FetcherTest):
2878 fetcher.download() 3036 fetcher.download()
2879 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'github.com.jshttp.cookie.git'))) 3037 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'github.com.jshttp.cookie.git')))
2880 3038
2881 swfile = self.create_shrinkwrap_file({
2882 'dependencies': {
2883 'nodejs': {
2884 'version': 'gitlab:gitlab-examples/nodejs.git#892a1f16725e56cc3a2cb0d677be42935c8fc262',
2885 'from': 'gitlab:gitlab-examples/nodejs'
2886 }
2887 }
2888 })
2889 fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d)
2890 fetcher.download()
2891 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'gitlab.com.gitlab-examples.nodejs.git')))
2892
2893 @skipIfNoNpm()
2894 @skipIfNoNetwork() 3039 @skipIfNoNetwork()
2895 def test_npmsw_dev(self): 3040 def test_npmsw_dev(self):
2896 swfile = self.create_shrinkwrap_file({ 3041 swfile = self.create_shrinkwrap_file({
2897 'dependencies': { 3042 'packages': {
2898 'array-flatten': { 3043 'node_modules/array-flatten': {
2899 'version': '1.1.1', 3044 'version': '1.1.1',
2900 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', 3045 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
2901 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' 3046 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
2902 }, 3047 },
2903 'content-type': { 3048 'node_modules/content-type': {
2904 'version': '1.0.4', 3049 'version': '1.0.4',
2905 'resolved': 'https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz', 3050 'resolved': 'https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz',
2906 'integrity': 'sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==', 3051 'integrity': 'sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==',
@@ -2919,12 +3064,11 @@ class NPMTest(FetcherTest):
2919 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'array-flatten-1.1.1.tgz'))) 3064 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'array-flatten-1.1.1.tgz')))
2920 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'content-type-1.0.4.tgz'))) 3065 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'content-type-1.0.4.tgz')))
2921 3066
2922 @skipIfNoNpm()
2923 @skipIfNoNetwork() 3067 @skipIfNoNetwork()
2924 def test_npmsw_destsuffix(self): 3068 def test_npmsw_destsuffix(self):
2925 swfile = self.create_shrinkwrap_file({ 3069 swfile = self.create_shrinkwrap_file({
2926 'dependencies': { 3070 'packages': {
2927 'array-flatten': { 3071 'node_modules/array-flatten': {
2928 'version': '1.1.1', 3072 'version': '1.1.1',
2929 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', 3073 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
2930 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' 3074 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
@@ -2938,8 +3082,8 @@ class NPMTest(FetcherTest):
2938 3082
2939 def test_npmsw_no_network_no_tarball(self): 3083 def test_npmsw_no_network_no_tarball(self):
2940 swfile = self.create_shrinkwrap_file({ 3084 swfile = self.create_shrinkwrap_file({
2941 'dependencies': { 3085 'packages': {
2942 'array-flatten': { 3086 'node_modules/array-flatten': {
2943 'version': '1.1.1', 3087 'version': '1.1.1',
2944 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', 3088 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
2945 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' 3089 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
@@ -2961,8 +3105,8 @@ class NPMTest(FetcherTest):
2961 self.d.setVar('BB_NO_NETWORK', '1') 3105 self.d.setVar('BB_NO_NETWORK', '1')
2962 # Fetch again 3106 # Fetch again
2963 swfile = self.create_shrinkwrap_file({ 3107 swfile = self.create_shrinkwrap_file({
2964 'dependencies': { 3108 'packages': {
2965 'array-flatten': { 3109 'node_modules/array-flatten': {
2966 'version': '1.1.1', 3110 'version': '1.1.1',
2967 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', 3111 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
2968 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' 3112 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
@@ -2972,15 +3116,14 @@ class NPMTest(FetcherTest):
2972 fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d) 3116 fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d)
2973 fetcher.download() 3117 fetcher.download()
2974 fetcher.unpack(self.unpackdir) 3118 fetcher.unpack(self.unpackdir)
2975 self.assertTrue(os.path.exists(os.path.join(self.sdir, 'node_modules', 'array-flatten', 'package.json'))) 3119 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'node_modules', 'array-flatten', 'package.json')))
2976 3120
2977 @skipIfNoNpm()
2978 @skipIfNoNetwork() 3121 @skipIfNoNetwork()
2979 def test_npmsw_npm_reusability(self): 3122 def test_npmsw_npm_reusability(self):
2980 # Fetch once with npmsw 3123 # Fetch once with npmsw
2981 swfile = self.create_shrinkwrap_file({ 3124 swfile = self.create_shrinkwrap_file({
2982 'dependencies': { 3125 'packages': {
2983 'array-flatten': { 3126 'node_modules/array-flatten': {
2984 'version': '1.1.1', 3127 'version': '1.1.1',
2985 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', 3128 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
2986 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' 3129 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
@@ -2997,13 +3140,12 @@ class NPMTest(FetcherTest):
2997 fetcher.unpack(self.unpackdir) 3140 fetcher.unpack(self.unpackdir)
2998 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'npm', 'package.json'))) 3141 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'npm', 'package.json')))
2999 3142
3000 @skipIfNoNpm()
3001 @skipIfNoNetwork() 3143 @skipIfNoNetwork()
3002 def test_npmsw_bad_checksum(self): 3144 def test_npmsw_bad_checksum(self):
3003 # Try to fetch with bad checksum 3145 # Try to fetch with bad checksum
3004 swfile = self.create_shrinkwrap_file({ 3146 swfile = self.create_shrinkwrap_file({
3005 'dependencies': { 3147 'packages': {
3006 'array-flatten': { 3148 'node_modules/array-flatten': {
3007 'version': '1.1.1', 3149 'version': '1.1.1',
3008 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', 3150 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
3009 'integrity': 'sha1-gfNEp2hqgLTFKT6P3AsBYMgsBqg=' 3151 'integrity': 'sha1-gfNEp2hqgLTFKT6P3AsBYMgsBqg='
@@ -3015,8 +3157,8 @@ class NPMTest(FetcherTest):
3015 fetcher.download() 3157 fetcher.download()
3016 # Fetch correctly to get a tarball 3158 # Fetch correctly to get a tarball
3017 swfile = self.create_shrinkwrap_file({ 3159 swfile = self.create_shrinkwrap_file({
3018 'dependencies': { 3160 'packages': {
3019 'array-flatten': { 3161 'node_modules/array-flatten': {
3020 'version': '1.1.1', 3162 'version': '1.1.1',
3021 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', 3163 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
3022 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' 3164 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
@@ -3054,8 +3196,8 @@ class NPMTest(FetcherTest):
3054 # Fetch again 3196 # Fetch again
3055 self.assertFalse(os.path.exists(ud.localpath)) 3197 self.assertFalse(os.path.exists(ud.localpath))
3056 swfile = self.create_shrinkwrap_file({ 3198 swfile = self.create_shrinkwrap_file({
3057 'dependencies': { 3199 'packages': {
3058 'array-flatten': { 3200 'node_modules/array-flatten': {
3059 'version': '1.1.1', 3201 'version': '1.1.1',
3060 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz', 3202 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
3061 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' 3203 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
@@ -3082,8 +3224,8 @@ class NPMTest(FetcherTest):
3082 # Fetch again with invalid url 3224 # Fetch again with invalid url
3083 self.assertFalse(os.path.exists(ud.localpath)) 3225 self.assertFalse(os.path.exists(ud.localpath))
3084 swfile = self.create_shrinkwrap_file({ 3226 swfile = self.create_shrinkwrap_file({
3085 'dependencies': { 3227 'packages': {
3086 'array-flatten': { 3228 'node_modules/array-flatten': {
3087 'version': '1.1.1', 3229 'version': '1.1.1',
3088 'resolved': 'https://invalid', 3230 'resolved': 'https://invalid',
3089 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=' 3231 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
@@ -3094,6 +3236,28 @@ class NPMTest(FetcherTest):
3094 fetcher.download() 3236 fetcher.download()
3095 self.assertTrue(os.path.exists(ud.localpath)) 3237 self.assertTrue(os.path.exists(ud.localpath))
3096 3238
3239 @skipIfNoNetwork()
3240 def test_npmsw_bundled(self):
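        # Packages marked 'inBundle' ship inside their parent's tarball, so no separate download should occur for them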
3241 swfile = self.create_shrinkwrap_file({
3242 'packages': {
3243 'node_modules/array-flatten': {
3244 'version': '1.1.1',
3245 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
3246 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
3247 },
3248 'node_modules/content-type': {
3249 'version': '1.0.4',
3250 'resolved': 'https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz',
3251 'integrity': 'sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==',
3252 'inBundle': True
3253 }
3254 }
3255 })
3256 fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d)
3257 fetcher.download()
3258 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'array-flatten-1.1.1.tgz')))
3259 self.assertFalse(os.path.exists(os.path.join(self.dldir, 'npm2', 'content-type-1.0.4.tgz')))
3260
3097class GitSharedTest(FetcherTest): 3261class GitSharedTest(FetcherTest):
3098 def setUp(self): 3262 def setUp(self):
3099 super(GitSharedTest, self).setUp() 3263 super(GitSharedTest, self).setUp()
@@ -3121,6 +3285,72 @@ class GitSharedTest(FetcherTest):
3121 alt = os.path.join(self.unpackdir, 'git/.git/objects/info/alternates') 3285 alt = os.path.join(self.unpackdir, 'git/.git/objects/info/alternates')
3122 self.assertFalse(os.path.exists(alt)) 3286 self.assertFalse(os.path.exists(alt))
3123 3287
3288class GitTagVerificationTests(FetcherTest):
3289
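    # Verify that a tag= in a git URL is checked against the revision given via rev=/SRCREV, and errors on mismatch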
3290 @skipIfNoNetwork()
3291 def test_tag_rev_match(self):
3292 # Test that a url with rev= and tag= set works
3293 fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.8;protocol=https;rev=aa0e540fc31a1c26839efd2c7785a751ce24ebfb;tag=2.8.7"], self.d)
3294 fetcher.download()
3295 fetcher.unpack(self.unpackdir)
3296
     @skipIfNoNetwork()
3297 def test_annotated_tag_rev_match(self):
3298 # Test that a url with rev= and tag= set works
3299 # rev= is the annotated tag revision in this case
3300 fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.8;protocol=https;rev=6d363159e4b7dc566fc40d069b2615e61774a7d8;tag=2.8.7"], self.d)
3301 fetcher.download()
3302 fetcher.unpack(self.unpackdir)
3303
3304 @skipIfNoNetwork()
3305 def test_tag_rev_match2(self):
3306 # Test that a url with SRCREV and tag= set works
3307 self.d.setVar('SRCREV', 'aa0e540fc31a1c26839efd2c7785a751ce24ebfb')
3308 fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.8;protocol=https;tag=2.8.7"], self.d)
3309 fetcher.download()
3310 fetcher.unpack(self.unpackdir)
3311
3312 @skipIfNoNetwork()
3313 def test_tag_rev_match3(self):
3314 # Test that a url with SRCREV, rev= and tag= set works
3315 self.d.setVar('SRCREV', 'aa0e540fc31a1c26839efd2c7785a751ce24ebfb')
3316 fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.8;protocol=https;rev=aa0e540fc31a1c26839efd2c7785a751ce24ebfb;tag=2.8.7"], self.d)
3317 fetcher.download()
3318 fetcher.unpack(self.unpackdir)
3319
3320 @skipIfNoNetwork()
3321 def test_tag_rev_match4(self):
3322 # Test that mismatching SRCREV and rev= values raise an error
3323 self.d.setVar('SRCREV', 'bade540fc31a1c26839efd2c7785a751ce24ebfb')
3324 with self.assertRaises(bb.fetch2.FetchError):
3325 fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.8;protocol=https;rev=aa0e540fc31a1c26839efd2c7785a751ce24ebfb;tag=2.8.7"], self.d)
3326
3327 @skipIfNoNetwork()
3328 def test_tag_rev_match5(self):
3329 # Test that a url with SRCREV, rev= and tag= set works when using shallow clones
3330 self.d.setVar('BB_GIT_SHALLOW', '1')
3331 self.d.setVar('SRCREV', 'aa0e540fc31a1c26839efd2c7785a751ce24ebfb')
3332 fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.8;protocol=https;rev=aa0e540fc31a1c26839efd2c7785a751ce24ebfb;tag=2.8.7"], self.d)
3333 fetcher.download()
3334 fetcher.unpack(self.unpackdir)
3335
3336 @skipIfNoNetwork()
3337 def test_tag_rev_match6(self):
3338 # Test that a url with rev= and a mismatched tag= errors at unpack when using shallow clones
3339 self.d.setVar('BB_GIT_SHALLOW', '1')
3340 fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.8;protocol=https;rev=aa0e540fc31a1c26839efd2c7785a751ce24ebfb;tag=2.8.6"], self.d)
3341 fetcher.download()
3342 with self.assertRaises(bb.fetch2.FetchError):
3343 fetcher.unpack(self.unpackdir)
3344
3345 @skipIfNoNetwork()
3346 def test_tag_rev_match7(self):
3347 # Test that a url with SRCREV, rev= and a mismatched tag= errors at unpack
3348 self.d.setVar('SRCREV', 'aa0e540fc31a1c26839efd2c7785a751ce24ebfb')
3349 fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.8;protocol=https;rev=aa0e540fc31a1c26839efd2c7785a751ce24ebfb;tag=2.8.6"], self.d)
3350 fetcher.download()
3351 with self.assertRaises(bb.fetch2.FetchError):
3352 fetcher.unpack(self.unpackdir)
3353
3124 3354
3125class FetchPremirroronlyLocalTest(FetcherTest): 3355class FetchPremirroronlyLocalTest(FetcherTest):
3126 3356
@@ -3203,58 +3433,6 @@ class FetchPremirroronlyLocalTest(FetcherTest):
3203 with self.assertRaises(bb.fetch2.NetworkAccess): 3433 with self.assertRaises(bb.fetch2.NetworkAccess):
3204 fetcher.download() 3434 fetcher.download()
3205 3435
3206 def test_mirror_tarball_multiple_branches(self):
3207 """
3208 test if PREMIRRORS can handle multiple name/branches correctly
3209 both branches have required revisions
3210 """
3211 self.make_git_repo()
3212 branch1rev = self.git_new_branch("testbranch1")
3213 branch2rev = self.git_new_branch("testbranch2")
3214 self.recipe_url = "git://git.fake.repo/bitbake;branch=testbranch1,testbranch2;protocol=https;name=branch1,branch2"
3215 self.d.setVar("SRCREV_branch1", branch1rev)
3216 self.d.setVar("SRCREV_branch2", branch2rev)
3217 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
3218 self.assertTrue(os.path.exists(self.mirrorfile), "Mirror file doesn't exist")
3219 fetcher.download()
3220 fetcher.unpack(os.path.join(self.tempdir, "unpacked"))
3221 unpacked = os.path.join(self.tempdir, "unpacked", "git", self.testfilename)
3222 self.assertTrue(os.path.exists(unpacked), "Repo has not been unpackaged properly!")
3223 with open(unpacked, 'r') as f:
3224 content = f.read()
3225 ## We expect to see testbranch1 in the file, not master, not testbranch2
3226 self.assertTrue(content.find("testbranch1") != -1, "Wrong branch has been checked out!")
3227
3228 def test_mirror_tarball_multiple_branches_nobranch(self):
3229 """
3230 test if PREMIRRORS can handle multiple name/branches correctly
3231 Unbalanced name/branches raises ParameterError
3232 """
3233 self.make_git_repo()
3234 branch1rev = self.git_new_branch("testbranch1")
3235 branch2rev = self.git_new_branch("testbranch2")
3236 self.recipe_url = "git://git.fake.repo/bitbake;branch=testbranch1;protocol=https;name=branch1,branch2"
3237 self.d.setVar("SRCREV_branch1", branch1rev)
3238 self.d.setVar("SRCREV_branch2", branch2rev)
3239 with self.assertRaises(bb.fetch2.ParameterError):
3240 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
3241
3242 def test_mirror_tarball_multiple_branches_norev(self):
3243 """
3244 test if PREMIRRORS can handle multiple name/branches correctly
3245 one of the branches specifies non existing SRCREV
3246 """
3247 self.make_git_repo()
3248 branch1rev = self.git_new_branch("testbranch1")
3249 branch2rev = self.git_new_branch("testbranch2")
3250 self.recipe_url = "git://git.fake.repo/bitbake;branch=testbranch1,testbranch2;protocol=https;name=branch1,branch2"
3251 self.d.setVar("SRCREV_branch1", branch1rev)
3252 self.d.setVar("SRCREV_branch2", "0"*40)
3253 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
3254 self.assertTrue(os.path.exists(self.mirrorfile), "Mirror file doesn't exist")
3255 with self.assertRaises(bb.fetch2.NetworkAccess):
3256 fetcher.download()
3257
3258 3436
3259class FetchPremirroronlyNetworkTest(FetcherTest): 3437class FetchPremirroronlyNetworkTest(FetcherTest):
3260 3438
@@ -3265,16 +3443,16 @@ class FetchPremirroronlyNetworkTest(FetcherTest):
3265 self.reponame = "fstests" 3443 self.reponame = "fstests"
3266 self.clonedir = os.path.join(self.tempdir, "git") 3444 self.clonedir = os.path.join(self.tempdir, "git")
3267 self.gitdir = os.path.join(self.tempdir, "git", "{}.git".format(self.reponame)) 3445 self.gitdir = os.path.join(self.tempdir, "git", "{}.git".format(self.reponame))
3268 self.recipe_url = "git://git.yoctoproject.org/fstests;protocol=https" 3446 self.recipe_url = "git://git.yoctoproject.org/fstests;protocol=https;branch=master"
3269 self.d.setVar("BB_FETCH_PREMIRRORONLY", "1") 3447 self.d.setVar("BB_FETCH_PREMIRRORONLY", "1")
3270 self.d.setVar("BB_NO_NETWORK", "0") 3448 self.d.setVar("BB_NO_NETWORK", "0")
3271 self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n") 3449 self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n")
3272 3450
3273 def make_git_repo(self): 3451 def make_git_repo(self):
3274 import shutil
3275 self.mirrorname = "git2_git.yoctoproject.org.fstests.tar.gz" 3452 self.mirrorname = "git2_git.yoctoproject.org.fstests.tar.gz"
3276 os.makedirs(self.clonedir) 3453 os.makedirs(self.clonedir)
3277 self.git("clone --bare --shallow-since=\"01.01.2013\" {}".format(self.recipe_url), self.clonedir) 3454 self.git("clone --bare {}".format(self.recipe_url), self.clonedir)
3455 self.git("update-ref HEAD 15413486df1f5a5b5af699b6f3ba5f0984e52a9f", self.gitdir)
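         # Pin HEAD to a known commit so the mirror tarball contents are deterministic for the tests below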
3278 bb.process.run('tar -czvf {} .'.format(os.path.join(self.mirrordir, self.mirrorname)), cwd = self.gitdir) 3456 bb.process.run('tar -czvf {} .'.format(os.path.join(self.mirrordir, self.mirrorname)), cwd = self.gitdir)
3279 shutil.rmtree(self.clonedir) 3457 shutil.rmtree(self.clonedir)
3280 3458
@@ -3282,7 +3460,7 @@ class FetchPremirroronlyNetworkTest(FetcherTest):
3282 def test_mirror_tarball_updated(self): 3460 def test_mirror_tarball_updated(self):
3283 self.make_git_repo() 3461 self.make_git_repo()
3284 ## Upstream commit is in the mirror 3462 ## Upstream commit is in the mirror
3285 self.d.setVar("SRCREV", "49d65d53c2bf558ae6e9185af0f3af7b79d255ec") 3463 self.d.setVar("SRCREV", "15413486df1f5a5b5af699b6f3ba5f0984e52a9f")
3286 fetcher = bb.fetch.Fetch([self.recipe_url], self.d) 3464 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
3287 fetcher.download() 3465 fetcher.download()
3288 3466
@@ -3290,7 +3468,7 @@ class FetchPremirroronlyNetworkTest(FetcherTest):
3290 def test_mirror_tarball_outdated(self): 3468 def test_mirror_tarball_outdated(self):
3291 self.make_git_repo() 3469 self.make_git_repo()
3292 ## Upstream commit not in the mirror 3470 ## Upstream commit not in the mirror
3293 self.d.setVar("SRCREV", "15413486df1f5a5b5af699b6f3ba5f0984e52a9f") 3471 self.d.setVar("SRCREV", "49d65d53c2bf558ae6e9185af0f3af7b79d255ec")
3294 fetcher = bb.fetch.Fetch([self.recipe_url], self.d) 3472 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
3295 with self.assertRaises(bb.fetch2.NetworkAccess): 3473 with self.assertRaises(bb.fetch2.NetworkAccess):
3296 fetcher.download() 3474 fetcher.download()
@@ -3300,7 +3478,6 @@ class FetchPremirroronlyMercurialTest(FetcherTest):
3300 the test covers also basic hg:// clone (see fetch_and_create_tarball 3478 the test covers also basic hg:// clone (see fetch_and_create_tarball
3301 """ 3479 """
3302 def skipIfNoHg(): 3480 def skipIfNoHg():
3303 import shutil
3304 if not shutil.which('hg'): 3481 if not shutil.which('hg'):
3305 return unittest.skip('Mercurial not installed') 3482 return unittest.skip('Mercurial not installed')
3306 return lambda f: f 3483 return lambda f: f
@@ -3347,7 +3524,7 @@ class FetchPremirroronlyBrokenTarball(FetcherTest):
3347 os.mkdir(self.mirrordir) 3524 os.mkdir(self.mirrordir)
3348 self.reponame = "bitbake" 3525 self.reponame = "bitbake"
3349 self.gitdir = os.path.join(self.tempdir, "git", self.reponame) 3526 self.gitdir = os.path.join(self.tempdir, "git", self.reponame)
3350 self.recipe_url = "git://git.fake.repo/bitbake;protocol=https" 3527 self.recipe_url = "git://git.fake.repo/bitbake;protocol=https;branch=master"
3351 self.d.setVar("BB_FETCH_PREMIRRORONLY", "1") 3528 self.d.setVar("BB_FETCH_PREMIRRORONLY", "1")
3352 self.d.setVar("BB_NO_NETWORK", "1") 3529 self.d.setVar("BB_NO_NETWORK", "1")
3353 self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n") 3530 self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n")
@@ -3356,10 +3533,223 @@ class FetchPremirroronlyBrokenTarball(FetcherTest):
3356 targz.write("This is not tar.gz file!") 3533 targz.write("This is not tar.gz file!")
3357 3534
3358 def test_mirror_broken_download(self): 3535 def test_mirror_broken_download(self):
3359 import sys
3360 self.d.setVar("SRCREV", "0"*40) 3536 self.d.setVar("SRCREV", "0"*40)
3361 fetcher = bb.fetch.Fetch([self.recipe_url], self.d) 3537 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
3362 with self.assertRaises(bb.fetch2.FetchError), self.assertLogs() as logs: 3538 with self.assertRaises(bb.fetch2.FetchError), self.assertLogs() as logs:
3363 fetcher.download() 3539 fetcher.download()
3364 output = "".join(logs.output) 3540 output = "".join(logs.output)
3365 self.assertFalse(" not a git repository (or any parent up to mount point /)" in output) 3541 self.assertFalse(" not a git repository (or any parent up to mount point /)" in output)
3542
3543class GoModTest(FetcherTest):
3544
3545 @skipIfNoNetwork()
3546 def test_gomod_url(self):
3547 urls = ['gomod://github.com/Azure/azure-sdk-for-go/sdk/storage/azblob;version=v1.0.0;'
3548 'sha256sum=9bb69aea32f1d59711701f9562d66432c9c0374205e5009d1d1a62f03fb4fdad']
3549
3550 fetcher = bb.fetch2.Fetch(urls, self.d)
3551 ud = fetcher.ud[urls[0]]
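        # The Go module proxy case-encodes uppercase letters as '!' + lowercase, so 'Azure' becomes '!azure' ('%21azure' once URL-quoted)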
3552 self.assertEqual(ud.url, 'https://proxy.golang.org/github.com/%21azure/azure-sdk-for-go/sdk/storage/azblob/%40v/v1.0.0.zip')
3553 self.assertEqual(ud.parm['downloadfilename'], 'github.com.Azure.azure-sdk-for-go.sdk.storage.azblob@v1.0.0.zip')
3554 self.assertEqual(ud.parm['name'], 'github.com/Azure/azure-sdk-for-go/sdk/storage/azblob@v1.0.0')
3555
3556 fetcher.download()
3557 fetcher.unpack(self.unpackdir)
3558 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3559 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.zip')))
3560 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.mod')))
3561 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.mod')),
3562 '7873b8544842329b4f385a3aa6cf82cc2bc8defb41a04fa5291c35fd5900e873')
3563
3564 @skipIfNoNetwork()
3565 def test_gomod_url_go_mod_only(self):
3566 urls = ['gomod://github.com/Azure/azure-sdk-for-go/sdk/storage/azblob;version=v1.0.0;mod=1;'
3567 'sha256sum=7873b8544842329b4f385a3aa6cf82cc2bc8defb41a04fa5291c35fd5900e873']
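        # mod=1 requests only the module's .mod file rather than the full source zip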
3568
3569 fetcher = bb.fetch2.Fetch(urls, self.d)
3570 ud = fetcher.ud[urls[0]]
3571 self.assertEqual(ud.url, 'https://proxy.golang.org/github.com/%21azure/azure-sdk-for-go/sdk/storage/azblob/%40v/v1.0.0.mod')
3572 self.assertEqual(ud.parm['downloadfilename'], 'github.com.Azure.azure-sdk-for-go.sdk.storage.azblob@v1.0.0.mod')
3573 self.assertEqual(ud.parm['name'], 'github.com/Azure/azure-sdk-for-go/sdk/storage/azblob@v1.0.0')
3574
3575 fetcher.download()
3576 fetcher.unpack(self.unpackdir)
3577 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3578 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.mod')))
3579
3580 @skipIfNoNetwork()
3581 def test_gomod_url_sha256sum_varflag(self):
3582 urls = ['gomod://gopkg.in/ini.v1;version=v1.67.0']
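        # The expected checksum may also be supplied as a SRC_URI varflag named '<module>@<version>.sha256sum' instead of in the URL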
3583 self.d.setVarFlag('SRC_URI', 'gopkg.in/ini.v1@v1.67.0.sha256sum', 'bd845dfc762a87a56e5a32a07770dc83e86976db7705d7f89c5dbafdc60b06c6')
3584
3585 fetcher = bb.fetch2.Fetch(urls, self.d)
3586 ud = fetcher.ud[urls[0]]
3587 self.assertEqual(ud.url, 'https://proxy.golang.org/gopkg.in/ini.v1/%40v/v1.67.0.zip')
3588 self.assertEqual(ud.parm['downloadfilename'], 'gopkg.in.ini.v1@v1.67.0.zip')
3589 self.assertEqual(ud.parm['name'], 'gopkg.in/ini.v1@v1.67.0')
3590
3591 fetcher.download()
3592 fetcher.unpack(self.unpackdir)
3593 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3594 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.zip')))
3595 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')))
3596 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')),
3597 '13aedd85db8e555104108e0e613bb7e4d1242af7f27c15423dd9ab63b60b72a1')
3598
3599 @skipIfNoNetwork()
3600 def test_gomod_url_no_go_mod_in_module(self):
3601 urls = ['gomod://gopkg.in/ini.v1;version=v1.67.0;'
3602 'sha256sum=bd845dfc762a87a56e5a32a07770dc83e86976db7705d7f89c5dbafdc60b06c6']
3603
3604 fetcher = bb.fetch2.Fetch(urls, self.d)
3605 ud = fetcher.ud[urls[0]]
3606 self.assertEqual(ud.url, 'https://proxy.golang.org/gopkg.in/ini.v1/%40v/v1.67.0.zip')
3607 self.assertEqual(ud.parm['downloadfilename'], 'gopkg.in.ini.v1@v1.67.0.zip')
3608 self.assertEqual(ud.parm['name'], 'gopkg.in/ini.v1@v1.67.0')
3609
3610 fetcher.download()
3611 fetcher.unpack(self.unpackdir)
3612 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3613 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.zip')))
3614 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')))
3615 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')),
3616 '13aedd85db8e555104108e0e613bb7e4d1242af7f27c15423dd9ab63b60b72a1')
3617
3618 @skipIfNoNetwork()
3619 def test_gomod_url_host_only(self):
3620 urls = ['gomod://go.opencensus.io;version=v0.24.0;'
3621 'sha256sum=203a767d7f8e7c1ebe5588220ad168d1e15b14ae70a636de7ca9a4a88a7e0d0c']
3622
3623 fetcher = bb.fetch2.Fetch(urls, self.d)
3624 ud = fetcher.ud[urls[0]]
3625 self.assertEqual(ud.url, 'https://proxy.golang.org/go.opencensus.io/%40v/v0.24.0.zip')
3626 self.assertEqual(ud.parm['downloadfilename'], 'go.opencensus.io@v0.24.0.zip')
3627 self.assertEqual(ud.parm['name'], 'go.opencensus.io@v0.24.0')
3628
3629 fetcher.download()
3630 fetcher.unpack(self.unpackdir)
3631 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3632 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.zip')))
3633 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.mod')))
3634 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.mod')),
3635 '0dc9ccc660ad21cebaffd548f2cc6efa27891c68b4fbc1f8a3893b00f1acec96')
3636
3637class GoModGitTest(FetcherTest):
3638
3639 @skipIfNoNetwork()
3640 def test_gomodgit_url_repo(self):
3641 urls = ['gomodgit://golang.org/x/net;version=v0.9.0;'
3642 'repo=go.googlesource.com/net;'
3643 'srcrev=694cff8668bac64e0864b552bffc280cd27f21b1']
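        # repo= points the fetcher at the actual git host when it differs from the module path (golang.org/x/net lives at go.googlesource.com/net)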
3644
3645 fetcher = bb.fetch2.Fetch(urls, self.d)
3646 ud = fetcher.ud[urls[0]]
3647 self.assertEqual(ud.host, 'go.googlesource.com')
3648 self.assertEqual(ud.path, '/net')
3649 self.assertEqual(ud.name, 'golang.org/x/net@v0.9.0')
3650 self.assertEqual(self.d.getVar('SRCREV_golang.org/x/net@v0.9.0'), '694cff8668bac64e0864b552bffc280cd27f21b1')
3651
3652 fetcher.download()
3653 self.assertTrue(os.path.exists(ud.localpath))
3654
3655 fetcher.unpack(self.unpackdir)
3656 vcsdir = os.path.join(self.unpackdir, 'pkg/mod/cache/vcs')
3657 self.assertTrue(os.path.exists(os.path.join(vcsdir, 'ed42bd05533fd84ae290a5d33ebd3695a0a2b06131beebd5450825bee8603aca')))
3658 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3659 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'golang.org/x/net/@v/v0.9.0.zip')))
3660 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'golang.org/x/net/@v/v0.9.0.mod')))
3661 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'golang.org/x/net/@v/v0.9.0.mod')),
3662 'c5d6851ede50ec1c001afb763040194b68961bf06997e2605e8bf06dcd2aeb2e')
3663
3664 @skipIfNoNetwork()
3665 def test_gomodgit_url_subdir(self):
3666 urls = ['gomodgit://github.com/Azure/azure-sdk-for-go/sdk/storage/azblob;version=v1.0.0;'
3667 'repo=github.com/Azure/azure-sdk-for-go;subdir=sdk/storage/azblob;'
3668 'srcrev=ec928e0ed34db682b3f783d3739d1c538142e0c3']
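        # subdir= selects the module's subtree within the repository; it is exposed as ud.parm['subpath']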
3669
3670 fetcher = bb.fetch2.Fetch(urls, self.d)
3671 ud = fetcher.ud[urls[0]]
3672 self.assertEqual(ud.host, 'github.com')
3673 self.assertEqual(ud.path, '/Azure/azure-sdk-for-go')
3674 self.assertEqual(ud.parm['subpath'], 'sdk/storage/azblob')
3675 self.assertEqual(ud.name, 'github.com/Azure/azure-sdk-for-go/sdk/storage/azblob@v1.0.0')
3676 self.assertEqual(self.d.getVar('SRCREV_github.com/Azure/azure-sdk-for-go/sdk/storage/azblob@v1.0.0'), 'ec928e0ed34db682b3f783d3739d1c538142e0c3')
3677
3678 fetcher.download()
3679 self.assertTrue(os.path.exists(ud.localpath))
3680
3681 fetcher.unpack(self.unpackdir)
3682 vcsdir = os.path.join(self.unpackdir, 'pkg/mod/cache/vcs')
3683 self.assertTrue(os.path.exists(os.path.join(vcsdir, 'd31d6145676ed3066ce573a8198f326dea5be45a43b3d8f41ce7787fd71d66b3')))
3684 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3685 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.zip')))
3686 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.mod')))
3687 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.mod')),
3688 '7873b8544842329b4f385a3aa6cf82cc2bc8defb41a04fa5291c35fd5900e873')
3689
3690 @skipIfNoNetwork()
3691 def test_gomodgit_url_srcrev_var(self):
3692 urls = ['gomodgit://gopkg.in/ini.v1;version=v1.67.0']
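        # The revision can also come from a SRCREV_<module>@<version> variable instead of a srcrev= URL parameter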
3693 self.d.setVar('SRCREV_gopkg.in/ini.v1@v1.67.0', 'b2f570e5b5b844226bbefe6fb521d891f529a951')
3694
3695 fetcher = bb.fetch2.Fetch(urls, self.d)
3696 ud = fetcher.ud[urls[0]]
3697 self.assertEqual(ud.host, 'gopkg.in')
3698 self.assertEqual(ud.path, '/ini.v1')
3699 self.assertEqual(ud.name, 'gopkg.in/ini.v1@v1.67.0')
3700 self.assertEqual(ud.parm['srcrev'], 'b2f570e5b5b844226bbefe6fb521d891f529a951')
3701
3702 fetcher.download()
3703 fetcher.unpack(self.unpackdir)
3704 vcsdir = os.path.join(self.unpackdir, 'pkg/mod/cache/vcs')
3705 self.assertTrue(os.path.exists(os.path.join(vcsdir, 'b7879a4be9ba8598851b8278b14c4f71a8316be64913298d1639cce6bde59bc3')))
3706 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3707 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.zip')))
3708 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')))
3709 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')),
3710 '13aedd85db8e555104108e0e613bb7e4d1242af7f27c15423dd9ab63b60b72a1')
3711
3712 @skipIfNoNetwork()
3713 def test_gomodgit_url_no_go_mod_in_module(self):
3714 urls = ['gomodgit://gopkg.in/ini.v1;version=v1.67.0;'
3715 'srcrev=b2f570e5b5b844226bbefe6fb521d891f529a951']
3716
3717 fetcher = bb.fetch2.Fetch(urls, self.d)
3718 ud = fetcher.ud[urls[0]]
3719 self.assertEqual(ud.host, 'gopkg.in')
3720 self.assertEqual(ud.path, '/ini.v1')
3721 self.assertEqual(ud.name, 'gopkg.in/ini.v1@v1.67.0')
3722 self.assertEqual(self.d.getVar('SRCREV_gopkg.in/ini.v1@v1.67.0'), 'b2f570e5b5b844226bbefe6fb521d891f529a951')
3723
3724 fetcher.download()
3725 fetcher.unpack(self.unpackdir)
3726 vcsdir = os.path.join(self.unpackdir, 'pkg/mod/cache/vcs')
3727 self.assertTrue(os.path.exists(os.path.join(vcsdir, 'b7879a4be9ba8598851b8278b14c4f71a8316be64913298d1639cce6bde59bc3')))
3728 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3729 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.zip')))
3730 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')))
3731 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')),
3732 '13aedd85db8e555104108e0e613bb7e4d1242af7f27c15423dd9ab63b60b72a1')
3733
3734 @skipIfNoNetwork()
3735 def test_gomodgit_url_host_only(self):
3736 urls = ['gomodgit://go.opencensus.io;version=v0.24.0;'
3737 'repo=github.com/census-instrumentation/opencensus-go;'
3738 'srcrev=b1a01ee95db0e690d91d7193d037447816fae4c5']
3739
3740 fetcher = bb.fetch2.Fetch(urls, self.d)
3741 ud = fetcher.ud[urls[0]]
3742 self.assertEqual(ud.host, 'github.com')
3743 self.assertEqual(ud.path, '/census-instrumentation/opencensus-go')
3744 self.assertEqual(ud.name, 'go.opencensus.io@v0.24.0')
3745 self.assertEqual(self.d.getVar('SRCREV_go.opencensus.io@v0.24.0'), 'b1a01ee95db0e690d91d7193d037447816fae4c5')
3746
3747 fetcher.download()
3748 fetcher.unpack(self.unpackdir)
3749 vcsdir = os.path.join(self.unpackdir, 'pkg/mod/cache/vcs')
3750 self.assertTrue(os.path.exists(os.path.join(vcsdir, 'aae3ac7b2122ed3345654e6327855e9682f4a5350d63e93dbcfc51c4419df0e1')))
3751 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3752 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.zip')))
3753 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.mod')))
3754 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.mod')),
3755 '0dc9ccc660ad21cebaffd548f2cc6efa27891c68b4fbc1f8a3893b00f1acec96')
diff --git a/bitbake/lib/bb/tests/parse.py b/bitbake/lib/bb/tests/parse.py
index 72d1962e7e..e3cba67ad4 100644
--- a/bitbake/lib/bb/tests/parse.py
+++ b/bitbake/lib/bb/tests/parse.py
@@ -75,6 +75,59 @@ unset B[flag]
75 self.assertEqual(d.getVarFlag("A","flag"), None) 75 self.assertEqual(d.getVarFlag("A","flag"), None)
76 self.assertEqual(d.getVar("B"), "2") 76 self.assertEqual(d.getVar("B"), "2")
77 77
78 defaulttest = """
79A = "set value"
80A ??= "default value"
81
82A[flag_set_vs_question] = "set flag"
83A[flag_set_vs_question] ?= "question flag"
84
85A[flag_set_vs_default] = "set flag"
86A[flag_set_vs_default] ??= "default flag"
87
88A[flag_question] ?= "question flag"
89
90A[flag_default] ??= "default flag"
91
92A[flag_question_vs_default] ?= "question flag"
93A[flag_question_vs_default] ??= "default flag"
94
95A[flag_default_vs_question] ??= "default flag"
96A[flag_default_vs_question] ?= "question flag"
97
98A[flag_set_question_default] = "set flag"
99A[flag_set_question_default] ?= "question flag"
100A[flag_set_question_default] ??= "default flag"
101
102A[flag_set_default_question] = "set flag"
103A[flag_set_default_question] ??= "default flag"
104A[flag_set_default_question] ?= "question flag"
105
106A[flag_set_twice] = "set flag first"
107A[flag_set_twice] = "set flag second"
108
109A[flag_question_twice] ?= "question flag first"
110A[flag_question_twice] ?= "question flag second"
111
112A[flag_default_twice] ??= "default flag first"
113A[flag_default_twice] ??= "default flag second"
114"""
115 def test_parse_defaulttest(self):
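        # '=' always wins; '?=' keeps the first value it sets; '??=' is a deferred default, so the last '??=' wins and any '?=' overrides it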
116 f = self.parsehelper(self.defaulttest)
117 d = bb.parse.handle(f.name, self.d)['']
118 self.assertEqual(d.getVar("A"), "set value")
119 self.assertEqual(d.getVarFlag("A","flag_set_vs_question"), "set flag")
120 self.assertEqual(d.getVarFlag("A","flag_set_vs_default"), "set flag")
121 self.assertEqual(d.getVarFlag("A","flag_question"), "question flag")
122 self.assertEqual(d.getVarFlag("A","flag_default"), "default flag")
123 self.assertEqual(d.getVarFlag("A","flag_question_vs_default"), "question flag")
124 self.assertEqual(d.getVarFlag("A","flag_default_vs_question"), "question flag")
125 self.assertEqual(d.getVarFlag("A","flag_set_question_default"), "set flag")
126 self.assertEqual(d.getVarFlag("A","flag_set_default_question"), "set flag")
127 self.assertEqual(d.getVarFlag("A","flag_set_twice"), "set flag second")
128 self.assertEqual(d.getVarFlag("A","flag_question_twice"), "question flag first")
129 self.assertEqual(d.getVarFlag("A","flag_default_twice"), "default flag second")
130
78 exporttest = """ 131 exporttest = """
79A = "a" 132A = "a"
80export B = "b" 133export B = "b"
@@ -177,7 +230,19 @@ python () {
177 230
178 addtask_deltask = """ 231 addtask_deltask = """
179addtask do_patch after do_foo after do_unpack before do_configure before do_compile 232addtask do_patch after do_foo after do_unpack before do_configure before do_compile
180addtask do_fetch do_patch 233addtask do_fetch2 do_patch2
234
235addtask do_myplaintask
236addtask do_myplaintask2
237deltask do_myplaintask2
238addtask do_mytask# comment
239addtask do_mytask2 # comment2
240addtask do_mytask3
241deltask do_mytask3# comment
242deltask do_mytask4 # comment2
243
244# Ensure an "after" dependency missing its do_ prefix still works
245addtask do_mytask5 after mytask
181 246
182MYVAR = "do_patch" 247MYVAR = "do_patch"
183EMPTYVAR = "" 248EMPTYVAR = ""
@@ -185,17 +250,12 @@ deltask do_fetch ${MYVAR} ${EMPTYVAR}
185deltask ${EMPTYVAR} 250deltask ${EMPTYVAR}
186""" 251"""
187 def test_parse_addtask_deltask(self): 252 def test_parse_addtask_deltask(self):
188 import sys
189 253
190 with self.assertLogs() as logs: 254 f = self.parsehelper(self.addtask_deltask)
191 f = self.parsehelper(self.addtask_deltask) 255 d = bb.parse.handle(f.name, self.d)['']
192 d = bb.parse.handle(f.name, self.d)['']
193 256
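        # Trailing '# comment' text on addtask/deltask is stripped, and deltasked tasks must not appear in the task list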
194 output = "".join(logs.output) 257 self.assertSequenceEqual(['do_fetch2', 'do_patch2', 'do_myplaintask', 'do_mytask', 'do_mytask2', 'do_mytask5'], bb.build.listtasks(d))
195 self.assertTrue("addtask contained multiple 'before' keywords" in output) 258 self.assertEqual(['do_mytask'], d.getVarFlag("do_mytask5", "deps"))
196 self.assertTrue("addtask contained multiple 'after' keywords" in output)
197 self.assertTrue('addtask ignored: " do_patch"' in output)
198 #self.assertTrue('dependent task do_foo for do_patch does not exist' in output)
199 259
200 broken_multiline_comment = """ 260 broken_multiline_comment = """
201# First line of comment \\ 261# First line of comment \\
@@ -341,3 +401,65 @@ EXPORT_FUNCTIONS do_compile do_compilepython
341 self.assertIn("else", d.getVar("do_compilepython")) 401 self.assertIn("else", d.getVar("do_compilepython"))
342 check_function_flags(d) 402 check_function_flags(d)
343 403
404 export_function_unclosed_tab = """
405do_compile () {
406 bb.note("Something")
407\t}
408"""
409 export_function_unclosed_space = """
410do_compile () {
411 bb.note("Something")
412 }
413"""
414 export_function_residue = """
415do_compile () {
416 bb.note("Something")
417}
418
419include \\
420"""
421
422 def test_unclosed_functions(self):
423 def test_helper(content, expected_error):
424 with tempfile.TemporaryDirectory() as tempdir:
425 recipename = tempdir + "/recipe_unclosed.bb"
426 with open(recipename, "w") as f:
427 f.write(content)
428 f.flush()
429 os.chdir(tempdir)
430 with self.assertRaises(bb.parse.ParseError) as error:
431 bb.parse.handle(recipename, bb.data.createCopy(self.d))
432 self.assertIn(expected_error, str(error.exception))
433
434 with tempfile.TemporaryDirectory() as tempdir:
435 test_helper(self.export_function_unclosed_tab, "Unparsed lines from unclosed function")
436 test_helper(self.export_function_unclosed_space, "Unparsed lines from unclosed function")
437 test_helper(self.export_function_residue, "Unparsed lines")
438
439 recipename_closed = tempdir + "/recipe_closed.bb"
440 with open(recipename_closed, "w") as in_file:
441 lines = self.export_function_unclosed_tab.split("\n")
442 lines[3] = "}"
443 in_file.write("\n".join(lines))
444 in_file.flush()
445 bb.parse.handle(recipename_closed, bb.data.createCopy(self.d))
446
447 special_character_assignment = """
448A+="a"
449A+ = "b"
450+ = "c"
451"""
452 ambiguous_assignment = """
453+= "d"
454"""
455 def test_parse_special_character_assignment(self):
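        # 'A+="a"' parses as an append to A, while 'A+ = "b"' assigns a variable literally named 'A+'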
456 f = self.parsehelper(self.special_character_assignment)
457 d = bb.parse.handle(f.name, self.d)['']
458 self.assertEqual(d.getVar("A"), " a")
459 self.assertEqual(d.getVar("A+"), "b")
460 self.assertEqual(d.getVar("+"), "c")
461
462 f = self.parsehelper(self.ambiguous_assignment)
463 with self.assertRaises(bb.parse.ParseError) as error:
464 bb.parse.handle(f.name, self.d)
465 self.assertIn("Empty variable name in assignment", str(error.exception))
diff --git a/bitbake/lib/bb/tests/persist_data.py b/bitbake/lib/bb/tests/persist_data.py
deleted file mode 100644
index f641b5acbc..0000000000
--- a/bitbake/lib/bb/tests/persist_data.py
+++ /dev/null
@@ -1,129 +0,0 @@
1#
2# BitBake Test for lib/bb/persist_data/
3#
4# Copyright (C) 2018 Garmin Ltd.
5#
6# SPDX-License-Identifier: GPL-2.0-only
7#
8
9import unittest
10import bb.data
11import bb.persist_data
12import tempfile
13import threading
14
15class PersistDataTest(unittest.TestCase):
16 def _create_data(self):
17 return bb.persist_data.persist('TEST_PERSIST_DATA', self.d)
18
19 def setUp(self):
20 self.d = bb.data.init()
21 self.tempdir = tempfile.TemporaryDirectory()
22 self.d['PERSISTENT_DIR'] = self.tempdir.name
23 self.data = self._create_data()
24 self.items = {
25 'A1': '1',
26 'B1': '2',
27 'C2': '3'
28 }
29 self.stress_count = 10000
30 self.thread_count = 5
31
32 for k,v in self.items.items():
33 self.data[k] = v
34
35 def tearDown(self):
36 self.tempdir.cleanup()
37
38 def _iter_helper(self, seen, iterator):
39 with iter(iterator):
40 for v in iterator:
41 self.assertTrue(v in seen)
42 seen.remove(v)
43 self.assertEqual(len(seen), 0, '%s not seen' % seen)
44
45 def test_get(self):
46 for k, v in self.items.items():
47 self.assertEqual(self.data[k], v)
48
49 self.assertIsNone(self.data.get('D'))
50 with self.assertRaises(KeyError):
51 self.data['D']
52
53 def test_set(self):
54 for k, v in self.items.items():
55 self.data[k] += '-foo'
56
57 for k, v in self.items.items():
58 self.assertEqual(self.data[k], v + '-foo')
59
60 def test_delete(self):
61 self.data['D'] = '4'
62 self.assertEqual(self.data['D'], '4')
63 del self.data['D']
64 self.assertIsNone(self.data.get('D'))
65 with self.assertRaises(KeyError):
66 self.data['D']
67
68 def test_contains(self):
69 for k in self.items:
70 self.assertTrue(k in self.data)
71 self.assertTrue(self.data.has_key(k))
72 self.assertFalse('NotFound' in self.data)
73 self.assertFalse(self.data.has_key('NotFound'))
74
75 def test_len(self):
76 self.assertEqual(len(self.data), len(self.items))
77
78 def test_iter(self):
79 self._iter_helper(set(self.items.keys()), self.data)
80
81 def test_itervalues(self):
82 self._iter_helper(set(self.items.values()), self.data.itervalues())
83
84 def test_iteritems(self):
85 self._iter_helper(set(self.items.items()), self.data.iteritems())
86
87 def test_get_by_pattern(self):
88 self._iter_helper({'1', '2'}, self.data.get_by_pattern('_1'))
89
90 def _stress_read(self, data):
91 for i in range(self.stress_count):
92 for k in self.items:
93 data[k]
94
95 def _stress_write(self, data):
96 for i in range(self.stress_count):
97 for k, v in self.items.items():
98 data[k] = v + str(i)
99
100 def _validate_stress(self):
101 for k, v in self.items.items():
102 self.assertEqual(self.data[k], v + str(self.stress_count - 1))
103
104 def test_stress(self):
105 self._stress_read(self.data)
106 self._stress_write(self.data)
107 self._validate_stress()
108
109 def test_stress_threads(self):
110 def read_thread():
111 data = self._create_data()
112 self._stress_read(data)
113
114 def write_thread():
115 data = self._create_data()
116 self._stress_write(data)
117
118 threads = []
119 for i in range(self.thread_count):
120 threads.append(threading.Thread(target=read_thread))
121 threads.append(threading.Thread(target=write_thread))
122
123 for t in threads:
124 t.start()
125 self._stress_read(self.data)
126 for t in threads:
127 t.join()
128 self._validate_stress()
129
diff --git a/bitbake/lib/bb/tests/runqueue-tests/classes/base.bbclass b/bitbake/lib/bb/tests/runqueue-tests/classes/base.bbclass
index b57650d591..80b003b2b5 100644
--- a/bitbake/lib/bb/tests/runqueue-tests/classes/base.bbclass
+++ b/bitbake/lib/bb/tests/runqueue-tests/classes/base.bbclass
@@ -9,7 +9,7 @@ def stamptask(d):
9 with open(stampname, "a+") as f: 9 with open(stampname, "a+") as f:
10 f.write(d.getVar("BB_UNIHASH") + "\n") 10 f.write(d.getVar("BB_UNIHASH") + "\n")
11 11
12 if d.getVar("BB_CURRENT_MC") != "default": 12 if d.getVar("BB_CURRENT_MC") != "":
13 thistask = d.expand("${BB_CURRENT_MC}:${PN}:${BB_CURRENTTASK}") 13 thistask = d.expand("${BB_CURRENT_MC}:${PN}:${BB_CURRENTTASK}")
14 if thistask in d.getVar("SLOWTASKS").split(): 14 if thistask in d.getVar("SLOWTASKS").split():
15 bb.note("Slowing task %s" % thistask) 15 bb.note("Slowing task %s" % thistask)
diff --git a/bitbake/lib/bb/tests/runqueue-tests/recipes/g1.bb b/bitbake/lib/bb/tests/runqueue-tests/recipes/g1.bb
new file mode 100644
index 0000000000..3c7dca0257
--- /dev/null
+++ b/bitbake/lib/bb/tests/runqueue-tests/recipes/g1.bb
@@ -0,0 +1,2 @@
1do_build[mcdepends] = "mc::mc-1:h1:do_invalid"
2
diff --git a/bitbake/lib/bb/tests/runqueue-tests/recipes/h1.bb b/bitbake/lib/bb/tests/runqueue-tests/recipes/h1.bb
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/bitbake/lib/bb/tests/runqueue-tests/recipes/h1.bb
diff --git a/bitbake/lib/bb/tests/runqueue.py b/bitbake/lib/bb/tests/runqueue.py
index cc87e8d6a8..74f5ded2e6 100644
--- a/bitbake/lib/bb/tests/runqueue.py
+++ b/bitbake/lib/bb/tests/runqueue.py
@@ -26,7 +26,7 @@ class RunQueueTests(unittest.TestCase):
26 a1_sstatevalid = "a1:do_package a1:do_package_qa a1:do_packagedata a1:do_package_write_ipk a1:do_package_write_rpm a1:do_populate_lic a1:do_populate_sysroot" 26 a1_sstatevalid = "a1:do_package a1:do_package_qa a1:do_packagedata a1:do_package_write_ipk a1:do_package_write_rpm a1:do_populate_lic a1:do_populate_sysroot"
27 b1_sstatevalid = "b1:do_package b1:do_package_qa b1:do_packagedata b1:do_package_write_ipk b1:do_package_write_rpm b1:do_populate_lic b1:do_populate_sysroot" 27 b1_sstatevalid = "b1:do_package b1:do_package_qa b1:do_packagedata b1:do_package_write_ipk b1:do_package_write_rpm b1:do_populate_lic b1:do_populate_sysroot"
28 28
29 def run_bitbakecmd(self, cmd, builddir, sstatevalid="", slowtasks="", extraenv=None, cleanup=False): 29 def run_bitbakecmd(self, cmd, builddir, sstatevalid="", slowtasks="", extraenv=None, cleanup=False, allowfailure=False):
30 env = os.environ.copy() 30 env = os.environ.copy()
31 env["BBPATH"] = os.path.realpath(os.path.join(os.path.dirname(__file__), "runqueue-tests")) 31 env["BBPATH"] = os.path.realpath(os.path.join(os.path.dirname(__file__), "runqueue-tests"))
32 env["BB_ENV_PASSTHROUGH_ADDITIONS"] = "SSTATEVALID SLOWTASKS TOPDIR" 32 env["BB_ENV_PASSTHROUGH_ADDITIONS"] = "SSTATEVALID SLOWTASKS TOPDIR"
@@ -41,6 +41,8 @@ class RunQueueTests(unittest.TestCase):
41 output = subprocess.check_output(cmd, env=env, stderr=subprocess.STDOUT,universal_newlines=True, cwd=builddir) 41 output = subprocess.check_output(cmd, env=env, stderr=subprocess.STDOUT,universal_newlines=True, cwd=builddir)
42 print(output) 42 print(output)
43 except subprocess.CalledProcessError as e: 43 except subprocess.CalledProcessError as e:
44 if allowfailure:
45 return e.output
44 self.fail("Command %s failed with %s" % (cmd, e.output)) 46 self.fail("Command %s failed with %s" % (cmd, e.output))
45 tasks = [] 47 tasks = []
46 tasklog = builddir + "/task.log" 48 tasklog = builddir + "/task.log"
@@ -314,6 +316,13 @@ class RunQueueTests(unittest.TestCase):
314 ["mc_2:a1:%s" % t for t in rerun_tasks] 316 ["mc_2:a1:%s" % t for t in rerun_tasks]
315 self.assertEqual(set(tasks), set(expected)) 317 self.assertEqual(set(tasks), set(expected))
316 318
319 # Check that a multiconfig that doesn't exist rasies a correct error message
320 error_output = self.run_bitbakecmd(["bitbake", "g1"], tempdir, "", extraenv=extraenv, cleanup=True, allowfailure=True)
321 self.assertIn("non-existent task", error_output)
322 # If the word 'Traceback' or 'KeyError' is in the output we've regressed
323 self.assertNotIn("Traceback", error_output)
324 self.assertNotIn("KeyError", error_output)
325
317 self.shutdown(tempdir) 326 self.shutdown(tempdir)
318 327
319 def test_hashserv_single(self): 328 def test_hashserv_single(self):
diff --git a/bitbake/lib/bb/tests/utils.py b/bitbake/lib/bb/tests/utils.py
index c363f62d7d..52b7bf85bf 100644
--- a/bitbake/lib/bb/tests/utils.py
+++ b/bitbake/lib/bb/tests/utils.py
@@ -130,6 +130,14 @@ class Checksum(unittest.TestCase):
130 checksum = bb.utils.sha256_file(f.name) 130 checksum = bb.utils.sha256_file(f.name)
131 self.assertEqual(checksum, "fcfbae8bf6b721dbb9d2dc6a9334a58f2031a9a9b302999243f99da4d7f12d0f") 131 self.assertEqual(checksum, "fcfbae8bf6b721dbb9d2dc6a9334a58f2031a9a9b302999243f99da4d7f12d0f")
132 132
133 def test_goh1(self):
134 import hashlib
135 with tempfile.NamedTemporaryFile() as f:
136 f.write(self.filler)
137 f.flush()
138 checksum = bb.utils.goh1_file(f.name)
139 self.assertEqual(checksum, "81191f04d4abf413e5badd234814e4202d9efa73e6f9437e9ddd6b8165b569ef")
140
133class EditMetadataFile(unittest.TestCase): 141class EditMetadataFile(unittest.TestCase):
134 _origfile = """ 142 _origfile = """
135# A comment 143# A comment
@@ -684,3 +692,14 @@ class EnvironmentTests(unittest.TestCase):
684 self.assertIn("A", os.environ) 692 self.assertIn("A", os.environ)
685 self.assertEqual(os.environ["A"], "this is A") 693 self.assertEqual(os.environ["A"], "this is A")
686 self.assertNotIn("B", os.environ) 694 self.assertNotIn("B", os.environ)
695
696class FilemodeTests(unittest.TestCase):
697 def test_filemode_convert(self):
698 self.assertEqual(0o775, bb.utils.to_filemode("0o775"))
699 self.assertEqual(0o775, bb.utils.to_filemode(0o775))
700 self.assertEqual(0o775, bb.utils.to_filemode("775"))
701 with self.assertRaises(ValueError):
702 bb.utils.to_filemode("xyz")
703 with self.assertRaises(ValueError):
704 bb.utils.to_filemode("999")
705
diff --git a/bitbake/lib/bb/tinfoil.py b/bitbake/lib/bb/tinfoil.py
index dcd3910cc4..e7fbcbca0a 100644
--- a/bitbake/lib/bb/tinfoil.py
+++ b/bitbake/lib/bb/tinfoil.py
@@ -14,7 +14,8 @@ import time
14import atexit 14import atexit
15import re 15import re
16from collections import OrderedDict, defaultdict 16from collections import OrderedDict, defaultdict
17from functools import partial 17from functools import partial, wraps
18from contextlib import contextmanager
18 19
19import bb.cache 20import bb.cache
20import bb.cooker 21import bb.cooker
@@ -26,6 +27,135 @@ import bb.remotedata
26from bb.main import setup_bitbake, BitBakeConfigParameters 27from bb.main import setup_bitbake, BitBakeConfigParameters
27import bb.fetch2 28import bb.fetch2
28 29
30def wait_for(f):
31 """
32 Wrap a function that makes an asynchronous tinfoil call using
33 self.run_command() and wait for events to say that the call has been
34 successful, or an error has occurred.
35 """
36 @wraps(f)
37 def wrapper(self, *args, handle_events=True, extra_events=None, event_callback=None, **kwargs):
38 if handle_events:
39 # A reasonable set of default events matching up with those we handle below
40 eventmask = [
41 'bb.event.BuildStarted',
42 'bb.event.BuildCompleted',
43 'logging.LogRecord',
44 'bb.event.NoProvider',
45 'bb.command.CommandCompleted',
46 'bb.command.CommandFailed',
47 'bb.build.TaskStarted',
48 'bb.build.TaskFailed',
49 'bb.build.TaskSucceeded',
50 'bb.build.TaskFailedSilent',
51 'bb.build.TaskProgress',
52 'bb.runqueue.runQueueTaskStarted',
53 'bb.runqueue.sceneQueueTaskStarted',
54 'bb.event.ProcessStarted',
55 'bb.event.ProcessProgress',
56 'bb.event.ProcessFinished',
57 ]
58 if extra_events:
59 eventmask.extend(extra_events)
60 ret = self.set_event_mask(eventmask)
61
62 includelogs = self.config_data.getVar('BBINCLUDELOGS')
63 loglines = self.config_data.getVar('BBINCLUDELOGS_LINES')
64
65 # Call actual function
66 ret = f(self, *args, **kwargs)
67
68 if handle_events:
69 lastevent = time.time()
70 result = False
 71 # Borrowed from knotty; somewhat hackily, we use the helper
 72 # as the object to store "shutdown" on
73 helper = bb.ui.uihelper.BBUIHelper()
74 helper.shutdown = 0
75 parseprogress = None
76 termfilter = bb.ui.knotty.TerminalFilter(helper, helper, self.logger.handlers, quiet=self.quiet)
77 try:
78 while True:
79 try:
80 event = self.wait_event(0.25)
81 if event:
82 lastevent = time.time()
83 if event_callback and event_callback(event):
84 continue
85 if helper.eventHandler(event):
86 if isinstance(event, bb.build.TaskFailedSilent):
87 self.logger.warning("Logfile for failed setscene task is %s" % event.logfile)
88 elif isinstance(event, bb.build.TaskFailed):
89 bb.ui.knotty.print_event_log(event, includelogs, loglines, termfilter)
90 continue
91 if isinstance(event, bb.event.ProcessStarted):
92 if self.quiet > 1:
93 continue
94 parseprogress = bb.ui.knotty.new_progress(event.processname, event.total)
95 parseprogress.start(False)
96 continue
97 if isinstance(event, bb.event.ProcessProgress):
98 if self.quiet > 1:
99 continue
100 if parseprogress:
101 parseprogress.update(event.progress)
102 else:
103 bb.warn("Got ProcessProgress event for something that never started?")
104 continue
105 if isinstance(event, bb.event.ProcessFinished):
106 if self.quiet > 1:
107 continue
108 if parseprogress:
109 parseprogress.finish()
110 parseprogress = None
111 continue
112 if isinstance(event, bb.command.CommandCompleted):
113 result = True
114 break
115 if isinstance(event, (bb.command.CommandFailed, bb.command.CommandExit)):
116 self.logger.error(str(event))
117 result = False
118 break
119 if isinstance(event, logging.LogRecord):
120 if event.taskpid == 0 or event.levelno > logging.INFO:
121 self.logger.handle(event)
122 continue
123 if isinstance(event, bb.event.NoProvider):
124 self.logger.error(str(event))
125 result = False
126 break
127 elif helper.shutdown > 1:
128 break
129 termfilter.updateFooter()
130 if time.time() > (lastevent + (3*60)):
131 if not self.run_command('ping', handle_events=False):
132 print("\nUnable to ping server and no events, closing down...\n")
133 return False
134 except KeyboardInterrupt:
135 termfilter.clearFooter()
136 if helper.shutdown == 1:
137 print("\nSecond Keyboard Interrupt, stopping...\n")
138 ret = self.run_command("stateForceShutdown")
139 if ret and ret[2]:
140 self.logger.error("Unable to cleanly stop: %s" % ret[2])
141 elif helper.shutdown == 0:
142 print("\nKeyboard Interrupt, closing down...\n")
143 interrupted = True
144 ret = self.run_command("stateShutdown")
145 if ret and ret[2]:
146 self.logger.error("Unable to cleanly shutdown: %s" % ret[2])
147 helper.shutdown = helper.shutdown + 1
148 termfilter.clearFooter()
149 finally:
150 termfilter.finish()
151 if helper.failed_tasks:
152 result = False
153 return result
154 else:
155 return ret
156
157 return wrapper
158
29 159
30# We need this in order to shut down the connection to the bitbake server, 160# We need this in order to shut down the connection to the bitbake server,
31# otherwise the process will never properly exit 161# otherwise the process will never properly exit
@@ -188,11 +318,19 @@ class TinfoilCookerAdapter:
188 self._cache[name] = attrvalue 318 self._cache[name] = attrvalue
189 return attrvalue 319 return attrvalue
190 320
321 class TinfoilSkiplistByMcAdapter:
322 def __init__(self, tinfoil):
323 self.tinfoil = tinfoil
324
325 def __getitem__(self, mc):
326 return self.tinfoil.get_skipped_recipes(mc)
327
191 def __init__(self, tinfoil): 328 def __init__(self, tinfoil):
192 self.tinfoil = tinfoil 329 self.tinfoil = tinfoil
193 self.multiconfigs = [''] + (tinfoil.config_data.getVar('BBMULTICONFIG') or '').split() 330 self.multiconfigs = [''] + (tinfoil.config_data.getVar('BBMULTICONFIG') or '').split()
194 self.collections = {} 331 self.collections = {}
195 self.recipecaches = {} 332 self.recipecaches = {}
333 self.skiplist_by_mc = self.TinfoilSkiplistByMcAdapter(tinfoil)
196 for mc in self.multiconfigs: 334 for mc in self.multiconfigs:
197 self.collections[mc] = self.TinfoilCookerCollectionAdapter(tinfoil, mc) 335 self.collections[mc] = self.TinfoilCookerCollectionAdapter(tinfoil, mc)
198 self.recipecaches[mc] = self.TinfoilRecipeCacheAdapter(tinfoil, mc) 336 self.recipecaches[mc] = self.TinfoilRecipeCacheAdapter(tinfoil, mc)
@@ -201,8 +339,6 @@ class TinfoilCookerAdapter:
201 # Grab these only when they are requested since they aren't always used 339 # Grab these only when they are requested since they aren't always used
202 if name in self._cache: 340 if name in self._cache:
203 return self._cache[name] 341 return self._cache[name]
204 elif name == 'skiplist':
205 attrvalue = self.tinfoil.get_skipped_recipes()
206 elif name == 'bbfile_config_priorities': 342 elif name == 'bbfile_config_priorities':
207 ret = self.tinfoil.run_command('getLayerPriorities') 343 ret = self.tinfoil.run_command('getLayerPriorities')
208 bbfile_config_priorities = [] 344 bbfile_config_priorities = []
@@ -514,12 +650,12 @@ class Tinfoil:
514 """ 650 """
515 return defaultdict(list, self.run_command('getOverlayedRecipes', mc)) 651 return defaultdict(list, self.run_command('getOverlayedRecipes', mc))
516 652
517 def get_skipped_recipes(self): 653 def get_skipped_recipes(self, mc=''):
518 """ 654 """
519 Find recipes which were skipped (i.e. SkipRecipe was raised 655 Find recipes which were skipped (i.e. SkipRecipe was raised
520 during parsing). 656 during parsing).
521 """ 657 """
522 return OrderedDict(self.run_command('getSkippedRecipes')) 658 return OrderedDict(self.run_command('getSkippedRecipes', mc))
523 659
524 def get_all_providers(self, mc=''): 660 def get_all_providers(self, mc=''):
525 return defaultdict(list, self.run_command('allProviders', mc)) 661 return defaultdict(list, self.run_command('allProviders', mc))
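As a usage sketch (untested; the 'mc1' multiconfig name is illustrative), the new mc parameter and the skiplist_by_mc adapter expose the same per-multiconfig skip data in two ways:

import bb.tinfoil

with bb.tinfoil.Tinfoil() as tinfoil:
    tinfoil.prepare()
    # Skipped recipes for the default configuration (mc='').
    default_skips = tinfoil.get_skipped_recipes()
    # Skipped recipes for a hypothetical 'mc1' multiconfig, either directly
    # or through the new mapping-style adapter on the cooker proxy.
    mc1_skips = tinfoil.get_skipped_recipes(mc='mc1')
    mc1_skips_too = tinfoil.cooker.skiplist_by_mc['mc1']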
@@ -533,6 +669,7 @@ class Tinfoil:
533 def get_runtime_providers(self, rdep): 669 def get_runtime_providers(self, rdep):
534 return self.run_command('getRuntimeProviders', rdep) 670 return self.run_command('getRuntimeProviders', rdep)
535 671
672 # TODO: teach this method about mc
536 def get_recipe_file(self, pn): 673 def get_recipe_file(self, pn):
537 """ 674 """
538 Get the file name for the specified recipe/target. Raises 675 Get the file name for the specified recipe/target. Raises
@@ -541,6 +678,7 @@ class Tinfoil:
541 """ 678 """
542 best = self.find_best_provider(pn) 679 best = self.find_best_provider(pn)
543 if not best or (len(best) > 3 and not best[3]): 680 if not best or (len(best) > 3 and not best[3]):
681 # TODO: pass down mc
544 skiplist = self.get_skipped_recipes() 682 skiplist = self.get_skipped_recipes()
545 taskdata = bb.taskdata.TaskData(None, skiplist=skiplist) 683 taskdata = bb.taskdata.TaskData(None, skiplist=skiplist)
546 skipreasons = taskdata.get_reasons(pn) 684 skipreasons = taskdata.get_reasons(pn)
@@ -633,6 +771,29 @@ class Tinfoil:
633 fn = self.get_recipe_file(pn) 771 fn = self.get_recipe_file(pn)
634 return self.parse_recipe_file(fn) 772 return self.parse_recipe_file(fn)
635 773
774 @contextmanager
775 def _data_tracked_if_enabled(self):
776 """
777 A context manager to enable data tracking for a code segment if data
778 tracking was enabled for this tinfoil instance.
779 """
780 if self.tracking:
781 # Enable history tracking just for the operation
782 self.run_command('enableDataTracking')
783
784 # Here goes the operation with the optional data tracking
785 yield
786
787 if self.tracking:
788 self.run_command('disableDataTracking')
789
790 def finalizeData(self):
791 """
792 Run anonymous functions and expand keys
793 """
794 with self._data_tracked_if_enabled():
795 return self._reconvert_type(self.run_command('finalizeData'), 'DataStoreConnectionHandle')
796
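A minimal sketch of the new call in use, assuming a prepared tinfoil instance (DISTRO is just an example variable):

import bb.tinfoil

with bb.tinfoil.Tinfoil(tracking=True) as tinfoil:
    tinfoil.prepare(config_only=True)
    # Runs anonymous functions and expands keys; because tracking=True,
    # the command is wrapped in enable/disableDataTracking as above.
    d = tinfoil.finalizeData()
    print(d.getVar("DISTRO"))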
636 def parse_recipe_file(self, fn, appends=True, appendlist=None, config_data=None): 797 def parse_recipe_file(self, fn, appends=True, appendlist=None, config_data=None):
637 """ 798 """
638 Parse the specified recipe file (with or without bbappends) 799 Parse the specified recipe file (with or without bbappends)
@@ -645,10 +806,7 @@ class Tinfoil:
645 appendlist: optional list of bbappend files to apply, if you 806 appendlist: optional list of bbappend files to apply, if you
646 want to filter them 807 want to filter them
647 """ 808 """
648 if self.tracking: 809 with self._data_tracked_if_enabled():
649 # Enable history tracking just for the parse operation
650 self.run_command('enableDataTracking')
651 try:
652 if appends and appendlist == []: 810 if appends and appendlist == []:
653 appends = False 811 appends = False
654 if config_data: 812 if config_data:
@@ -660,9 +818,6 @@ class Tinfoil:
660 return self._reconvert_type(dscon, 'DataStoreConnectionHandle') 818 return self._reconvert_type(dscon, 'DataStoreConnectionHandle')
661 else: 819 else:
662 return None 820 return None
663 finally:
664 if self.tracking:
665 self.run_command('disableDataTracking')
666 821
667 def build_file(self, buildfile, task, internal=True): 822 def build_file(self, buildfile, task, internal=True):
668 """ 823 """
@@ -674,6 +829,10 @@ class Tinfoil:
674 """ 829 """
675 return self.run_command('buildFile', buildfile, task, internal) 830 return self.run_command('buildFile', buildfile, task, internal)
676 831
832 @wait_for
833 def build_file_sync(self, *args):
834 self.build_file(*args)
835
677 def build_targets(self, targets, task=None, handle_events=True, extra_events=None, event_callback=None): 836 def build_targets(self, targets, task=None, handle_events=True, extra_events=None, event_callback=None):
678 """ 837 """
679 Builds the specified targets. This is equivalent to a normal invocation 838 Builds the specified targets. This is equivalent to a normal invocation
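A sketch of the synchronous wrapper in use (the recipe path is hypothetical). Unlike build_file(), which only issues the command, build_file_sync() also pumps events until CommandCompleted or CommandFailed arrives, courtesy of the wait_for decorator above:

import bb.tinfoil

with bb.tinfoil.Tinfoil() as tinfoil:
    tinfoil.prepare()
    # Blocks until the build finishes; returns True on success, False on failure.
    ok = tinfoil.build_file_sync("/path/to/example_1.0.bb", "compile")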
diff --git a/bitbake/lib/bb/ui/buildinfohelper.py b/bitbake/lib/bb/ui/buildinfohelper.py
index 8b212b7803..4ee45d67a2 100644
--- a/bitbake/lib/bb/ui/buildinfohelper.py
+++ b/bitbake/lib/bb/ui/buildinfohelper.py
@@ -559,7 +559,10 @@ class ORMWrapper(object):
559 # we might have an invalid link; no way to detect this. just set it to None 559 # we might have an invalid link; no way to detect this. just set it to None
560 filetarget_obj = None 560 filetarget_obj = None
561 561
562 parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY) 562 try:
563 parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
564 except Target_File.DoesNotExist:
565 parent_obj = None
563 566
564 Target_File.objects.create( 567 Target_File.objects.create(
565 target = target_obj, 568 target = target_obj,
diff --git a/bitbake/lib/bb/ui/knotty.py b/bitbake/lib/bb/ui/knotty.py
index f86999bb09..9a589a5c8e 100644
--- a/bitbake/lib/bb/ui/knotty.py
+++ b/bitbake/lib/bb/ui/knotty.py
@@ -24,6 +24,12 @@ import atexit
24from itertools import groupby 24from itertools import groupby
25 25
26from bb.ui import uihelper 26from bb.ui import uihelper
27import bb.build
28import bb.command
29import bb.cooker
30import bb.event
31import bb.runqueue
32import bb.utils
27 33
28featureSet = [bb.cooker.CookerFeatures.SEND_SANITYEVENTS, bb.cooker.CookerFeatures.BASEDATASTORE_TRACKING] 34featureSet = [bb.cooker.CookerFeatures.SEND_SANITYEVENTS, bb.cooker.CookerFeatures.BASEDATASTORE_TRACKING]
29 35
@@ -103,7 +109,7 @@ def new_progress(msg, maxval):
103 return NonInteractiveProgress(msg, maxval) 109 return NonInteractiveProgress(msg, maxval)
104 110
105def pluralise(singular, plural, qty): 111def pluralise(singular, plural, qty):
106 if(qty == 1): 112 if qty == 1:
107 return singular % qty 113 return singular % qty
108 else: 114 else:
109 return plural % qty 115 return plural % qty
@@ -112,6 +118,7 @@ def pluralise(singular, plural, qty):
112class InteractConsoleLogFilter(logging.Filter): 118class InteractConsoleLogFilter(logging.Filter):
113 def __init__(self, tf): 119 def __init__(self, tf):
114 self.tf = tf 120 self.tf = tf
121 super().__init__()
115 122
116 def filter(self, record): 123 def filter(self, record):
117 if record.levelno == bb.msg.BBLogFormatter.NOTE and (record.msg.startswith("Running") or record.msg.startswith("recipe ")): 124 if record.levelno == bb.msg.BBLogFormatter.NOTE and (record.msg.startswith("Running") or record.msg.startswith("recipe ")):
@@ -346,7 +353,7 @@ def print_event_log(event, includelogs, loglines, termfilter):
346 termfilter.clearFooter() 353 termfilter.clearFooter()
347 bb.error("Logfile of failure stored in: %s" % logfile) 354 bb.error("Logfile of failure stored in: %s" % logfile)
348 if includelogs and not event.errprinted: 355 if includelogs and not event.errprinted:
349 print("Log data follows:") 356 bb.plain("Log data follows:")
350 f = open(logfile, "r") 357 f = open(logfile, "r")
351 lines = [] 358 lines = []
352 while True: 359 while True:
@@ -359,11 +366,11 @@ def print_event_log(event, includelogs, loglines, termfilter):
359 if len(lines) > int(loglines): 366 if len(lines) > int(loglines):
360 lines.pop(0) 367 lines.pop(0)
361 else: 368 else:
362 print('| %s' % l) 369 bb.plain('| %s' % l)
363 f.close() 370 f.close()
364 if lines: 371 if lines:
365 for line in lines: 372 for line in lines:
366 print(line) 373 bb.plain(line)
367 374
368def _log_settings_from_server(server, observe_only): 375def _log_settings_from_server(server, observe_only):
369 # Get values of variables which control our output 376 # Get values of variables which control our output
@@ -555,13 +562,23 @@ def main(server, eventHandler, params, tf = TerminalFilter):
555 } 562 }
556 }) 563 })
557 564
558 bb.utils.mkdirhier(os.path.dirname(consolelogfile)) 565 consolelogdirname = os.path.dirname(consolelogfile)
559 loglink = os.path.join(os.path.dirname(consolelogfile), 'console-latest.log') 566 # `bb.utils.mkdirhier` has this check, but it reports failure using bb.fatal, which logs
567 # to the very logger we are trying to set up.
568 if '${' in str(consolelogdirname):
569 print(
570 "FATAL: Directory name {} contains unexpanded bitbake variable. This may cause build failures and WORKDIR pollution.".format(
571 consolelogdirname))
572 if '${MACHINE}' in consolelogdirname:
573 print("HINT: It looks like you forgot to set MACHINE in local.conf.")
574
575 bb.utils.mkdirhier(consolelogdirname)
576 loglink = os.path.join(consolelogdirname, 'console-latest.log')
560 bb.utils.remove(loglink) 577 bb.utils.remove(loglink)
561 try: 578 try:
562 os.symlink(os.path.basename(consolelogfile), loglink) 579 os.symlink(os.path.basename(consolelogfile), loglink)
563 except OSError: 580 except OSError:
564 pass 581 pass
565 582
566 # Add the logging domains specified by the user on the command line 583 # Add the logging domains specified by the user on the command line
567 for (domainarg, iterator) in groupby(params.debug_domains): 584 for (domainarg, iterator) in groupby(params.debug_domains):
@@ -577,6 +594,8 @@ def main(server, eventHandler, params, tf = TerminalFilter):
577 else: 594 else:
578 log_exec_tty = False 595 log_exec_tty = False
579 596
597 should_print_hyperlinks = sys.stdout.isatty() and os.environ.get('NO_COLOR', '') == ''
598
580 helper = uihelper.BBUIHelper() 599 helper = uihelper.BBUIHelper()
581 600
582 # Look for the specially designated handlers which need to be passed to the 601 # Look for the specially designated handlers which need to be passed to the
@@ -640,7 +659,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
640 return_value = 0 659 return_value = 0
641 errors = 0 660 errors = 0
642 warnings = 0 661 warnings = 0
 643 taskfailures = [] 662 taskfailures = {}
644 663
645 printintervaldelta = 10 * 60 # 10 minutes 664 printintervaldelta = 10 * 60 # 10 minutes
646 printinterval = printintervaldelta 665 printinterval = printintervaldelta
@@ -726,6 +745,8 @@ def main(server, eventHandler, params, tf = TerminalFilter):
726 if isinstance(event, bb.build.TaskFailed): 745 if isinstance(event, bb.build.TaskFailed):
727 return_value = 1 746 return_value = 1
728 print_event_log(event, includelogs, loglines, termfilter) 747 print_event_log(event, includelogs, loglines, termfilter)
748 k = "{}:{}".format(event._fn, event._task)
749 taskfailures[k] = event.logfile
729 if isinstance(event, bb.build.TaskBase): 750 if isinstance(event, bb.build.TaskBase):
730 logger.info(event._message) 751 logger.info(event._message)
731 continue 752 continue
@@ -821,7 +842,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
821 842
822 if isinstance(event, bb.runqueue.runQueueTaskFailed): 843 if isinstance(event, bb.runqueue.runQueueTaskFailed):
823 return_value = 1 844 return_value = 1
824 taskfailures.append(event.taskstring) 845 taskfailures.setdefault(event.taskstring)
825 logger.error(str(event)) 846 logger.error(str(event))
826 continue 847 continue
827 848
@@ -942,11 +963,21 @@ def main(server, eventHandler, params, tf = TerminalFilter):
942 try: 963 try:
943 termfilter.clearFooter() 964 termfilter.clearFooter()
944 summary = "" 965 summary = ""
966 def format_hyperlink(url, link_text):
967 if should_print_hyperlinks:
968 start = f'\033]8;;{url}\033\\'
969 end = '\033]8;;\033\\'
970 return f'{start}{link_text}{end}'
971 return link_text
972
945 if taskfailures: 973 if taskfailures:
946 summary += pluralise("\nSummary: %s task failed:", 974 summary += pluralise("\nSummary: %s task failed:",
947 "\nSummary: %s tasks failed:", len(taskfailures)) 975 "\nSummary: %s tasks failed:", len(taskfailures))
948 for failure in taskfailures: 976 for (failure, log_file) in taskfailures.items():
949 summary += "\n %s" % failure 977 summary += "\n %s" % failure
978 if log_file:
979 hyperlink = format_hyperlink(f"file://{log_file}", log_file)
980 summary += "\n log: {}".format(hyperlink)
950 if warnings: 981 if warnings:
951 summary += pluralise("\nSummary: There was %s WARNING message.", 982 summary += pluralise("\nSummary: There was %s WARNING message.",
952 "\nSummary: There were %s WARNING messages.", warnings) 983 "\nSummary: There were %s WARNING messages.", warnings)
diff --git a/bitbake/lib/bb/ui/teamcity.py b/bitbake/lib/bb/ui/teamcity.py
index fca46c2874..7eeaab8d63 100644
--- a/bitbake/lib/bb/ui/teamcity.py
+++ b/bitbake/lib/bb/ui/teamcity.py
@@ -30,7 +30,6 @@ import bb.build
30import bb.command 30import bb.command
31import bb.cooker 31import bb.cooker
32import bb.event 32import bb.event
33import bb.exceptions
34import bb.runqueue 33import bb.runqueue
35from bb.ui import uihelper 34from bb.ui import uihelper
36 35
@@ -102,10 +101,6 @@ class TeamcityLogFormatter(logging.Formatter):
102 details = "" 101 details = ""
103 if hasattr(record, 'bb_exc_formatted'): 102 if hasattr(record, 'bb_exc_formatted'):
104 details = ''.join(record.bb_exc_formatted) 103 details = ''.join(record.bb_exc_formatted)
105 elif hasattr(record, 'bb_exc_info'):
106 etype, value, tb = record.bb_exc_info
107 formatted = bb.exceptions.format_exception(etype, value, tb, limit=5)
108 details = ''.join(formatted)
109 104
110 if record.levelno in [bb.msg.BBLogFormatter.ERROR, bb.msg.BBLogFormatter.CRITICAL]: 105 if record.levelno in [bb.msg.BBLogFormatter.ERROR, bb.msg.BBLogFormatter.CRITICAL]:
111 # ERROR gets a separate errorDetails field 106 # ERROR gets a separate errorDetails field
diff --git a/bitbake/lib/bb/ui/uihelper.py b/bitbake/lib/bb/ui/uihelper.py
index 82913e0da8..e6983bd559 100644
--- a/bitbake/lib/bb/ui/uihelper.py
+++ b/bitbake/lib/bb/ui/uihelper.py
@@ -31,7 +31,7 @@ class BBUIHelper:
31 31
32 if isinstance(event, bb.build.TaskStarted): 32 if isinstance(event, bb.build.TaskStarted):
33 tid = event._fn + ":" + event._task 33 tid = event._fn + ":" + event._task
34 if event._mc != "default": 34 if event._mc != "":
35 self.running_tasks[tid] = { 'title' : "mc:%s:%s %s" % (event._mc, event._package, event._task), 'starttime' : time.time(), 'pid' : event.pid } 35 self.running_tasks[tid] = { 'title' : "mc:%s:%s %s" % (event._mc, event._package, event._task), 'starttime' : time.time(), 'pid' : event.pid }
36 else: 36 else:
37 self.running_tasks[tid] = { 'title' : "%s %s" % (event._package, event._task), 'starttime' : time.time(), 'pid' : event.pid } 37 self.running_tasks[tid] = { 'title' : "%s %s" % (event._package, event._task), 'starttime' : time.time(), 'pid' : event.pid }
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py
index ebee65d3dd..1cc74ed546 100644
--- a/bitbake/lib/bb/utils.py
+++ b/bitbake/lib/bb/utils.py
@@ -11,11 +11,8 @@ import re, fcntl, os, string, stat, shutil, time
11import sys 11import sys
12import errno 12import errno
13import logging 13import logging
14import bb
15import bb.msg
16import locale 14import locale
17import multiprocessing 15import multiprocessing
18import fcntl
19import importlib 16import importlib
20import importlib.machinery 17import importlib.machinery
21import importlib.util 18import importlib.util
@@ -24,7 +21,6 @@ import subprocess
24import glob 21import glob
25import fnmatch 22import fnmatch
26import traceback 23import traceback
27import errno
28import signal 24import signal
29import collections 25import collections
30import copy 26import copy
@@ -36,6 +32,8 @@ import tempfile
36from subprocess import getstatusoutput 32from subprocess import getstatusoutput
37from contextlib import contextmanager 33from contextlib import contextmanager
38from ctypes import cdll 34from ctypes import cdll
35import bb
36import bb.msg
39 37
40logger = logging.getLogger("BitBake.Util") 38logger = logging.getLogger("BitBake.Util")
41python_extensions = importlib.machinery.all_suffixes() 39python_extensions = importlib.machinery.all_suffixes()
@@ -84,7 +82,16 @@ def explode_version(s):
84 return r 82 return r
85 83
86def split_version(s): 84def split_version(s):
87 """Split a version string into its constituent parts (PE, PV, PR)""" 85 """Split a version string into its constituent parts (PE, PV, PR).
86
87 Arguments:
88
89 - ``s``: version string. The format of the input string should be::
90
91 ${PE}:${PV}-${PR}
92
93 Returns a tuple ``(pe, pv, pr)``.
94 """
88 s = s.strip(" <>=") 95 s = s.strip(" <>=")
89 e = 0 96 e = 0
90 if s.count(':'): 97 if s.count(':'):
@@ -136,16 +143,30 @@ def vercmp(ta, tb):
136 return r 143 return r
137 144
138def vercmp_string(a, b): 145def vercmp_string(a, b):
139 """ Split version strings and compare them """ 146 """ Split version strings using ``bb.utils.split_version()`` and compare
 147 them with ``bb.utils.vercmp()``.
148
149 Arguments:
150
151 - ``a``: left version string operand.
152 - ``b``: right version string operand.
153
154 Returns what ``bb.utils.vercmp()`` returns."""
140 ta = split_version(a) 155 ta = split_version(a)
141 tb = split_version(b) 156 tb = split_version(b)
142 return vercmp(ta, tb) 157 return vercmp(ta, tb)
143 158
144def vercmp_string_op(a, b, op): 159def vercmp_string_op(a, b, op):
145 """ 160 """
 146 Compare two versions and check if the specified comparison operator matches the result of the comparison. 161 Takes the return value of ``bb.utils.vercmp()`` and returns the operation
147 This function is fairly liberal about what operators it will accept since there are a variety of styles 162 defined by ``op`` between the return value and 0.
148 depending on the context. 163
164 Arguments:
165
166 - ``a``: left version string operand.
167 - ``b``: right version string operand.
168 - ``op``: operator string. Can be one of ``=``, ``==``, ``<=``, ``>=``,
169 ``>``, ``>>``, ``<``, ``<<`` or ``!=``.
149 """ 170 """
150 res = vercmp_string(a, b) 171 res = vercmp_string(a, b)
151 if op in ('=', '=='): 172 if op in ('=', '=='):
@@ -165,9 +186,16 @@ def vercmp_string_op(a, b, op):
165 186
166def explode_deps(s): 187def explode_deps(s):
167 """ 188 """
168 Take an RDEPENDS style string of format: 189 Takes an RDEPENDS style string of format::
169 "DEPEND1 (optional version) DEPEND2 (optional version) ..." 190
170 and return a list of dependencies. 191 DEPEND1 (optional version) DEPEND2 (optional version) ...
192
193 Arguments:
194
195 - ``s``: input RDEPENDS style string
196
197 Returns a list of dependencies.
198
171 Version information is ignored. 199 Version information is ignored.
172 """ 200 """
173 r = [] 201 r = []
@@ -189,9 +217,17 @@ def explode_deps(s):
189 217
190def explode_dep_versions2(s, *, sort=True): 218def explode_dep_versions2(s, *, sort=True):
191 """ 219 """
192 Take an RDEPENDS style string of format: 220 Takes an RDEPENDS style string of format::
193 "DEPEND1 (optional version) DEPEND2 (optional version) ..." 221
194 and return a dictionary of dependencies and versions. 222 DEPEND1 (optional version) DEPEND2 (optional version) ...
223
224 Arguments:
225
226 - ``s``: input RDEPENDS style string
 227 - ``*``: marker making the following arguments keyword-only.
228 - ``sort``: whether to sort the output or not.
229
230 Returns a dictionary of dependencies and versions.
195 """ 231 """
196 r = collections.OrderedDict() 232 r = collections.OrderedDict()
197 l = s.replace(",", "").split() 233 l = s.replace(",", "").split()
@@ -256,10 +292,17 @@ def explode_dep_versions2(s, *, sort=True):
256 292
257def explode_dep_versions(s): 293def explode_dep_versions(s):
258 """ 294 """
259 Take an RDEPENDS style string of format: 295 Take an RDEPENDS style string of format::
260 "DEPEND1 (optional version) DEPEND2 (optional version) ..." 296
261 skip null value and items appeared in dependency string multiple times 297 DEPEND1 (optional version) DEPEND2 (optional version) ...
262 and return a dictionary of dependencies and versions. 298
 299 Skips null values and items that appear in the dependency string multiple times.
300
301 Arguments:
302
303 - ``s``: input RDEPENDS style string
304
305 Returns a dictionary of dependencies and versions.
263 """ 306 """
264 r = explode_dep_versions2(s) 307 r = explode_dep_versions2(s)
265 for d in r: 308 for d in r:
@@ -273,7 +316,17 @@ def explode_dep_versions(s):
273 316
274def join_deps(deps, commasep=True): 317def join_deps(deps, commasep=True):
275 """ 318 """
276 Take the result from explode_dep_versions and generate a dependency string 319 Take a result from ``bb.utils.explode_dep_versions()`` and generate a
320 dependency string.
321
322 Arguments:
323
324 - ``deps``: dictionary of dependencies and versions.
325 - ``commasep``: makes the return value separated by commas if ``True``,
326 separated by spaces otherwise.
327
 328 Returns a comma-separated (space-separated if ``commasep`` is ``False``)
329 string of dependencies and versions.
277 """ 330 """
278 result = [] 331 result = []
279 for dep in deps: 332 for dep in deps:
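A sketch of the explode/join round trip (illustrative input; note that explode_dep_versions2() sorts the result by default):

import bb.utils

deps = bb.utils.explode_dep_versions2("foo (>= 1.0) bar, baz (< 2.0)")
# Maps each dependency to its version constraints,
# e.g. deps['foo'] == ['>= 1.0'] and deps['bar'] == []
print(bb.utils.join_deps(deps))                   # comma-separated
print(bb.utils.join_deps(deps, commasep=False))   # space-separated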
@@ -435,7 +488,11 @@ def better_eval(source, locals, extraglobals = None):
435 488
436@contextmanager 489@contextmanager
437def fileslocked(files, *args, **kwargs): 490def fileslocked(files, *args, **kwargs):
438 """Context manager for locking and unlocking file locks.""" 491 """Context manager for locking and unlocking file locks. Uses
492 ``bb.utils.lockfile()`` and ``bb.utils.unlockfile()`` to lock and unlock
493 files.
494
495 No return value."""
439 locks = [] 496 locks = []
440 if files: 497 if files:
441 for lockfile in files: 498 for lockfile in files:
@@ -446,19 +503,29 @@ def fileslocked(files, *args, **kwargs):
446 try: 503 try:
447 yield 504 yield
448 finally: 505 finally:
506 locks.reverse()
449 for lock in locks: 507 for lock in locks:
450 bb.utils.unlockfile(lock) 508 bb.utils.unlockfile(lock)
451 509
452def lockfile(name, shared=False, retry=True, block=False): 510def lockfile(name, shared=False, retry=True, block=False):
453 """ 511 """
454 Use the specified file as a lock file, return when the lock has 512 Use the specified file (with filename ``name``) as a lock file, return when
455 been acquired. Returns a variable to pass to unlockfile(). 513 the lock has been acquired. Returns a variable to pass to unlockfile().
456 Parameters: 514
457 retry: True to re-try locking if it fails, False otherwise 515 Arguments:
458 block: True to block until the lock succeeds, False otherwise 516
517 - ``shared``: sets the lock as a shared lock instead of an
518 exclusive lock.
519 - ``retry``: ``True`` to re-try locking if it fails, ``False``
520 otherwise.
521 - ``block``: ``True`` to block until the lock succeeds,
522 ``False`` otherwise.
523
459 The retry and block parameters are kind of equivalent unless you 524 The retry and block parameters are kind of equivalent unless you
460 consider the possibility of sending a signal to the process to break 525 consider the possibility of sending a signal to the process to break
461 out - at which point you want block=True rather than retry=True. 526 out - at which point you want block=True rather than retry=True.
527
528 Returns the locked file descriptor in case of success, ``None`` otherwise.
462 """ 529 """
463 basename = os.path.basename(name) 530 basename = os.path.basename(name)
464 if len(basename) > 255: 531 if len(basename) > 255:
@@ -517,7 +584,13 @@ def lockfile(name, shared=False, retry=True, block=False):
517 584
518def unlockfile(lf): 585def unlockfile(lf):
519 """ 586 """
520 Unlock a file locked using lockfile() 587 Unlock a file locked using ``bb.utils.lockfile()``.
588
589 Arguments:
590
591 - ``lf``: the locked file descriptor.
592
593 No return value.
521 """ 594 """
522 try: 595 try:
523 # If we had a shared lock, we need to promote to exclusive before 596 # If we had a shared lock, we need to promote to exclusive before
@@ -545,7 +618,11 @@ def _hasher(method, filename):
545 618
546def md5_file(filename): 619def md5_file(filename):
547 """ 620 """
548 Return the hex string representation of the MD5 checksum of filename. 621 Arguments:
622
623 - ``filename``: path to the input file.
624
625 Returns the hexadecimal string representation of the MD5 checksum of filename.
549 """ 626 """
550 import hashlib 627 import hashlib
551 try: 628 try:
@@ -557,36 +634,81 @@ def md5_file(filename):
557 634
558def sha256_file(filename): 635def sha256_file(filename):
559 """ 636 """
560 Return the hex string representation of the 256-bit SHA checksum of 637 Returns the hexadecimal representation of the 256-bit SHA checksum of
561 filename. 638 filename.
639
640 Arguments:
641
642 - ``filename``: path to the file.
562 """ 643 """
563 import hashlib 644 import hashlib
564 return _hasher(hashlib.sha256(), filename) 645 return _hasher(hashlib.sha256(), filename)
565 646
566def sha1_file(filename): 647def sha1_file(filename):
567 """ 648 """
568 Return the hex string representation of the SHA1 checksum of the filename 649 Returns the hexadecimal representation of the SHA1 checksum of the filename
650
651 Arguments:
652
653 - ``filename``: path to the file.
569 """ 654 """
570 import hashlib 655 import hashlib
571 return _hasher(hashlib.sha1(), filename) 656 return _hasher(hashlib.sha1(), filename)
572 657
573def sha384_file(filename): 658def sha384_file(filename):
574 """ 659 """
575 Return the hex string representation of the SHA384 checksum of the filename 660 Returns the hexadecimal representation of the SHA384 checksum of the filename
661
662 Arguments:
663
664 - ``filename``: path to the file.
576 """ 665 """
577 import hashlib 666 import hashlib
578 return _hasher(hashlib.sha384(), filename) 667 return _hasher(hashlib.sha384(), filename)
579 668
580def sha512_file(filename): 669def sha512_file(filename):
581 """ 670 """
582 Return the hex string representation of the SHA512 checksum of the filename 671 Returns the hexadecimal representation of the SHA512 checksum of the filename
672
673 Arguments:
674
675 - ``filename``: path to the file.
583 """ 676 """
584 import hashlib 677 import hashlib
585 return _hasher(hashlib.sha512(), filename) 678 return _hasher(hashlib.sha512(), filename)
586 679
680def goh1_file(filename):
681 """
682 Returns the hexadecimal string representation of the Go mod h1 checksum of the
683 filename. The Go mod h1 checksum uses the Go dirhash package. The package
684 defines hashes over directory trees and is used by go mod for mod files and
685 zip archives.
686
687 Arguments:
688
689 - ``filename``: path to the file.
690 """
691 import hashlib
692 import zipfile
693
694 lines = []
695 if zipfile.is_zipfile(filename):
696 with zipfile.ZipFile(filename) as archive:
697 for fn in sorted(archive.namelist()):
698 method = hashlib.sha256()
699 method.update(archive.read(fn))
700 hash = method.hexdigest()
701 lines.append("%s %s\n" % (hash, fn))
702 else:
703 hash = _hasher(hashlib.sha256(), filename)
704 lines.append("%s go.mod\n" % hash)
705 method = hashlib.sha256()
706 method.update("".join(lines).encode('utf-8'))
707 return method.hexdigest()
708
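A usage sketch for the new helper (paths are illustrative); the same call handles both a plain go.mod file and a module zip archive:

import bb.utils

# For a zip, the hash covers a sorted per-member checksum listing;
# for any other file, a synthetic single-entry "go.mod" listing.
print(bb.utils.goh1_file("/path/to/module.zip"))
print(bb.utils.goh1_file("/path/to/go.mod"))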
587def preserved_envvars_exported(): 709def preserved_envvars_exported():
588 """Variables which are taken from the environment and placed in and exported 710 """Returns the list of variables which are taken from the environment and
589 from the metadata""" 711 placed in and exported from the metadata."""
590 return [ 712 return [
591 'BB_TASKHASH', 713 'BB_TASKHASH',
592 'HOME', 714 'HOME',
@@ -600,7 +722,8 @@ def preserved_envvars_exported():
600 ] 722 ]
601 723
602def preserved_envvars(): 724def preserved_envvars():
603 """Variables which are taken from the environment and placed in the metadata""" 725 """Returns the list of variables which are taken from the environment and
726 placed in the metadata."""
604 v = [ 727 v = [
605 'BBPATH', 728 'BBPATH',
606 'BB_PRESERVE_ENV', 729 'BB_PRESERVE_ENV',
@@ -609,7 +732,9 @@ def preserved_envvars():
609 return v + preserved_envvars_exported() 732 return v + preserved_envvars_exported()
610 733
611def check_system_locale(): 734def check_system_locale():
612 """Make sure the required system locale are available and configured""" 735 """Make sure the required system locale are available and configured.
736
737 No return value."""
613 default_locale = locale.getlocale(locale.LC_CTYPE) 738 default_locale = locale.getlocale(locale.LC_CTYPE)
614 739
615 try: 740 try:
@@ -627,6 +752,12 @@ def filter_environment(good_vars):
627 """ 752 """
628 Create a pristine environment for bitbake. This will remove variables that 753 Create a pristine environment for bitbake. This will remove variables that
629 are not known and may influence the build in a negative way. 754 are not known and may influence the build in a negative way.
755
756 Arguments:
757
 758 - ``good_vars``: list of variables to exclude from the filtering.
759
760 No return value.
630 """ 761 """
631 762
632 removed_vars = {} 763 removed_vars = {}
@@ -671,6 +802,8 @@ def clean_environment():
671 """ 802 """
672 Clean up any spurious environment variables. This will remove any 803 Clean up any spurious environment variables. This will remove any
673 variables the user hasn't chosen to preserve. 804 variables the user hasn't chosen to preserve.
805
806 No return value.
674 """ 807 """
675 if 'BB_PRESERVE_ENV' not in os.environ: 808 if 'BB_PRESERVE_ENV' not in os.environ:
676 good_vars = approved_variables() 809 good_vars = approved_variables()
@@ -681,6 +814,8 @@ def clean_environment():
681def empty_environment(): 814def empty_environment():
682 """ 815 """
683 Remove all variables from the environment. 816 Remove all variables from the environment.
817
818 No return value.
684 """ 819 """
685 for s in list(os.environ.keys()): 820 for s in list(os.environ.keys()):
686 os.unsetenv(s) 821 os.unsetenv(s)
@@ -689,6 +824,12 @@ def empty_environment():
689def build_environment(d): 824def build_environment(d):
690 """ 825 """
691 Build an environment from all exported variables. 826 Build an environment from all exported variables.
827
828 Arguments:
829
830 - ``d``: the data store.
831
832 No return value.
692 """ 833 """
693 import bb.data 834 import bb.data
694 for var in bb.data.keys(d): 835 for var in bb.data.keys(d):
@@ -713,7 +854,17 @@ def _check_unsafe_delete_path(path):
713 return False 854 return False
714 855
715def remove(path, recurse=False, ionice=False): 856def remove(path, recurse=False, ionice=False):
716 """Equivalent to rm -f or rm -rf""" 857 """Equivalent to rm -f or rm -rf.
858
859 Arguments:
860
861 - ``path``: path to file/directory to remove.
862 - ``recurse``: deletes recursively if ``True``.
863 - ``ionice``: prepends ``ionice -c 3`` to the ``rm`` command. See ``man
864 ionice``.
865
866 No return value.
867 """
717 if not path: 868 if not path:
718 return 869 return
719 if recurse: 870 if recurse:
@@ -734,7 +885,17 @@ def remove(path, recurse=False, ionice=False):
734 raise 885 raise
735 886
736def prunedir(topdir, ionice=False): 887def prunedir(topdir, ionice=False):
737 """ Delete everything reachable from the directory named in 'topdir'. """ 888 """
889 Delete everything reachable from the directory named in ``topdir``.
890
891 Arguments:
892
893 - ``topdir``: directory path.
894 - ``ionice``: prepends ``ionice -c 3`` to the ``rm`` command. See ``man
895 ionice``.
896
897 No return value.
898 """
738 # CAUTION: This is dangerous! 899 # CAUTION: This is dangerous!
739 if _check_unsafe_delete_path(topdir): 900 if _check_unsafe_delete_path(topdir):
740 raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir) 901 raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir)
@@ -746,8 +907,15 @@ def prunedir(topdir, ionice=False):
746# 907#
747def prune_suffix(var, suffixes, d): 908def prune_suffix(var, suffixes, d):
748 """ 909 """
749 See if var ends with any of the suffixes listed and 910 Check if ``var`` ends with any of the suffixes listed in ``suffixes`` and
750 remove it if found 911 remove it if found.
912
913 Arguments:
914
915 - ``var``: string to check for suffixes.
916 - ``suffixes``: list of strings representing suffixes to check for.
917
918 Returns the string ``var`` without the suffix.
751 """ 919 """
752 for suffix in suffixes: 920 for suffix in suffixes:
753 if suffix and var.endswith(suffix): 921 if suffix and var.endswith(suffix):
@@ -756,7 +924,13 @@ def prune_suffix(var, suffixes, d):
756 924
757def mkdirhier(directory): 925def mkdirhier(directory):
758 """Create a directory like 'mkdir -p', but does not complain if 926 """Create a directory like 'mkdir -p', but does not complain if
759 directory already exists like os.makedirs 927 directory already exists like ``os.makedirs()``.
928
929 Arguments:
930
931 - ``directory``: path to the directory.
932
933 No return value.
760 """ 934 """
761 if '${' in str(directory): 935 if '${' in str(directory):
 762 bb.fatal("Directory name {} contains unexpanded bitbake variable. This may cause build failures and WORKDIR pollution.".format(directory)) 936
@@ -767,10 +941,24 @@ def mkdirhier(directory):
767 raise e 941 raise e
768 942
769def movefile(src, dest, newmtime = None, sstat = None): 943def movefile(src, dest, newmtime = None, sstat = None):
770 """Moves a file from src to dest, preserving all permissions and 944 """Moves a file from ``src`` to ``dest``, preserving all permissions and
771 attributes; mtime will be preserved even when moving across 945 attributes; mtime will be preserved even when moving across
 772 filesystems. Returns true on success and false on failure. Move is 946 filesystems. Move is
773 atomic. 947 atomic.
948
949 Arguments:
950
 951 - ``src``: Source file.
 952 - ``dest``: Destination file.
 953 - ``newmtime``: new mtime to be passed as float seconds since the epoch.
 954 - ``sstat``: os.stat_result to use for the destination file.
955
956 Returns an ``os.stat_result`` of the destination file if the
957 source file is a symbolic link or the ``sstat`` argument represents a
958 symbolic link - in which case the destination file will also be created as
959 a symbolic link.
960
961 Otherwise, returns ``newmtime`` on success and ``False`` on failure.
774 """ 962 """
775 963
776 #print "movefile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")" 964 #print "movefile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
@@ -861,9 +1049,24 @@ def movefile(src, dest, newmtime = None, sstat = None):
861 1049
862def copyfile(src, dest, newmtime = None, sstat = None): 1050def copyfile(src, dest, newmtime = None, sstat = None):
863 """ 1051 """
864 Copies a file from src to dest, preserving all permissions and 1052 Copies a file from ``src`` to ``dest``, preserving all permissions and
865 attributes; mtime will be preserved even when moving across 1053 attributes; mtime will be preserved even when moving across
866 filesystems. Returns true on success and false on failure. 1054 filesystems.
1055
1056 Arguments:
1057
1058 - ``src``: Source file.
1059 - ``dest``: Destination file.
1060 - ``newmtime``: new mtime to be passed as float seconds since the epoch.
1061 - ``sstat``: os.stat_result to use for the destination file.
1062
1063 Returns an ``os.stat_result`` of the destination file if the
1064 source file is a symbolic link or the ``sstat`` argument represents a
1065 symbolic link - in which case the destination file will also be created as
1066 a symbolic link.
1067
1068 Otherwise, returns ``newmtime`` on success and ``False`` on failure.
1069
867 """ 1070 """
868 #print "copyfile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")" 1071 #print "copyfile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
869 try: 1072 try:
@@ -941,10 +1144,16 @@ def copyfile(src, dest, newmtime = None, sstat = None):
941 1144
942def break_hardlinks(src, sstat = None): 1145def break_hardlinks(src, sstat = None):
943 """ 1146 """
944 Ensures src is the only hardlink to this file. Other hardlinks, 1147 Ensures ``src`` is the only hardlink to this file. Other hardlinks,
945 if any, are not affected (other than in their st_nlink value, of 1148 if any, are not affected (other than in their st_nlink value, of
946 course). Returns true on success and false on failure. 1149 course).
1150
1151 Arguments:
1152
1153 - ``src``: source file path.
1154 - ``sstat``: os.stat_result to use when checking if the file is a link.
947 1155
1156 Returns ``True`` on success and ``False`` on failure.
948 """ 1157 """
949 try: 1158 try:
950 if not sstat: 1159 if not sstat:
@@ -958,11 +1167,24 @@ def break_hardlinks(src, sstat = None):
958 1167
959def which(path, item, direction = 0, history = False, executable=False): 1168def which(path, item, direction = 0, history = False, executable=False):
960 """ 1169 """
961 Locate `item` in the list of paths `path` (colon separated string like $PATH). 1170 Locate ``item`` in the list of paths ``path`` (colon separated string like
962 If `direction` is non-zero then the list is reversed. 1171 ``$PATH``).
963 If `history` is True then the list of candidates also returned as result,history. 1172
964 If `executable` is True then the candidate has to be an executable file, 1173 Arguments:
965 otherwise the candidate simply has to exist. 1174
1175 - ``path``: list of colon-separated paths.
1176 - ``item``: string to search for.
1177 - ``direction``: if non-zero then the list is reversed.
 1178 - ``history``: if ``True`` then the list of candidates is also returned as
 1179 ``result,history`` where ``history`` is the list of paths previously
 1180 checked.
1181 - ``executable``: if ``True`` then the candidate defined by ``path`` has
1182 to be an executable file, otherwise if ``False`` the candidate simply
1183 has to exist.
1184
 1185 Returns the item if found in the list of paths, otherwise an empty string.
 1186 If ``history`` is ``True``, returns the list of paths previously checked in a
 1187 tuple with the found (or not found) item as ``(item, history)``.
966 """ 1188 """
967 1189
968 if executable: 1190 if executable:
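A short example of the documented return shapes (results depend on the local $PATH):

import os
import bb.utils

path = os.environ["PATH"]
print(bb.utils.which(path, "sh"))   # e.g. "/usr/bin/sh", or "" if not found
found, tried = bb.utils.which(path, "sh", history=True)
print(tried)   # every candidate checked before (or without) a match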
@@ -989,10 +1211,29 @@ def which(path, item, direction = 0, history = False, executable=False):
989 return "", hist 1211 return "", hist
990 return "" 1212 return ""
991 1213
1214def to_filemode(input):
1215 """
1216 Take a bitbake variable contents defining a file mode and return
1217 the proper python representation of the number
1218
1219 Arguments:
1220
1221 - ``input``: a string or number to convert, e.g. a bitbake variable
1222 string, assumed to be an octal representation
1223
1224 Returns the python file mode as a number
1225 """
 1226 # The mode might come in as a number or a text string.
1227 if type(input) is int:
1228 return input
1229 return int(input, 8)
1230
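The conversion is plain octal parsing; a quick sketch matching the new tests above:

import bb.utils

assert bb.utils.to_filemode("0o775") == 0o775   # int("0o775", 8) accepts the prefix
assert bb.utils.to_filemode("775") == 0o775     # bare strings are read as octal
assert bb.utils.to_filemode(0o775) == 0o775     # numbers pass through unchanged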
992@contextmanager 1231@contextmanager
993def umask(new_mask): 1232def umask(new_mask):
994 """ 1233 """
995 Context manager to set the umask to a specific mask, and restore it afterwards. 1234 Context manager to set the umask to a specific mask, and restore it afterwards.
1235
1236 No return value.
996 """ 1237 """
997 current_mask = os.umask(new_mask) 1238 current_mask = os.umask(new_mask)
998 try: 1239 try:
@@ -1003,7 +1244,17 @@ def umask(new_mask):
1003def to_boolean(string, default=None): 1244def to_boolean(string, default=None):
1004 """ 1245 """
1005 Check input string and return boolean value True/False/None 1246 Check input string and return boolean value True/False/None
1006 depending upon the checks 1247 depending upon the checks.
1248
1249 Arguments:
1250
1251 - ``string``: input string.
1252 - ``default``: default return value if the input ``string`` is ``None``,
1253 ``0``, ``False`` or an empty string.
1254
1255 Returns ``True`` if the string is one of "y", "yes", "1", "true", ``False``
1256 if the string is one of "n", "no", "0", or "false". Return ``default`` if
1257 the input ``string`` is ``None``, ``0``, ``False`` or an empty string.
1007 """ 1258 """
1008 if not string: 1259 if not string:
1009 return default 1260 return default
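Worked examples of the truth-value mapping described above:

import bb.utils

assert bb.utils.to_boolean("yes") is True
assert bb.utils.to_boolean("0") is False
assert bb.utils.to_boolean("", default=True) is True   # falsy input -> default
assert bb.utils.to_boolean(None) is None               # no default supplied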
@@ -1024,18 +1275,17 @@ def contains(variable, checkvalues, truevalue, falsevalue, d):
1024 1275
1025 Arguments: 1276 Arguments:
1026 1277
1027 variable -- the variable name. This will be fetched and expanded (using 1278 - ``variable``: the variable name. This will be fetched and expanded (using
1028 d.getVar(variable)) and then split into a set(). 1279 d.getVar(variable)) and then split into a set().
1029 1280 - ``checkvalues``: if this is a string it is split on whitespace into a set(),
1030 checkvalues -- if this is a string it is split on whitespace into a set(), 1281 otherwise coerced directly into a set().
1031 otherwise coerced directly into a set(). 1282 - ``truevalue``: the value to return if checkvalues is a subset of variable.
1032 1283 - ``falsevalue``: the value to return if variable is empty or if checkvalues is
1033 truevalue -- the value to return if checkvalues is a subset of variable. 1284 not a subset of variable.
1034 1285 - ``d``: the data store.
1035 falsevalue -- the value to return if variable is empty or if checkvalues is
1036 not a subset of variable.
1037 1286
 1038 d -- the data store. 1287 Returns ``truevalue`` if the variable contains the values specified,
 1288 ``falsevalue`` otherwise.
1039 """ 1289 """
1040 1290
1041 val = d.getVar(variable) 1291 val = d.getVar(variable)
@@ -1055,18 +1305,17 @@ def contains_any(variable, checkvalues, truevalue, falsevalue, d):
1055 1305
1056 Arguments: 1306 Arguments:
1057 1307
1058 variable -- the variable name. This will be fetched and expanded (using 1308 - ``variable``: the variable name. This will be fetched and expanded (using
1059 d.getVar(variable)) and then split into a set(). 1309 d.getVar(variable)) and then split into a set().
1310 - ``checkvalues``: if this is a string it is split on whitespace into a set(),
1311 otherwise coerced directly into a set().
1312 - ``truevalue``: the value to return if checkvalues is a subset of variable.
1313 - ``falsevalue``: the value to return if variable is empty or if checkvalues is
1314 not a subset of variable.
1315 - ``d``: the data store.
1060 1316
 1061 checkvalues -- if this is a string it is split on whitespace into a set(), 1317 Returns ``truevalue`` if the variable contains any of the values specified,
 1062 otherwise coerced directly into a set(). 1318 ``falsevalue`` otherwise.
1063
1064 truevalue -- the value to return if checkvalues is a subset of variable.
1065
1066 falsevalue -- the value to return if variable is empty or if checkvalues is
1067 not a subset of variable.
1068
1069 d -- the data store.
1070 """ 1319 """
1071 val = d.getVar(variable) 1320 val = d.getVar(variable)
1072 if not val: 1321 if not val:
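The same kind of sketch for ``contains_any``, again assuming a data store ``d``::

    d.setVar("DISTRO_FEATURES", "systemd wayland")

    # "x11" is absent but "wayland" is present, so truevalue is returned
    result = bb.utils.contains_any("DISTRO_FEATURES", "x11 wayland", "yes", "no", d)
    # result == "yes"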
@@ -1081,17 +1330,17 @@ def contains_any(variable, checkvalues, truevalue, falsevalue, d):
1081 return falsevalue 1330 return falsevalue
1082 1331
1083def filter(variable, checkvalues, d): 1332def filter(variable, checkvalues, d):
1084 """Return all words in the variable that are present in the checkvalues. 1333 """Return all words in the variable that are present in the ``checkvalues``.
1085 1334
1086 Arguments: 1335 Arguments:
1087 1336
1088 variable -- the variable name. This will be fetched and expanded (using 1337 - ``variable``: the variable name. This will be fetched and expanded (using
1089 d.getVar(variable)) and then split into a set(). 1338 d.getVar(variable)) and then split into a set().
1339 - ``checkvalues``: if this is a string it is split on whitespace into a set(),
1340 otherwise coerced directly into a set().
1341 - ``d``: the data store.
1090 1342
1091 checkvalues -- if this is a string it is split on whitespace into a set(), 1343 Returns a space-separated string of the matching words.
1092 otherwise coerced directly into a set().
1093
1094 d -- the data store.
1095 """ 1344 """
1096 1345
1097 val = d.getVar(variable) 1346 val = d.getVar(variable)
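An illustrative sketch, assuming a data store ``d`` as above::

    d.setVar("DISTRO_FEATURES", "bluetooth systemd wayland")

    # Keeps only the words of DISTRO_FEATURES that appear in checkvalues
    features = bb.utils.filter("DISTRO_FEATURES", "bluetooth x11 wayland", d)
    # features == "bluetooth wayland" (sorted, space-separated)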
@@ -1107,8 +1356,27 @@ def filter(variable, checkvalues, d):
1107 1356
1108def get_referenced_vars(start_expr, d): 1357def get_referenced_vars(start_expr, d):
1109 """ 1358 """
1110 :return: names of vars referenced in start_expr (recursively), in quasi-BFS order (variables within the same level 1359 Get the names of the variables referenced in a given expression.
1111 are ordered arbitrarily) 1360
1361 Arguments:
1362
1363 - ``start_expr``: the expression in which to look for variable references.
1364
1365 For example::
1366
1367 ${VAR_A} string ${VAR_B}
1368
1369 Or::
1370
1371 ${@d.getVar('VAR')}
1372
1373 If a variable references other variables, the latter are also
1374 returned recursively.
1375
1376 - ``d``: the data store.
1377
1378 Returns the names of vars referenced in ``start_expr`` (recursively), in
1379 quasi-BFS order (variables within the same level are ordered arbitrarily).
1112 """ 1380 """
1113 1381
1114 seen = set() 1382 seen = set()
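A sketch of the recursion described above, with hypothetical variable names and a data store ``d``::

    d.setVar("VAR_A", "${VAR_B} plus ${VAR_C}")
    d.setVar("VAR_B", "${VAR_D}")

    # First level: VAR_B and VAR_C (in arbitrary order), then VAR_D,
    # which is only referenced indirectly through VAR_B.
    names = bb.utils.get_referenced_vars("${VAR_A}", d)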
@@ -1188,7 +1456,9 @@ def multiprocessingpool(*args, **kwargs):
1188 return multiprocessing.Pool(*args, **kwargs) 1456 return multiprocessing.Pool(*args, **kwargs)
1189 1457
1190def exec_flat_python_func(func, *args, **kwargs): 1458def exec_flat_python_func(func, *args, **kwargs):
1191 """Execute a flat python function (defined with def funcname(args):...)""" 1459 """Execute a flat python function (defined with ``def funcname(args): ...``)
1460
1461 Returns the return value of the function."""
1192 # Prepare a small piece of python code which calls the requested function 1462 # Prepare a small piece of python code which calls the requested function
1193 # To do this we need to prepare two things - a set of variables we can use to pass 1463 # To do this we need to prepare two things - a set of variables we can use to pass
1194 # the values of arguments into the calling function, and the list of arguments for 1464 # the values of arguments into the calling function, and the list of arguments for
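A minimal sketch, assuming a flat (module-level style) function::

    from bb.utils import exec_flat_python_func

    def add(a, b=0):
        return a + b

    # Positional and keyword arguments are both passed through to the function.
    result = exec_flat_python_func(add, 1, b=2)   # result == 3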
@@ -1214,48 +1484,57 @@ def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
1214 """Edit lines from a recipe or config file and modify one or more 1484 """Edit lines from a recipe or config file and modify one or more
1215 specified variable values set in the file using a specified callback 1485 specified variable values set in the file using a specified callback
1216 function. Lines are expected to have trailing newlines. 1486 function. Lines are expected to have trailing newlines.
1217 Parameters: 1487
1218 meta_lines: lines from the file; can be a list or an iterable 1488 Arguments:
1219 (e.g. file pointer) 1489
1220 variables: a list of variable names to look for. Functions 1490 - ``meta_lines``: lines from the file; can be a list or an iterable
1221 may also be specified, but must be specified with '()' at 1491 (e.g. file pointer)
1222 the end of the name. Note that the function doesn't have 1492 - ``variables``: a list of variable names to look for. Functions
1223 any intrinsic understanding of :append, :prepend, :remove, 1493 may also be specified, but must be specified with ``()`` at
1224 or overrides, so these are considered as part of the name. 1494 the end of the name. Note that the function doesn't have
1225 These values go into a regular expression, so regular 1495 any intrinsic understanding of ``:append``, ``:prepend``, ``:remove``,
1226 expression syntax is allowed. 1496 or overrides, so these are considered as part of the name.
1227 varfunc: callback function called for every variable matching 1497 These values go into a regular expression, so regular
1228 one of the entries in the variables parameter. The function 1498 expression syntax is allowed.
1229 should take four arguments: 1499 - ``varfunc``: callback function called for every variable matching
1230 varname: name of variable matched 1500 one of the entries in the variables parameter.
1231 origvalue: current value in file 1501
1232 op: the operator (e.g. '+=') 1502 The function should take four arguments:
1233 newlines: list of lines up to this point. You can use 1503
1234 this to prepend lines before this variable setting 1504 - ``varname``: name of variable matched
1235 if you wish. 1505 - ``origvalue``: current value in file
1236 and should return a four-element tuple: 1506 - ``op``: the operator (e.g. ``+=``)
1237 newvalue: new value to substitute in, or None to drop 1507 - ``newlines``: list of lines up to this point. You can use
1238 the variable setting entirely. (If the removal 1508 this to prepend lines before this variable setting
1239 results in two consecutive blank lines, one of the 1509 if you wish.
1240 blank lines will also be dropped). 1510
1241 newop: the operator to use - if you specify None here, 1511 And should return a four-element tuple:
1242 the original operation will be used. 1512
1243 indent: number of spaces to indent multi-line entries, 1513 - ``newvalue``: new value to substitute in, or ``None`` to drop
1244 or -1 to indent up to the level of the assignment 1514 the variable setting entirely. (If the removal
1245 and opening quote, or a string to use as the indent. 1515 results in two consecutive blank lines, one of the
1246 minbreak: True to allow the first element of a 1516 blank lines will also be dropped).
1247 multi-line value to continue on the same line as 1517 - ``newop``: the operator to use - if you specify ``None`` here,
1248 the assignment, False to indent before the first 1518 the original operation will be used.
1249 element. 1519 - ``indent``: number of spaces to indent multi-line entries,
1250 To clarify, if you wish not to change the value, then you 1520 or ``-1`` to indent up to the level of the assignment
1251 would return like this: return origvalue, None, 0, True 1521 and opening quote, or a string to use as the indent.
1252 match_overrides: True to match items with _overrides on the end, 1522 - ``minbreak``: ``True`` to allow the first element of a
1253 False otherwise 1523 multi-line value to continue on the same line as
1524 the assignment, ``False`` to indent before the first
1525 element.
1526
1527 To clarify, if you wish not to change the value, then you
1528 would return like this::
1529
1530 return origvalue, None, 0, True
1531 - ``match_overrides``: ``True`` to match items with ``_overrides`` on the end,
1532 ``False`` otherwise.
1533
1254 Returns a tuple: 1534 Returns a tuple:
1255 updated: 1535
1256 True if changes were made, False otherwise. 1536 - ``updated``: ``True`` if changes were made, ``False`` otherwise.
1257 newlines: 1537 - ``newlines``: Lines after processing.
1258 Lines after processing
1259 """ 1538 """
1260 1539
1261 var_res = {} 1540 var_res = {}
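A hedged sketch of a ``varfunc`` callback that rewrites one variable and leaves every other matched variable untouched (names and values are illustrative)::

    def varfunc(varname, origvalue, op, newlines):
        if varname == "DESCRIPTION":
            # Substitute a new value; keep the original operator,
            # default indentation and line breaking.
            return "A better description", None, 0, True
        return origvalue, None, 0, True

    lines = ['DESCRIPTION = "old"\n', 'LICENSE = "MIT"\n']
    updated, newlines = bb.utils.edit_metadata(lines, ["DESCRIPTION"], varfunc)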
@@ -1399,12 +1678,13 @@ def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
1399 1678
1400 1679
1401def edit_metadata_file(meta_file, variables, varfunc): 1680def edit_metadata_file(meta_file, variables, varfunc):
1402 """Edit a recipe or config file and modify one or more specified 1681 """Edit a recipe or configuration file and modify one or more specified
1403 variable values set in the file using a specified callback function. 1682 variable values set in the file using a specified callback function.
1404 The file is only written to if the value(s) actually change. 1683 The file is only written to if the value(s) actually change.
1405 This is basically the file version of edit_metadata(), see that 1684 This is basically the file version of ``bb.utils.edit_metadata()``; see that
1406 function's description for parameter/usage information. 1685 function's description for parameter/usage information.
1407 Returns True if the file was written to, False otherwise. 1686
1687 Returns ``True`` if the file was written to, ``False`` otherwise.
1408 """ 1688 """
1409 with open(meta_file, 'r') as f: 1689 with open(meta_file, 'r') as f:
1410 (updated, newlines) = edit_metadata(f, variables, varfunc) 1690 (updated, newlines) = edit_metadata(f, variables, varfunc)
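Reusing the ``varfunc`` sketch above, with a hypothetical file path::

    # The file is only rewritten if the callback actually changes a value.
    updated = bb.utils.edit_metadata_file("conf/local.conf", ["DL_DIR"], varfunc)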
@@ -1415,23 +1695,25 @@ def edit_metadata_file(meta_file, variables, varfunc):
1415 1695
1416 1696
1417def edit_bblayers_conf(bblayers_conf, add, remove, edit_cb=None): 1697def edit_bblayers_conf(bblayers_conf, add, remove, edit_cb=None):
1418 """Edit bblayers.conf, adding and/or removing layers 1698 """Edit ``bblayers.conf``, adding and/or removing layers.
1419 Parameters: 1699
1420 bblayers_conf: path to bblayers.conf file to edit 1700 Arguments:
1421 add: layer path (or list of layer paths) to add; None or empty 1701
1422 list to add nothing 1702 - ``bblayers_conf``: path to ``bblayers.conf`` file to edit
1423 remove: layer path (or list of layer paths) to remove; None or 1703 - ``add``: layer path (or list of layer paths) to add; ``None`` or empty
1424 empty list to remove nothing 1704 list to add nothing
1425 edit_cb: optional callback function that will be called after 1705 - ``remove``: layer path (or list of layer paths) to remove; ``None`` or
1426 processing adds/removes once per existing entry. 1706 empty list to remove nothing
1707 - ``edit_cb``: optional callback function that will be called
1708 after processing adds/removes once per existing entry.
1709
1427 Returns a tuple: 1710 Returns a tuple:
1428 notadded: list of layers specified to be added but weren't
1429 (because they were already in the list)
1430 notremoved: list of layers that were specified to be removed
1431 but weren't (because they weren't in the list)
1432 """
1433 1711
1434 import fnmatch 1712 - ``notadded``: list of layers specified to be added but weren't
1713 (because they were already in the list)
1714 - ``notremoved``: list of layers that were specified to be removed
1715 but weren't (because they weren't in the list)
1716 """
1435 1717
1436 def remove_trailing_sep(pth): 1718 def remove_trailing_sep(pth):
1437 if pth and pth[-1] == os.sep: 1719 if pth and pth[-1] == os.sep:
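An illustrative call with hypothetical paths::

    notadded, notremoved = bb.utils.edit_bblayers_conf(
        "build/conf/bblayers.conf",
        add="/srv/layers/meta-custom",   # layer to append to BBLAYERS
        remove=None)                     # nothing to remove
    # notadded lists layers that were already present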
@@ -1550,7 +1832,22 @@ def get_collection_res(d):
1550 1832
1551 1833
1552def get_file_layer(filename, d, collection_res={}): 1834def get_file_layer(filename, d, collection_res={}):
1553 """Determine the collection (as defined by a layer's layer.conf file) containing the specified file""" 1835 """Determine the collection (or layer name, as defined by a layer's
1836 ``layer.conf`` file) containing the specified file.
1837
1838 Arguments:
1839
1840 - ``filename``: the filename to look for.
1841 - ``d``: the data store.
1842 - ``collection_res``: dictionary with the layer names as keys and file
1843 patterns to match as defined with the ``BBFILE_COLLECTIONS`` and
1844 ``BBFILE_PATTERN`` variables respectively. The return value of
1845 ``bb.utils.get_collection_res()`` is the default if this argument is
1846 not specified.
1847
1848 Returns the layer name containing the file. If multiple layers contain the
1849 file, the last matching layer name from ``collection_res`` is returned.
1850 """
1554 if not collection_res: 1851 if not collection_res:
1555 collection_res = get_collection_res(d) 1852 collection_res = get_collection_res(d)
1556 1853
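A sketch with a hypothetical recipe path, assuming a configured data store ``d``::

    # With the default collection_res from the data store, this maps a
    # recipe file back to the name of the layer providing it.
    layer = bb.utils.get_file_layer(
        "/srv/layers/meta-custom/recipes-example/example_1.0.bb", d)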
@@ -1588,7 +1885,13 @@ class PrCtlError(Exception):
1588 1885
1589def signal_on_parent_exit(signame): 1886def signal_on_parent_exit(signame):
1590 """ 1887 """
1591 Trigger signame to be sent when the parent process dies 1888 Trigger ``signame`` to be sent when the parent process dies.
1889
1890 Arguments:
1891
1892 - ``signame``: name of the signal. See ``man signal``.
1893
1894 No return value.
1592 """ 1895 """
1593 signum = getattr(signal, signame) 1896 signum = getattr(signal, signame)
1594 # http://linux.die.net/man/2/prctl 1897 # http://linux.die.net/man/2/prctl
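Typically called early in a child process, for example::

    # Ask the kernel to deliver SIGTERM to this process if its parent dies.
    bb.utils.signal_on_parent_exit("SIGTERM")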
@@ -1623,7 +1926,7 @@ def ioprio_set(who, cls, value):
1623 bb.warn("Unable to set IO Prio for arch %s" % _unamearch) 1926 bb.warn("Unable to set IO Prio for arch %s" % _unamearch)
1624 1927
1625def set_process_name(name): 1928def set_process_name(name):
1626 from ctypes import cdll, byref, create_string_buffer 1929 from ctypes import byref, create_string_buffer
1627 # This is nice to have for debugging, not essential 1930 # This is nice to have for debugging, not essential
1628 try: 1931 try:
1629 libc = cdll.LoadLibrary('libc.so.6') 1932 libc = cdll.LoadLibrary('libc.so.6')
@@ -1675,6 +1978,13 @@ def disable_network(uid=None, gid=None):
1675 Disable networking in the current process if the kernel supports it, else 1978 Disable networking in the current process if the kernel supports it, else
1676 just return after logging to debug. To do this we need to create a new user 1979 just return after logging to debug. To do this we need to create a new user
1677 namespace, then map back to the original uid/gid. 1980 namespace, then map back to the original uid/gid.
1981
1982 Arguments:
1983
1984 - ``uid``: original user id.
1985 - ``gid``: original user group id.
1986
1987 No return value.
1678 """ 1988 """
1679 libc = ctypes.CDLL('libc.so.6') 1989 libc = ctypes.CDLL('libc.so.6')
1680 1990
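A minimal sketch; this is meant to be called in a child/worker process::

    # Unshares new user and network namespaces, then maps the original
    # uid/gid back so file ownership keeps working inside the namespace.
    bb.utils.disable_network()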
@@ -1744,9 +2054,14 @@ class LogCatcher(logging.Handler):
1744 2054
1745def is_semver(version): 2055def is_semver(version):
1746 """ 2056 """
1747 Is the version string following the semver semantic? 2057 Arguments:
2058
2059 - ``version``: the version string.
2060
2061 Returns ``True`` if the version string follows semantic versioning, ``False``
2062 otherwise.
1748 2063
1749 https://semver.org/spec/v2.0.0.html 2064 See https://semver.org/spec/v2.0.0.html.
1750 """ 2065 """
1751 regex = re.compile( 2066 regex = re.compile(
1752 r""" 2067 r"""
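A few illustrative examples of the check::

    from bb.utils import is_semver

    assert is_semver("1.2.3") is True
    assert is_semver("1.2.3-rc.1+build.5") is True   # pre-release and build metadata
    assert is_semver("1.2") is False                 # the patch component is required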
@@ -1784,6 +2099,8 @@ def rename(src, dst):
1784def environment(**envvars): 2099def environment(**envvars):
1785 """ 2100 """
1786 Context manager to selectively update the environment with the specified mapping. 2101 Context manager to selectively update the environment with the specified mapping.
2102
2103 No return value.
1787 """ 2104 """
1788 backup = dict(os.environ) 2105 backup = dict(os.environ)
1789 try: 2106 try:
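A minimal sketch of the ``environment`` context manager (the variable and value are illustrative)::

    import os
    from bb.utils import environment

    with environment(HTTP_PROXY="http://proxy.example.com:8080"):
        assert os.environ["HTTP_PROXY"] == "http://proxy.example.com:8080"
    # On exit the previous environment is restored; the key is removed
    # again here because it was not set before entering the block.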
@@ -1800,6 +2117,13 @@ def is_local_uid(uid=''):
1800 """ 2117 """
1801 Check whether uid is a local one or not. 2118 Check whether uid is a local one or not.
1802 Can't use pwd module since it gets all UIDs, not local ones only. 2119 Can't use pwd module since it gets all UIDs, not local ones only.
2120
2121 Arguments:
2122
2123 - ``uid``: user id. If not specified the user id is determined from
2124 ``os.getuid()``.
2125
2126 Returns ``True`` if the user id is local, ``False`` otherwise.
1803 """ 2127 """
1804 if not uid: 2128 if not uid:
1805 uid = os.getuid() 2129 uid = os.getuid()
@@ -1814,7 +2138,7 @@ def is_local_uid(uid=''):
1814 2138
1815def mkstemp(suffix=None, prefix=None, dir=None, text=False): 2139def mkstemp(suffix=None, prefix=None, dir=None, text=False):
1816 """ 2140 """
1817 Generates a unique filename, independent of time. 2141 Generates a unique temporary file, independent of time.
1818 2142
1819 mkstemp() in glibc (at least) generates unique file names based on the 2143 mkstemp() in glibc (at least) generates unique file names based on the
1820 current system time. When combined with highly parallel builds, and 2144 current system time. When combined with highly parallel builds, and
@@ -1823,6 +2147,18 @@ def mkstemp(suffix=None, prefix=None, dir=None, text=False):
1823 2147
1824 This function adds additional entropy to the file name so that a collision 2148 This function adds additional entropy to the file name so that a collision
1825 is independent of time and thus extremely unlikely. 2149 is independent of time and thus extremely unlikely.
2150
2151 Arguments:
2152
2153 - ``suffix``: filename suffix.
2154 - ``prefix``: filename prefix.
2155 - ``dir``: directory where the file will be created.
2156 - ``text``: if ``True``, the file is opened in text mode.
2157
2158 Returns a tuple containing:
2159
2160 - the file descriptor for the created file
2161 - the name of the file.
1826 """ 2162 """
1827 entropy = "".join(random.choices("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890", k=20)) 2163 entropy = "".join(random.choices("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890", k=20))
1828 if prefix: 2164 if prefix:
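Usage mirrors ``tempfile.mkstemp()``; a short sketch::

    import os
    from bb.utils import mkstemp

    fd, path = mkstemp(suffix=".log", prefix="build-")
    try:
        os.write(fd, b"hello\n")
    finally:
        os.close(fd)
        os.unlink(path)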
@@ -1833,12 +2169,20 @@ def mkstemp(suffix=None, prefix=None, dir=None, text=False):
1833 2169
1834def path_is_descendant(descendant, ancestor): 2170def path_is_descendant(descendant, ancestor):
1835 """ 2171 """
1836 Returns True if the path `descendant` is a descendant of `ancestor` 2172 Returns ``True`` if the path ``descendant`` is a descendant of ``ancestor``
1837 (including being equivalent to `ancestor` itself). Otherwise returns False. 2173 (including being equivalent to ``ancestor`` itself). Otherwise returns
2174 ``False``.
2175
1838 Correctly accounts for symlinks, bind mounts, etc. by using 2176 Correctly accounts for symlinks, bind mounts, etc. by using
1839 os.path.samestat() to compare paths 2177 ``os.path.samestat()`` to compare paths.
2178
2179 May raise any exception that ``os.stat()`` raises.
1840 2180
1841 May raise any exception that os.stat() raises 2181 Arguments:
2182
2183 - ``descendant``: path to check for being a descendant of ``ancestor``.
2184 - ``ancestor``: path to the ancestor ``descendant`` will be checked
2185 against.
1842 """ 2186 """
1843 2187
1844 ancestor_stat = os.stat(ancestor) 2188 ancestor_stat = os.stat(ancestor)
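A few illustrative checks (both paths must exist, since ``os.stat()`` is used)::

    from bb.utils import path_is_descendant

    assert path_is_descendant("/usr/bin", "/usr")
    assert path_is_descendant("/usr", "/usr")        # a path is its own descendant
    assert not path_is_descendant("/etc", "/usr")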
@@ -1857,12 +2201,31 @@ def path_is_descendant(descendant, ancestor):
1857# If we don't have a timeout of some kind and a process/thread exits badly (for example 2201# If we don't have a timeout of some kind and a process/thread exits badly (for example
1858# OOM killed) and held a lock, we'd just hang in the lock futex forever. It is better 2202# OOM killed) and held a lock, we'd just hang in the lock futex forever. It is better
1859# we exit at some point than hang. 5 minutes with no progress means we're probably deadlocked. 2203# we exit at some point than hang. 5 minutes with no progress means we're probably deadlocked.
2204# This function can still deadlock python since it can't signal the other threads to exit
2205# (signals are handled in the main thread) and even os._exit() will wait on non-daemon threads
2206# to exit.
1860@contextmanager 2207@contextmanager
1861def lock_timeout(lock): 2208def lock_timeout(lock):
1862 held = lock.acquire(timeout=5*60)
1863 try: 2209 try:
2210 s = signal.pthread_sigmask(signal.SIG_BLOCK, signal.valid_signals())
2211 held = lock.acquire(timeout=5*60)
1864 if not held: 2212 if not held:
2213 bb.server.process.serverlog("Couldn't get the lock for 5 mins, timed out, exiting.\n%s" % traceback.format_stack())
1865 os._exit(1) 2214 os._exit(1)
1866 yield held 2215 yield held
1867 finally: 2216 finally:
1868 lock.release() 2217 lock.release()
2218 signal.pthread_sigmask(signal.SIG_SETMASK, s)
2219
2220# A version of lock_timeout without the check that the lock was locked, and with a shorter timeout
2221@contextmanager
2222def lock_timeout_nocheck(lock):
2223 l = False
2224 try:
2225 s = signal.pthread_sigmask(signal.SIG_BLOCK, signal.valid_signals())
2226 l = lock.acquire(timeout=10)
2227 yield l
2228 finally:
2229 if l:
2230 lock.release()
2231 signal.pthread_sigmask(signal.SIG_SETMASK, s)
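A minimal sketch of how these context managers are used::

    import threading
    from bb.utils import lock_timeout, lock_timeout_nocheck

    lock = threading.Lock()

    with lock_timeout(lock):
        pass   # exclusive section; the process exits rather than hanging
               # if the lock cannot be acquired within five minutes

    with lock_timeout_nocheck(lock) as held:
        if held:
            pass   # lock acquired within the shorter 10 second timeout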