Diffstat (limited to 'bitbake/lib/bb')

 bitbake/lib/bb/__init__.py                  |  34
 bitbake/lib/bb/asyncrpc/__init__.py         |   2
 bitbake/lib/bb/asyncrpc/client.py           | 132
 bitbake/lib/bb/asyncrpc/serv.py             |  37
 bitbake/lib/bb/build.py                     |   2
 bitbake/lib/bb/codeparser.py                |  31
 bitbake/lib/bb/cooker.py                    |  33
 bitbake/lib/bb/fetch2/crate.py              |   9
 bitbake/lib/bb/fetch2/gcp.py                |   1
 bitbake/lib/bb/fetch2/npmsw.py              |   2
 bitbake/lib/bb/fetch2/svn.py                |   3
 bitbake/lib/bb/fetch2/wget.py               |  30
 bitbake/lib/bb/parse/__init__.py            |  12
 bitbake/lib/bb/parse/parse_py/BBHandler.py  |   8
 bitbake/lib/bb/runqueue.py                  | 102
 bitbake/lib/bb/siggen.py                    | 120
 bitbake/lib/bb/tests/codeparser.py          |  40
 bitbake/lib/bb/tests/fetch.py               |  34
 bitbake/lib/bb/ui/buildinfohelper.py        |   5
 19 files changed, 398 insertions(+), 239 deletions(-)
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
index 768cce84e9..574e0de5be 100644
--- a/bitbake/lib/bb/__init__.py
+++ b/bitbake/lib/bb/__init__.py
@@ -9,7 +9,7 @@
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
-__version__ = "2.7.3"
+__version__ = "2.9.1"
 
 import sys
 if sys.version_info < (3, 8, 0):
@@ -36,6 +36,7 @@ class BBHandledException(Exception):
 
 import os
 import logging
+from collections import namedtuple
 
 
 class NullHandler(logging.Handler):
@@ -103,26 +104,6 @@ class BBLoggerAdapter(logging.LoggerAdapter, BBLoggerMixin):
         self.setup_bblogger(logger.name)
         super().__init__(logger, *args, **kwargs)
 
-    if sys.version_info < (3, 6):
-        # These properties were added in Python 3.6. Add them in older versions
-        # for compatibility
-        @property
-        def manager(self):
-            return self.logger.manager
-
-        @manager.setter
-        def manager(self, value):
-            self.logger.manager = value
-
-        @property
-        def name(self):
-            return self.logger.name
-
-        def __repr__(self):
-            logger = self.logger
-            level = logger.getLevelName(logger.getEffectiveLevel())
-            return '<%s %s (%s)>' % (self.__class__.__name__, logger.name, level)
-
 logging.LoggerAdapter = BBLoggerAdapter
 
 logger = logging.getLogger("BitBake")
@@ -227,3 +208,14 @@ def deprecate_import(current, modulename, fromlist, renames = None):
 
         setattr(sys.modules[current], newname, newobj)
 
+TaskData = namedtuple("TaskData", [
+    "pn",
+    "taskname",
+    "fn",
+    "deps",
+    "provides",
+    "taskhash",
+    "unihash",
+    "hashfn",
+    "taskhash_deps",
+])
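A quick illustration of what the new namedtuple buys the runqueue code below: entries are built with keyword arguments, updated immutably via _replace(), and read by field name instead of magic list indexes. A minimal sketch with placeholder values, not part of the patch:

# Sketch only: field values below are placeholders.
import bb

td = bb.TaskData(
    pn="example", taskname="do_compile", fn="/recipes/example.bb",
    deps=set(), provides=["example"], taskhash="aa11", unihash="aa11",
    hashfn="example-hashfn", taskhash_deps=set(),
)
td = td._replace(unihash="bb22")  # immutable update, as in build_taskdepdata()
print(td.unihash)                 # "bb22"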
diff --git a/bitbake/lib/bb/asyncrpc/__init__.py b/bitbake/lib/bb/asyncrpc/__init__.py
index 639e1607f8..a4371643d7 100644
--- a/bitbake/lib/bb/asyncrpc/__init__.py
+++ b/bitbake/lib/bb/asyncrpc/__init__.py
@@ -5,7 +5,7 @@
 #
 
 
-from .client import AsyncClient, Client, ClientPool
+from .client import AsyncClient, Client
 from .serv import AsyncServer, AsyncServerConnection
 from .connection import DEFAULT_MAX_CHUNK
 from .exceptions import (
diff --git a/bitbake/lib/bb/asyncrpc/client.py b/bitbake/lib/bb/asyncrpc/client.py
index 29a5ab76aa..9be49261c0 100644
--- a/bitbake/lib/bb/asyncrpc/client.py
+++ b/bitbake/lib/bb/asyncrpc/client.py
@@ -10,11 +10,41 @@ import json
 import os
 import socket
 import sys
+import re
 import contextlib
 from threading import Thread
 from .connection import StreamConnection, WebsocketConnection, DEFAULT_MAX_CHUNK
 from .exceptions import ConnectionClosedError, InvokeError
 
+UNIX_PREFIX = "unix://"
+WS_PREFIX = "ws://"
+WSS_PREFIX = "wss://"
+
+ADDR_TYPE_UNIX = 0
+ADDR_TYPE_TCP = 1
+ADDR_TYPE_WS = 2
+
+WEBSOCKETS_MIN_VERSION = (9, 1)
+# Need websockets 10 with python 3.10+
+if sys.version_info >= (3, 10, 0):
+    WEBSOCKETS_MIN_VERSION = (10, 0)
+
+
+def parse_address(addr):
+    if addr.startswith(UNIX_PREFIX):
+        return (ADDR_TYPE_UNIX, (addr[len(UNIX_PREFIX) :],))
+    elif addr.startswith(WS_PREFIX) or addr.startswith(WSS_PREFIX):
+        return (ADDR_TYPE_WS, (addr,))
+    else:
+        m = re.match(r"\[(?P<host>[^\]]*)\]:(?P<port>\d+)$", addr)
+        if m is not None:
+            host = m.group("host")
+            port = m.group("port")
+        else:
+            host, port = addr.split(":")
+
+        return (ADDR_TYPE_TCP, (host, int(port)))
+
 
 class AsyncClient(object):
     def __init__(
@@ -63,8 +93,30 @@ class AsyncClient(object):
     async def connect_websocket(self, uri):
         import websockets
 
+        try:
+            version = tuple(
+                int(v)
+                for v in websockets.__version__.split(".")[
+                    0 : len(WEBSOCKETS_MIN_VERSION)
+                ]
+            )
+        except ValueError:
+            raise ImportError(
+                f"Unable to parse websockets version '{websockets.__version__}'"
+            )
+
+        if version < WEBSOCKETS_MIN_VERSION:
+            min_ver_str = ".".join(str(v) for v in WEBSOCKETS_MIN_VERSION)
+            raise ImportError(
+                f"Websockets version {websockets.__version__} is less than minimum required version {min_ver_str}"
+            )
+
         async def connect_sock():
-            websocket = await websockets.connect(uri, ping_interval=None)
+            websocket = await websockets.connect(
+                uri,
+                ping_interval=None,
+                open_timeout=self.timeout,
+            )
             return WebsocketConnection(websocket, self.timeout)
 
         self._connect_sock = connect_sock
@@ -202,85 +254,9 @@ class Client(object):
     def close(self):
         if self.loop:
             self.loop.run_until_complete(self.client.close())
-            if sys.version_info >= (3, 6):
-                self.loop.run_until_complete(self.loop.shutdown_asyncgens())
-            self.loop.close()
-            self.loop = None
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, exc_type, exc_value, traceback):
-        self.close()
-        return False
-
-
-class ClientPool(object):
-    def __init__(self, max_clients):
-        self.avail_clients = []
-        self.num_clients = 0
-        self.max_clients = max_clients
-        self.loop = None
-        self.client_condition = None
-
-    @abc.abstractmethod
-    async def _new_client(self):
-        raise NotImplementedError("Must be implemented in derived class")
-
-    def close(self):
-        if self.client_condition:
-            self.client_condition = None
-
-        if self.loop:
-            self.loop.run_until_complete(self.__close_clients())
             self.loop.run_until_complete(self.loop.shutdown_asyncgens())
             self.loop.close()
             self.loop = None
-
-    def run_tasks(self, tasks):
-        if not self.loop:
-            self.loop = asyncio.new_event_loop()
-
-        thread = Thread(target=self.__thread_main, args=(tasks,))
-        thread.start()
-        thread.join()
-
-    @contextlib.asynccontextmanager
-    async def get_client(self):
-        async with self.client_condition:
-            if self.avail_clients:
-                client = self.avail_clients.pop()
-            elif self.num_clients < self.max_clients:
-                self.num_clients += 1
-                client = await self._new_client()
-            else:
-                while not self.avail_clients:
-                    await self.client_condition.wait()
-                client = self.avail_clients.pop()
-
-            try:
-                yield client
-            finally:
-                async with self.client_condition:
-                    self.avail_clients.append(client)
-                    self.client_condition.notify()
-
-    def __thread_main(self, tasks):
-        async def process_task(task):
-            async with self.get_client() as client:
-                await task(client)
-
-        asyncio.set_event_loop(self.loop)
-        if not self.client_condition:
-            self.client_condition = asyncio.Condition()
-        tasks = [process_task(t) for t in tasks]
-        self.loop.run_until_complete(asyncio.gather(*tasks))
-
-    async def __close_clients(self):
-        for c in self.avail_clients:
-            await c.close()
-        self.avail_clients = []
-        self.num_clients = 0
 
     def __enter__(self):
         return self
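The new parse_address() helper gives every client one address syntax for all three transports. A sketch of its return values (hostnames and paths are illustrative):

# Sketch only: addresses are illustrative.
parse_address("unix:///run/hashserv.sock")   # (ADDR_TYPE_UNIX, ("/run/hashserv.sock",))
parse_address("ws://hashserv.example:8686")  # (ADDR_TYPE_WS, ("ws://hashserv.example:8686",))
parse_address("[::1]:8686")                  # (ADDR_TYPE_TCP, ("::1", 8686))
parse_address("hashserv.example:8686")       # (ADDR_TYPE_TCP, ("hashserv.example", 8686))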
diff --git a/bitbake/lib/bb/asyncrpc/serv.py b/bitbake/lib/bb/asyncrpc/serv.py
index a66117acad..667217c5c1 100644
--- a/bitbake/lib/bb/asyncrpc/serv.py
+++ b/bitbake/lib/bb/asyncrpc/serv.py
@@ -138,14 +138,20 @@ class StreamServer(object):
 
 
 class TCPStreamServer(StreamServer):
-    def __init__(self, host, port, handler, logger):
+    def __init__(self, host, port, handler, logger, *, reuseport=False):
         super().__init__(handler, logger)
         self.host = host
         self.port = port
+        self.reuseport = reuseport
 
     def start(self, loop):
         self.server = loop.run_until_complete(
-            asyncio.start_server(self.handle_stream_client, self.host, self.port)
+            asyncio.start_server(
+                self.handle_stream_client,
+                self.host,
+                self.port,
+                reuse_port=self.reuseport,
+            )
         )
 
         for s in self.server.sockets:
@@ -209,11 +215,12 @@ class UnixStreamServer(StreamServer):
 
 
 class WebsocketsServer(object):
-    def __init__(self, host, port, handler, logger):
+    def __init__(self, host, port, handler, logger, *, reuseport=False):
         self.host = host
         self.port = port
         self.handler = handler
         self.logger = logger
+        self.reuseport = reuseport
 
     def start(self, loop):
         import websockets.server
@@ -224,6 +231,7 @@ class WebsocketsServer(object):
                 self.host,
                 self.port,
                 ping_interval=None,
+                reuse_port=self.reuseport,
             )
         )
 
@@ -262,14 +270,26 @@ class AsyncServer(object):
         self.loop = None
         self.run_tasks = []
 
-    def start_tcp_server(self, host, port):
-        self.server = TCPStreamServer(host, port, self._client_handler, self.logger)
+    def start_tcp_server(self, host, port, *, reuseport=False):
+        self.server = TCPStreamServer(
+            host,
+            port,
+            self._client_handler,
+            self.logger,
+            reuseport=reuseport,
+        )
 
     def start_unix_server(self, path):
         self.server = UnixStreamServer(path, self._client_handler, self.logger)
 
-    def start_websocket_server(self, host, port):
-        self.server = WebsocketsServer(host, port, self._client_handler, self.logger)
+    def start_websocket_server(self, host, port, reuseport=False):
+        self.server = WebsocketsServer(
+            host,
+            port,
+            self._client_handler,
+            self.logger,
+            reuseport=reuseport,
+        )
 
     async def _client_handler(self, socket):
         address = socket.address
@@ -368,8 +388,7 @@ class AsyncServer(object):
 
             self._serve_forever(tasks)
 
-            if sys.version_info >= (3, 6):
-                self.loop.run_until_complete(self.loop.shutdown_asyncgens())
+            self.loop.run_until_complete(self.loop.shutdown_asyncgens())
             self.loop.close()
 
         queue = multiprocessing.Queue()
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
index 44d08f5c55..ab8bce3d57 100644
--- a/bitbake/lib/bb/build.py
+++ b/bitbake/lib/bb/build.py
@@ -197,6 +197,8 @@ def exec_func(func, d, dirs = None):
         for cdir in d.expand(cleandirs).split():
             bb.utils.remove(cdir, True)
             bb.utils.mkdirhier(cdir)
+            if cdir == oldcwd:
+                os.chdir(cdir)
 
     if flags and dirs is None:
         dirs = flags.get('dirs')
diff --git a/bitbake/lib/bb/codeparser.py b/bitbake/lib/bb/codeparser.py
index 2e8b7ced3c..691bdff75e 100644
--- a/bitbake/lib/bb/codeparser.py
+++ b/bitbake/lib/bb/codeparser.py
@@ -484,19 +484,34 @@ class ShellParser():
         """
 
         words = list(words)
-        for word in list(words):
+        for word in words:
             wtree = pyshlex.make_wordtree(word[1])
             for part in wtree:
                 if not isinstance(part, list):
                     continue
 
-                if part[0] in ('`', '$('):
-                    command = pyshlex.wordtree_as_string(part[1:-1])
-                    self._parse_shell(command)
-
-                    if word[0] in ("cmd_name", "cmd_word"):
-                        if word in words:
-                            words.remove(word)
+                candidates = [part]
+
+                # If command is of type:
+                #
+                # var="... $(cmd [...]) ..."
+                #
+                # Then iterate on what's between the quotes and if we find a
+                # list, make that what we check for below.
+                if len(part) >= 3 and part[0] == '"':
+                    for p in part[1:-1]:
+                        if isinstance(p, list):
+                            candidates.append(p)
+
+                for candidate in candidates:
+                    if len(candidate) >= 2:
+                        if candidate[0] in ('`', '$('):
+                            command = pyshlex.wordtree_as_string(candidate[1:-1])
+                            self._parse_shell(command)
+
+                            if word[0] in ("cmd_name", "cmd_word"):
+                                if word in words:
+                                    words.remove(word)
 
         usetoken = False
         for word in words:
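The practical effect of the candidates loop is that command substitutions inside a quoted assignment value are now parsed for executed commands. A rough sketch, assuming the ShellParser(name, logger) constructor used by the test suite:

# Sketch only: the name and logger arguments are placeholders.
from bb.codeparser import ShellParser

parser = ShellParser("example", logger)
parser.parse_shell('foo="$(echo bar)"')
print(parser.execs)  # now includes "echo"; the quoted wordtree part used to be skipped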
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index c5bfef55d6..6754f986bf 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -315,13 +315,13 @@ class BBCooker:
             dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
             upstream = self.data.getVar("BB_HASHSERVE_UPSTREAM") or None
             if upstream:
-                import socket
                 try:
-                    sock = socket.create_connection(upstream.split(":"), 5)
-                    sock.close()
-                except socket.error as e:
+                    with hashserv.create_client(upstream) as client:
+                        client.ping()
+                except (ConnectionError, ImportError) as e:
                     bb.warn("BB_HASHSERVE_UPSTREAM is not valid, unable to connect hash equivalence server at '%s': %s"
                              % (upstream, repr(e)))
+                    upstream = None
 
             self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
             self.hashserv = hashserv.create_server(
@@ -680,14 +680,14 @@ class BBCooker:
         bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
         return taskdata, runlist
 
-    def prepareTreeData(self, pkgs_to_build, task):
+    def prepareTreeData(self, pkgs_to_build, task, halt=False):
         """
         Prepare a runqueue and taskdata object for iteration over pkgs_to_build
         """
 
         # We set halt to False here to prevent unbuildable targets raising
         # an exception when we're just generating data
-        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)
+        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, halt, allowincomplete=True)
 
         return runlist, taskdata
 
@@ -701,7 +701,7 @@ class BBCooker:
         if not task.startswith("do_"):
             task = "do_%s" % task
 
-        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
+        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task, halt=True)
         rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
         rq.rqdata.prepare()
         return self.buildDependTree(rq, taskdata)
@@ -1459,7 +1459,6 @@ class BBCooker:
 
                 if t in task or getAllTaskSignatures:
                     try:
-                        rq.rqdata.prepare_task_hash(tid)
                         sig.append([pn, t, rq.rqdata.get_task_unihash(tid)])
                     except KeyError:
                         sig.append(self.getTaskSignatures(target, [t])[0])
@@ -1813,8 +1812,8 @@ class CookerCollectFiles(object):
             bb.event.fire(CookerExit(), eventdata)
 
         # We need to track where we look so that we can know when the cache is invalid. There
-        # is no nice way to do this, this is horrid. We intercept the os.listdir()
-        # (or os.scandir() for python 3.6+) calls while we run glob().
+        # is no nice way to do this, this is horrid. We intercept the os.listdir() and os.scandir()
+        # calls while we run glob().
         origlistdir = os.listdir
         if hasattr(os, 'scandir'):
             origscandir = os.scandir
@@ -2225,9 +2224,8 @@ class CookerParser(object):
 
         for process in self.processes:
             process.join()
-            # Added in 3.7, cleans up zombies
-            if hasattr(process, "close"):
-                process.close()
+            # clean up zombies
+            process.close()
 
         bb.codeparser.parser_cache_save()
         bb.codeparser.parser_cache_savemerge()
@@ -2237,12 +2235,13 @@ class CookerParser(object):
         profiles = []
         for i in self.process_names:
             logfile = "profile-parse-%s.log" % i
-            if os.path.exists(logfile):
+            if os.path.exists(logfile) and os.path.getsize(logfile):
                 profiles.append(logfile)
 
-        pout = "profile-parse.log.processed"
-        bb.utils.process_profilelog(profiles, pout = pout)
-        print("Processed parsing statistics saved to %s" % (pout))
+        if profiles:
+            pout = "profile-parse.log.processed"
+            bb.utils.process_profilelog(profiles, pout = pout)
+            print("Processed parsing statistics saved to %s" % (pout))
 
     def final_cleanup(self):
         if self.syncthread:
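The first cooker.py hunk replaces a raw TCP probe with the hash server's own client, so unix://, ws:// and wss:// upstream addresses get validated too, and an unreachable upstream is now dropped rather than used. A sketch of the check (the address is a placeholder):

# Sketch only: the upstream address is a placeholder.
import hashserv

upstream = "wss://hashserv.example/ws"
try:
    with hashserv.create_client(upstream) as client:
        client.ping()
except (ConnectionError, ImportError):
    upstream = None  # warn and fall back, as the hunk does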
diff --git a/bitbake/lib/bb/fetch2/crate.py b/bitbake/lib/bb/fetch2/crate.py
index 01d49435c3..e611736f06 100644
--- a/bitbake/lib/bb/fetch2/crate.py
+++ b/bitbake/lib/bb/fetch2/crate.py
@@ -70,6 +70,7 @@ class Crate(Wget):
         host = 'crates.io/api/v1/crates'
 
         ud.url = "https://%s/%s/%s/download" % (host, name, version)
+        ud.versionsurl = "https://%s/%s/versions" % (host, name)
         ud.parm['downloadfilename'] = "%s-%s.crate" % (name, version)
         if 'name' not in ud.parm:
             ud.parm['name'] = '%s-%s' % (name, version)
@@ -139,3 +140,11 @@ class Crate(Wget):
             mdpath = os.path.join(bbpath, cratepath, mdfile)
             with open(mdpath, "w") as f:
                 json.dump(metadata, f)
+
+    def latest_versionstring(self, ud, d):
+        from functools import cmp_to_key
+        json_data = json.loads(self._fetch_index(ud.versionsurl, ud, d))
+        versions = [(0, i["num"], "") for i in json_data["versions"]]
+        versions = sorted(versions, key=cmp_to_key(bb.utils.vercmp))
+
+        return (versions[-1][1], "")
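latest_versionstring() leans on bb.utils.vercmp(), which compares (epoch, version, revision) triples; wrapping each crate version as (0, "x.y.z", "") gives proper version ordering rather than a string sort. A worked sketch with made-up versions:

# Sketch only: version strings are made up.
from functools import cmp_to_key
import bb.utils

versions = [(0, "0.9.10", ""), (0, "0.10.1", ""), (0, "0.10.0", "")]
newest = sorted(versions, key=cmp_to_key(bb.utils.vercmp))[-1][1]
print(newest)  # "0.10.1": numeric ordering, where a plain string sort would pick "0.9.10"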
diff --git a/bitbake/lib/bb/fetch2/gcp.py b/bitbake/lib/bb/fetch2/gcp.py
index f40ce2eaa5..eb3e0c6a6b 100644
--- a/bitbake/lib/bb/fetch2/gcp.py
+++ b/bitbake/lib/bb/fetch2/gcp.py
@@ -23,6 +23,7 @@ import urllib.parse, urllib.error
 from bb.fetch2 import FetchMethod
 from bb.fetch2 import FetchError
 from bb.fetch2 import logger
+from bb.fetch2 import runfetchcmd
 
 class GCP(FetchMethod):
     """
diff --git a/bitbake/lib/bb/fetch2/npmsw.py b/bitbake/lib/bb/fetch2/npmsw.py
index ff5f8dc755..b55e885d7b 100644
--- a/bitbake/lib/bb/fetch2/npmsw.py
+++ b/bitbake/lib/bb/fetch2/npmsw.py
@@ -268,7 +268,7 @@ class NpmShrinkWrap(FetchMethod):
 
     def unpack(self, ud, rootdir, d):
        """Unpack the downloaded dependencies"""
-        destdir = d.getVar("S")
+        destdir = rootdir
         destsuffix = ud.parm.get("destsuffix")
         if destsuffix:
             destdir = os.path.join(rootdir, destsuffix)
diff --git a/bitbake/lib/bb/fetch2/svn.py b/bitbake/lib/bb/fetch2/svn.py
index d40e4d2909..0852108e7d 100644
--- a/bitbake/lib/bb/fetch2/svn.py
+++ b/bitbake/lib/bb/fetch2/svn.py
@@ -210,3 +210,6 @@ class Svn(FetchMethod):
 
     def _build_revision(self, ud, d):
         return ud.revision
+
+    def supports_checksum(self, urldata):
+        return False
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py
index dc025800e6..2e92117634 100644
--- a/bitbake/lib/bb/fetch2/wget.py
+++ b/bitbake/lib/bb/fetch2/wget.py
@@ -87,7 +87,10 @@ class Wget(FetchMethod):
         if not ud.localfile:
             ud.localfile = d.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", "."))
 
-        self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 30 --passive-ftp"
+        self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 30"
+
+        if ud.type == 'ftp' or ud.type == 'ftps':
+            self.basecmd += " --passive-ftp"
 
         if not self.check_certs(d):
             self.basecmd += " --no-check-certificate"
@@ -105,7 +108,8 @@ class Wget(FetchMethod):
 
         fetchcmd = self.basecmd
 
-        localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile) + ".tmp"
+        dldir = os.path.realpath(d.getVar("DL_DIR"))
+        localpath = os.path.join(dldir, ud.localfile) + ".tmp"
         bb.utils.mkdirhier(os.path.dirname(localpath))
         fetchcmd += " -O %s" % shlex.quote(localpath)
 
@@ -125,12 +129,21 @@ class Wget(FetchMethod):
         uri = ud.url.split(";")[0]
         if os.path.exists(ud.localpath):
             # file exists, but we didnt complete it.. trying again..
-            fetchcmd += d.expand(" -c -P ${DL_DIR} '%s'" % uri)
+            fetchcmd += " -c -P " + dldir + " '" + uri + "'"
         else:
-            fetchcmd += d.expand(" -P ${DL_DIR} '%s'" % uri)
+            fetchcmd += " -P " + dldir + " '" + uri + "'"
 
         self._runwget(ud, d, fetchcmd, False)
 
+        # Sanity check since wget can pretend it succeed when it didn't
+        # Also, this used to happen if sourceforge sent us to the mirror page
+        if not os.path.exists(localpath):
+            raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, localpath), uri)
+
+        if os.path.getsize(localpath) == 0:
+            os.remove(localpath)
+            raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (uri), uri)
+
         # Try and verify any checksum now, meaning if it isn't correct, we don't remove the
         # original file, which might be a race (imagine two recipes referencing the same
         # source, one with an incorrect checksum)
@@ -140,15 +153,6 @@ class Wget(FetchMethod):
             # Our lock prevents multiple writers but mirroring code may grab incomplete files
             os.rename(localpath, localpath[:-4])
 
-        # Sanity check since wget can pretend it succeed when it didn't
-        # Also, this used to happen if sourceforge sent us to the mirror page
-        if not os.path.exists(ud.localpath):
-            raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)
-
-        if os.path.getsize(ud.localpath) == 0:
-            os.remove(ud.localpath)
-            raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (uri), uri)
-
         return True
 
     def checkstatus(self, fetch, ud, d, try_again=True):
diff --git a/bitbake/lib/bb/parse/__init__.py b/bitbake/lib/bb/parse/__init__.py
index a4358f1374..7ffdaa6fd7 100644
--- a/bitbake/lib/bb/parse/__init__.py
+++ b/bitbake/lib/bb/parse/__init__.py
@@ -49,20 +49,23 @@ class SkipPackage(SkipRecipe):
 __mtime_cache = {}
 def cached_mtime(f):
     if f not in __mtime_cache:
-        __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
+        res = os.stat(f)
+        __mtime_cache[f] = (res.st_mtime_ns, res.st_size, res.st_ino)
     return __mtime_cache[f]
 
 def cached_mtime_noerror(f):
     if f not in __mtime_cache:
         try:
-            __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
+            res = os.stat(f)
+            __mtime_cache[f] = (res.st_mtime_ns, res.st_size, res.st_ino)
         except OSError:
             return 0
     return __mtime_cache[f]
 
 def check_mtime(f, mtime):
     try:
-        current_mtime = os.stat(f)[stat.ST_MTIME]
+        res = os.stat(f)
+        current_mtime = (res.st_mtime_ns, res.st_size, res.st_ino)
         __mtime_cache[f] = current_mtime
     except OSError:
         current_mtime = 0
@@ -70,7 +73,8 @@ def check_mtime(f, mtime):
 
 def update_mtime(f):
     try:
-        __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
+        res = os.stat(f)
+        __mtime_cache[f] = (res.st_mtime_ns, res.st_size, res.st_ino)
     except OSError:
         if f in __mtime_cache:
             del __mtime_cache[f]
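The cache entries change from a one-second ST_MTIME to a (st_mtime_ns, st_size, st_ino) triple, so a file rewritten within the same second, or atomically replaced by a different inode, is still seen as modified. The new signature in isolation (the path is a placeholder):

# Sketch only: the path is a placeholder.
import os

res = os.stat("conf/local.conf")
signature = (res.st_mtime_ns, res.st_size, res.st_ino)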
diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py
index cd1c998f8f..c13e4b9755 100644
--- a/bitbake/lib/bb/parse/parse_py/BBHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/BBHandler.py
@@ -34,6 +34,7 @@ __infunc__ = []
 __inpython__ = False
 __body__ = []
 __classname__ = ""
+__residue__ = []
 
 cached_statements = {}
 
@@ -80,7 +81,7 @@ def inherit(files, fn, lineno, d, deferred=False):
     __inherit_cache = d.getVar('__inherit_cache', False) or []
 
 def get_statements(filename, absolute_filename, base_name):
-    global cached_statements
+    global cached_statements, __residue__, __body__
 
     try:
         return cached_statements[absolute_filename]
@@ -100,6 +101,11 @@ def get_statements(filename, absolute_filename, base_name):
         # add a blank line to close out any python definition
         feeder(lineno, "", filename, base_name, statements, eof=True)
 
+    if __residue__:
+        raise ParseError("Unparsed lines %s: %s" % (filename, str(__residue__)), filename, lineno)
+    if __body__:
+        raise ParseError("Unparsed lines from unclosed function %s: %s" % (filename, str(__body__)), filename, lineno)
+
     if filename.endswith(".bbclass") or filename.endswith(".inc"):
         cached_statements[absolute_filename] = statements
         return statements
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
index bc7e18175d..3462ed4457 100644
--- a/bitbake/lib/bb/runqueue.py
+++ b/bitbake/lib/bb/runqueue.py
@@ -1273,27 +1273,41 @@ class RunQueueData:
 
         bb.parse.siggen.set_setscene_tasks(self.runq_setscene_tids)
 
+        starttime = time.time()
+        lasttime = starttime
+
         # Iterate over the task list and call into the siggen code
         dealtwith = set()
         todeal = set(self.runtaskentries)
         while todeal:
+            ready = set()
             for tid in todeal.copy():
                 if not (self.runtaskentries[tid].depends - dealtwith):
-                    dealtwith.add(tid)
-                    todeal.remove(tid)
-                    self.prepare_task_hash(tid)
-                    bb.event.check_for_interrupts(self.cooker.data)
+                    self.runtaskentries[tid].taskhash_deps = bb.parse.siggen.prep_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches)
+                    # get_taskhash for a given tid *must* be called before get_unihash* below
+                    self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches)
+                    ready.add(tid)
+            unihashes = bb.parse.siggen.get_unihashes(ready)
+            for tid in ready:
+                dealtwith.add(tid)
+                todeal.remove(tid)
+                self.runtaskentries[tid].unihash = unihashes[tid]
+
+            bb.event.check_for_interrupts(self.cooker.data)
+
+            if time.time() > (lasttime + 30):
+                lasttime = time.time()
+                hashequiv_logger.verbose("Initial setup loop progress: %s of %s in %s" % (len(todeal), len(self.runtaskentries), lasttime - starttime))
+
+        endtime = time.time()
+        if (endtime-starttime > 60):
+            hashequiv_logger.verbose("Initial setup loop took: %s" % (endtime-starttime))
 
         bb.parse.siggen.writeout_file_checksum_cache()
 
         #self.dump_data()
         return len(self.runtaskentries)
 
-    def prepare_task_hash(self, tid):
-        bb.parse.siggen.prep_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches)
-        self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches)
-        self.runtaskentries[tid].unihash = bb.parse.siggen.get_unihash(tid)
-
     def dump_data(self):
         """
         Dump some debug information on the internal data structures
@@ -2438,14 +2452,17 @@ class RunQueueExecute:
         taskdepdata_cache = {}
         for task in self.rqdata.runtaskentries:
            (mc, fn, taskname, taskfn) = split_tid_mcfn(task)
-            pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
-            deps = self.rqdata.runtaskentries[task].depends
-            provides = self.rqdata.dataCaches[mc].fn_provides[taskfn]
-            taskhash = self.rqdata.runtaskentries[task].hash
-            unihash = self.rqdata.runtaskentries[task].unihash
-            deps = self.filtermcdeps(task, mc, deps)
-            hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn]
-            taskdepdata_cache[task] = [pn, taskname, fn, deps, provides, taskhash, unihash, hashfn]
+            taskdepdata_cache[task] = bb.TaskData(
+                pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn],
+                taskname = taskname,
+                fn = fn,
+                deps = self.filtermcdeps(task, mc, self.rqdata.runtaskentries[task].depends),
+                provides = self.rqdata.dataCaches[mc].fn_provides[taskfn],
+                taskhash = self.rqdata.runtaskentries[task].hash,
+                unihash = self.rqdata.runtaskentries[task].unihash,
+                hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn],
+                taskhash_deps = self.rqdata.runtaskentries[task].taskhash_deps,
+            )
 
         self.taskdepdata_cache = taskdepdata_cache
 
@@ -2460,9 +2477,11 @@ class RunQueueExecute:
         while next:
             additional = []
             for revdep in next:
-                self.taskdepdata_cache[revdep][6] = self.rqdata.runtaskentries[revdep].unihash
+                self.taskdepdata_cache[revdep] = self.taskdepdata_cache[revdep]._replace(
+                    unihash=self.rqdata.runtaskentries[revdep].unihash
+                )
                 taskdepdata[revdep] = self.taskdepdata_cache[revdep]
-                for revdep2 in self.taskdepdata_cache[revdep][3]:
+                for revdep2 in self.taskdepdata_cache[revdep].deps:
                     if revdep2 not in taskdepdata:
                         additional.append(revdep2)
             next = additional
@@ -2531,9 +2550,6 @@ class RunQueueExecute:
                     self.rqdata.runtaskentries[hashtid].unihash = unihash
                     bb.parse.siggen.set_unihash(hashtid, unihash)
                     toprocess.add(hashtid)
-            if torehash:
-                # Need to save after set_unihash above
-                bb.parse.siggen.save_unitaskhashes()
 
         # Work out all tasks which depend upon these
         total = set()
@@ -2556,17 +2572,28 @@ class RunQueueExecute:
             elif self.rqdata.runtaskentries[p].depends.isdisjoint(total):
                 next.add(p)
 
+        starttime = time.time()
+        lasttime = starttime
+
         # When an item doesn't have dependencies in total, we can process it. Drop items from total when handled
         while next:
             current = next.copy()
             next = set()
+            ready = {}
             for tid in current:
                 if self.rqdata.runtaskentries[p].depends and not self.rqdata.runtaskentries[tid].depends.isdisjoint(total):
                     continue
+                # get_taskhash for a given tid *must* be called before get_unihash* below
+                ready[tid] = bb.parse.siggen.get_taskhash(tid, self.rqdata.runtaskentries[tid].depends, self.rqdata.dataCaches)
+
+            unihashes = bb.parse.siggen.get_unihashes(ready.keys())
+
+            for tid in ready:
                 orighash = self.rqdata.runtaskentries[tid].hash
-                newhash = bb.parse.siggen.get_taskhash(tid, self.rqdata.runtaskentries[tid].depends, self.rqdata.dataCaches)
+                newhash = ready[tid]
                 origuni = self.rqdata.runtaskentries[tid].unihash
-                newuni = bb.parse.siggen.get_unihash(tid)
+                newuni = unihashes[tid]
+
                 # FIXME, need to check it can come from sstate at all for determinism?
                 remapped = False
                 if newuni == origuni:
@@ -2587,6 +2614,15 @@ class RunQueueExecute:
                 next |= self.rqdata.runtaskentries[tid].revdeps
                 total.remove(tid)
                 next.intersection_update(total)
+            bb.event.check_for_interrupts(self.cooker.data)
+
+            if time.time() > (lasttime + 30):
+                lasttime = time.time()
+                hashequiv_logger.verbose("Rehash loop slow progress: %s in %s" % (len(total), lasttime - starttime))
+
+        endtime = time.time()
+        if (endtime-starttime > 60):
+            hashequiv_logger.verbose("Rehash loop took more than 60s: %s" % (endtime-starttime))
 
         if changed:
             for mc in self.rq.worker:
@@ -2806,13 +2842,19 @@ class RunQueueExecute:
             additional = []
             for revdep in next:
                 (mc, fn, taskname, taskfn) = split_tid_mcfn(revdep)
-                pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
                 deps = getsetscenedeps(revdep)
-                provides = self.rqdata.dataCaches[mc].fn_provides[taskfn]
-                taskhash = self.rqdata.runtaskentries[revdep].hash
-                unihash = self.rqdata.runtaskentries[revdep].unihash
-                hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn]
-                taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash, unihash, hashfn]
+
+                taskdepdata[revdep] = bb.TaskData(
+                    pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn],
+                    taskname = taskname,
+                    fn = fn,
+                    deps = deps,
+                    provides = self.rqdata.dataCaches[mc].fn_provides[taskfn],
+                    taskhash = self.rqdata.runtaskentries[revdep].hash,
+                    unihash = self.rqdata.runtaskentries[revdep].unihash,
+                    hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn],
+                    taskhash_deps = self.rqdata.runtaskentries[revdep].taskhash_deps,
+                )
                 for revdep2 in deps:
                     if revdep2 not in taskdepdata:
                         additional.append(revdep2)
diff --git a/bitbake/lib/bb/siggen.py b/bitbake/lib/bb/siggen.py index 2a0ecf57e1..89b70fb6a4 100644 --- a/bitbake/lib/bb/siggen.py +++ b/bitbake/lib/bb/siggen.py | |||
@@ -15,6 +15,7 @@ import difflib | |||
15 | import simplediff | 15 | import simplediff |
16 | import json | 16 | import json |
17 | import types | 17 | import types |
18 | from contextlib import contextmanager | ||
18 | import bb.compress.zstd | 19 | import bb.compress.zstd |
19 | from bb.checksum import FileChecksumCache | 20 | from bb.checksum import FileChecksumCache |
20 | from bb import runqueue | 21 | from bb import runqueue |
@@ -28,6 +29,14 @@ hashequiv_logger = logging.getLogger('BitBake.SigGen.HashEquiv') | |||
28 | # The minimum version of the find_siginfo function we need | 29 | # The minimum version of the find_siginfo function we need |
29 | find_siginfo_minversion = 2 | 30 | find_siginfo_minversion = 2 |
30 | 31 | ||
32 | HASHSERV_ENVVARS = [ | ||
33 | "SSL_CERT_DIR", | ||
34 | "SSL_CERT_FILE", | ||
35 | "NO_PROXY", | ||
36 | "HTTPS_PROXY", | ||
37 | "HTTP_PROXY" | ||
38 | ] | ||
39 | |||
31 | def check_siggen_version(siggen): | 40 | def check_siggen_version(siggen): |
32 | if not hasattr(siggen, "find_siginfo_version"): | 41 | if not hasattr(siggen, "find_siginfo_version"): |
33 | bb.fatal("Siggen from metadata (OE-Core?) is too old, please update it (no version found)") | 42 | bb.fatal("Siggen from metadata (OE-Core?) is too old, please update it (no version found)") |
@@ -192,9 +201,6 @@ class SignatureGenerator(object): | |||
192 | def save_unitaskhashes(self): | 201 | def save_unitaskhashes(self): |
193 | return | 202 | return |
194 | 203 | ||
195 | def copy_unitaskhashes(self, targetdir): | ||
196 | return | ||
197 | |||
198 | def set_setscene_tasks(self, setscene_tasks): | 204 | def set_setscene_tasks(self, setscene_tasks): |
199 | return | 205 | return |
200 | 206 | ||
@@ -372,7 +378,7 @@ class SignatureGeneratorBasic(SignatureGenerator): | |||
372 | self.taints[tid] = taint | 378 | self.taints[tid] = taint |
373 | logger.warning("%s is tainted from a forced run" % tid) | 379 | logger.warning("%s is tainted from a forced run" % tid) |
374 | 380 | ||
375 | return | 381 | return set(dep for _, dep in self.runtaskdeps[tid]) |
376 | 382 | ||
377 | def get_taskhash(self, tid, deps, dataCaches): | 383 | def get_taskhash(self, tid, deps, dataCaches): |
378 | 384 | ||
@@ -409,9 +415,6 @@ class SignatureGeneratorBasic(SignatureGenerator): | |||
409 | def save_unitaskhashes(self): | 415 | def save_unitaskhashes(self): |
410 | self.unihash_cache.save(self.unitaskhashes) | 416 | self.unihash_cache.save(self.unitaskhashes) |
411 | 417 | ||
412 | def copy_unitaskhashes(self, targetdir): | ||
413 | self.unihash_cache.copyfile(targetdir) | ||
414 | |||
415 | def dump_sigtask(self, mcfn, task, stampbase, runtime): | 418 | def dump_sigtask(self, mcfn, task, stampbase, runtime): |
416 | tid = mcfn + ":" + task | 419 | tid = mcfn + ":" + task |
417 | mc = bb.runqueue.mc_from_tid(mcfn) | 420 | mc = bb.runqueue.mc_from_tid(mcfn) |
@@ -531,19 +534,28 @@ class SignatureGeneratorUniHashMixIn(object): | |||
531 | def __init__(self, data): | 534 | def __init__(self, data): |
532 | self.extramethod = {} | 535 | self.extramethod = {} |
533 | # NOTE: The cache only tracks hashes that exist. Hashes that don't | 536 | # NOTE: The cache only tracks hashes that exist. Hashes that don't |
534 | # exist are always queries from the server since it is possible for | 537 | # exist are always queried from the server since it is possible for |
535 | # hashes to appear over time, but much less likely for them to | 538 | # hashes to appear over time, but much less likely for them to |
536 | # disappear | 539 | # disappear |
537 | self.unihash_exists_cache = set() | 540 | self.unihash_exists_cache = set() |
538 | self.username = None | 541 | self.username = None |
539 | self.password = None | 542 | self.password = None |
543 | self.env = {} | ||
544 | |||
545 | origenv = data.getVar("BB_ORIGENV") | ||
546 | for e in HASHSERV_ENVVARS: | ||
547 | value = data.getVar(e) | ||
548 | if not value and origenv: | ||
549 | value = origenv.getVar(e) | ||
550 | if value: | ||
551 | self.env[e] = value | ||
540 | super().__init__(data) | 552 | super().__init__(data) |
541 | 553 | ||
542 | def get_taskdata(self): | 554 | def get_taskdata(self): |
543 | return (self.server, self.method, self.extramethod, self.max_parallel, self.username, self.password) + super().get_taskdata() | 555 | return (self.server, self.method, self.extramethod, self.username, self.password, self.env) + super().get_taskdata() |
544 | 556 | ||
545 | def set_taskdata(self, data): | 557 | def set_taskdata(self, data): |
546 | self.server, self.method, self.extramethod, self.max_parallel, self.username, self.password = data[:6] | 558 | self.server, self.method, self.extramethod, self.username, self.password, self.env = data[:6] |
547 | super().set_taskdata(data[6:]) | 559 | super().set_taskdata(data[6:]) |
548 | 560 | ||
549 | def get_hashserv_creds(self): | 561 | def get_hashserv_creds(self): |
@@ -555,15 +567,27 @@ class SignatureGeneratorUniHashMixIn(object): | |||
555 | 567 | ||
556 | return {} | 568 | return {} |
557 | 569 | ||
558 | def client(self): | 570 | @contextmanager |
559 | if getattr(self, '_client', None) is None: | 571 | def _client_env(self): |
560 | self._client = hashserv.create_client(self.server, **self.get_hashserv_creds()) | 572 | orig_env = os.environ.copy() |
561 | return self._client | 573 | try: |
574 | for k, v in self.env.items(): | ||
575 | os.environ[k] = v | ||
576 | |||
577 | yield | ||
578 | finally: | ||
579 | for k, v in self.env.items(): | ||
580 | if k in orig_env: | ||
581 | os.environ[k] = orig_env[k] | ||
582 | else: | ||
583 | del os.environ[k] | ||
562 | 584 | ||
563 | def client_pool(self): | 585 | @contextmanager |
564 | if getattr(self, '_client_pool', None) is None: | 586 | def client(self): |
565 | self._client_pool = hashserv.client.ClientPool(self.server, self.max_parallel, **self.get_hashserv_creds()) | 587 | with self._client_env(): |
566 | return self._client_pool | 588 | if getattr(self, '_client', None) is None: |
589 | self._client = hashserv.create_client(self.server, **self.get_hashserv_creds()) | ||
590 | yield self._client | ||
567 | 591 | ||
568 | def reset(self, data): | 592 | def reset(self, data): |
569 | self.__close_clients() | 593 | self.__close_clients() |
@@ -574,12 +598,13 @@ class SignatureGeneratorUniHashMixIn(object): | |||
574 | return super().exit() | 598 | return super().exit() |
575 | 599 | ||
576 | def __close_clients(self): | 600 | def __close_clients(self): |
577 | if getattr(self, '_client', None) is not None: | 601 | with self._client_env(): |
578 | self._client.close() | 602 | if getattr(self, '_client', None) is not None: |
579 | self._client = None | 603 | self._client.close() |
580 | if getattr(self, '_client_pool', None) is not None: | 604 | self._client = None |
581 | self._client_pool.close() | 605 | if getattr(self, '_client_pool', None) is not None: |
582 | self._client_pool = None | 606 | self._client_pool.close() |
607 | self._client_pool = None | ||
583 | 608 | ||
584 | def get_stampfile_hash(self, tid): | 609 | def get_stampfile_hash(self, tid): |
585 | if tid in self.taskhash: | 610 | if tid in self.taskhash: |
@@ -640,23 +665,20 @@ class SignatureGeneratorUniHashMixIn(object): | |||
640 | if len(query) == 0: | 665 | if len(query) == 0: |
641 | return {} | 666 | return {} |
642 | 667 | ||
643 | uncached_query = {} | 668 | query_keys = [] |
644 | result = {} | 669 | result = {} |
645 | for key, unihash in query.items(): | 670 | for key, unihash in query.items(): |
646 | if unihash in self.unihash_exists_cache: | 671 | if unihash in self.unihash_exists_cache: |
647 | result[key] = True | 672 | result[key] = True |
648 | else: | 673 | else: |
649 | uncached_query[key] = unihash | 674 | query_keys.append(key) |
650 | 675 | ||
651 | if self.max_parallel <= 1 or len(uncached_query) <= 1: | 676 | if query_keys: |
652 | # No parallelism required. Make the query serially with the single client | 677 | with self.client() as client: |
653 | uncached_result = { | 678 | query_result = client.unihash_exists_batch(query[k] for k in query_keys) |
654 | key: self.client().unihash_exists(value) for key, value in uncached_query.items() | ||
655 | } | ||
656 | else: | ||
657 | uncached_result = self.client_pool().unihashes_exist(uncached_query) | ||
658 | 679 | ||
659 | for key, exists in uncached_result.items(): | 680 | for idx, key in enumerate(query_keys): |
681 | exists = query_result[idx] | ||
660 | if exists: | 682 | if exists: |
661 | self.unihash_exists_cache.add(query[key]) | 683 | self.unihash_exists_cache.add(query[key]) |
662 | result[key] = exists | 684 | result[key] = exists |
@@ -672,27 +694,20 @@ class SignatureGeneratorUniHashMixIn(object): | |||
672 | unihash | 694 | unihash |
673 | """ | 695 | """ |
674 | result = {} | 696 | result = {} |
675 | queries = {} | 697 | query_tids = [] |
676 | query_result = {} | ||
677 | 698 | ||
678 | for tid in tids: | 699 | for tid in tids: |
679 | unihash = self.get_cached_unihash(tid) | 700 | unihash = self.get_cached_unihash(tid) |
680 | if unihash: | 701 | if unihash: |
681 | result[tid] = unihash | 702 | result[tid] = unihash |
682 | else: | 703 | else: |
683 | queries[tid] = (self._get_method(tid), self.taskhash[tid]) | 704 | query_tids.append(tid) |
684 | 705 | ||
685 | if len(queries) == 0: | 706 | if query_tids: |
686 | return result | 707 | with self.client() as client: |
708 | unihashes = client.get_unihash_batch((self._get_method(tid), self.taskhash[tid]) for tid in query_tids) | ||
687 | 709 | ||
688 | if self.max_parallel <= 1 or len(queries) <= 1: | 710 | for idx, tid in enumerate(query_tids): |
689 | # No parallelism required. Make the query serially with the single client | ||
690 | for tid, args in queries.items(): | ||
691 | query_result[tid] = self.client().get_unihash(*args) | ||
692 | else: | ||
693 | query_result = self.client_pool().get_unihashes(queries) | ||
694 | |||
695 | for tid, unihash in query_result.items(): | ||
696 | # In the absence of being able to discover a unique hash from the | 711 | # In the absence of being able to discover a unique hash from the |
697 | # server, make it be equivalent to the taskhash. The unique "hash" only | 712 | # server, make it be equivalent to the taskhash. The unique "hash" only |
698 | # really needs to be a unique string (not even necessarily a hash), but | 713 | # really needs to be a unique string (not even necessarily a hash), but |
@@ -707,6 +722,8 @@ class SignatureGeneratorUniHashMixIn(object): | |||
707 | # to the server, there is a better chance that they will agree on | 722 | # to the server, there is a better chance that they will agree on |
708 | # the unique hash. | 723 | # the unique hash. |
709 | taskhash = self.taskhash[tid] | 724 | taskhash = self.taskhash[tid] |
725 | unihash = unihashes[idx] | ||
726 | |||
710 | if unihash: | 727 | if unihash: |
711 | # A unique hash equal to the taskhash is not very interesting, | 728 | # A unique hash equal to the taskhash is not very interesting, |
712 | # so it is reported at debug level 2. If they differ, that | 729 | # so it is reported at debug level 2. If they differ, that |
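
The same positional contract drives the rewritten get_unihashes(): cached unihashes are returned directly, the misses are submitted as (method, taskhash) pairs in one get_unihash_batch() call, and a falsy reply falls back to the taskhash, as the surrounding comment explains. A hedged, self-contained sketch under those assumptions (the stub reply and helper names are illustrative, not bitbake's API):

    def resolve_unihashes(query_tids, taskhash, client, get_method):
        # One batched call; replies must line up with query_tids by index.
        unihashes = client.get_unihash_batch(
            (get_method(tid), taskhash[tid]) for tid in query_tids
        )
        resolved = {}
        for idx, tid in enumerate(query_tids):
            # A falsy reply means the server had no equivalent hash, so the
            # taskhash itself serves as the unique hash.
            resolved[tid] = unihashes[idx] or taskhash[tid]
        return resolved

    class StubClient:
        def get_unihash_batch(self, pairs):
            list(pairs)                  # consume the generator like a real RPC
            return ["equivhash", None]   # hypothetical server reply

    taskhash = {"t1": "hash1", "t2": "hash2"}
    print(resolve_unihashes(["t1", "t2"], taskhash, StubClient(), lambda t: "m"))
    # -> {'t1': 'equivhash', 't2': 'hash2'}
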
@@ -785,7 +802,9 @@ class SignatureGeneratorUniHashMixIn(object): | |||
785 | if tid in self.extramethod: | 802 | if tid in self.extramethod: |
786 | method = method + self.extramethod[tid] | 803 | method = method + self.extramethod[tid] |
787 | 804 | ||
788 | data = self.client().report_unihash(taskhash, method, outhash, unihash, extra_data) | 805 | with self.client() as client: |
806 | data = client.report_unihash(taskhash, method, outhash, unihash, extra_data) | ||
807 | |||
789 | new_unihash = data['unihash'] | 808 | new_unihash = data['unihash'] |
790 | 809 | ||
791 | if new_unihash != unihash: | 810 | if new_unihash != unihash: |
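
Both report paths, report_unihash() above and report_unihash_equiv() in the next hunk, now take the connection through with self.client() as client:, scoping it to the call instead of borrowing from a long-lived pool. A sketch of the shape such a context-managed factory can take, assuming a per-call connection; bitbake's real client() may cache or reuse the connection instead:

    import contextlib

    class ReporterSketch:
        def __init__(self, connect):
            self._connect = connect       # hypothetical connection factory

        @contextlib.contextmanager
        def client(self):
            conn = self._connect()
            try:
                yield conn
            finally:
                conn.close()              # per-call lifetime, in this sketch

        def report(self, taskhash, method, outhash, unihash, extra_data):
            with self.client() as client:
                return client.report_unihash(taskhash, method, outhash,
                                             unihash, extra_data)
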
@@ -816,7 +835,9 @@ class SignatureGeneratorUniHashMixIn(object): | |||
816 | if tid in self.extramethod: | 835 | if tid in self.extramethod: |
817 | method = method + self.extramethod[tid] | 836 | method = method + self.extramethod[tid] |
818 | 837 | ||
819 | data = self.client().report_unihash_equiv(taskhash, method, wanted_unihash, extra_data) | 838 | with self.client() as client: |
839 | data = client.report_unihash_equiv(taskhash, method, wanted_unihash, extra_data) | ||
840 | |||
820 | hashequiv_logger.verbose('Reported task %s as unihash %s to %s (%s)' % (tid, wanted_unihash, self.server, str(data))) | 841 | hashequiv_logger.verbose('Reported task %s as unihash %s to %s (%s)' % (tid, wanted_unihash, self.server, str(data))) |
821 | 842 | ||
822 | if data is None: | 843 | if data is None: |
@@ -849,7 +870,6 @@ class SignatureGeneratorTestEquivHash(SignatureGeneratorUniHashMixIn, SignatureG | |||
849 | super().init_rundepcheck(data) | 870 | super().init_rundepcheck(data) |
850 | self.server = data.getVar('BB_HASHSERVE') | 871 | self.server = data.getVar('BB_HASHSERVE') |
851 | self.method = "sstate_output_hash" | 872 | self.method = "sstate_output_hash" |
852 | self.max_parallel = 1 | ||
853 | 873 | ||
854 | def clean_checksum_file_path(file_checksum_tuple): | 874 | def clean_checksum_file_path(file_checksum_tuple): |
855 | f, cs = file_checksum_tuple | 875 | f, cs = file_checksum_tuple |
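
With the pooled client gone, max_parallel disappears from the test generator as well, and init_rundepcheck() reduces to recording the server and method. Consolidated from the hunk above for readability (base classes elided; the name attribute is an assumption based on the usual siggen convention, not shown in this hunk):

    class SignatureGeneratorTestEquivHash(SignatureGeneratorUniHashMixIn,
                                          SignatureGeneratorBasicHash):
        name = "TestEquivHash"  # assumed from convention

        def init_rundepcheck(self, data):
            super().init_rundepcheck(data)
            self.server = data.getVar('BB_HASHSERVE')
            self.method = "sstate_output_hash"
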
diff --git a/bitbake/lib/bb/tests/codeparser.py b/bitbake/lib/bb/tests/codeparser.py index f6585fb3aa..c0d1362a0c 100644 --- a/bitbake/lib/bb/tests/codeparser.py +++ b/bitbake/lib/bb/tests/codeparser.py | |||
@@ -106,6 +106,46 @@ ${D}${libdir}/pkgconfig/*.pc | |||
106 | self.parseExpression("foo=$(echo bar)") | 106 | self.parseExpression("foo=$(echo bar)") |
107 | self.assertExecs(set(["echo"])) | 107 | self.assertExecs(set(["echo"])) |
108 | 108 | ||
109 | def test_assign_subshell_expansion_quotes(self): | ||
110 | self.parseExpression('foo="$(echo bar)"') | ||
111 | self.assertExecs(set(["echo"])) | ||
112 | |||
113 | def test_assign_subshell_expansion_nested(self): | ||
114 | self.parseExpression('foo="$(func1 "$(func2 bar$(func3))")"') | ||
115 | self.assertExecs(set(["func1", "func2", "func3"])) | ||
116 | |||
117 | def test_assign_subshell_expansion_multiple(self): | ||
118 | self.parseExpression('foo="$(func1 "$(func2)") $(func3)"') | ||
119 | self.assertExecs(set(["func1", "func2", "func3"])) | ||
120 | |||
121 | def test_assign_subshell_expansion_escaped_quotes(self): | ||
122 | self.parseExpression('foo="\\"fo\\"o$(func1)"') | ||
123 | self.assertExecs(set(["func1"])) | ||
124 | |||
125 | def test_assign_subshell_expansion_empty(self): | ||
126 | self.parseExpression('foo="bar$()foo"') | ||
127 | self.assertExecs(set()) | ||
128 | |||
129 | def test_assign_subshell_backticks(self): | ||
130 | self.parseExpression("foo=`echo bar`") | ||
131 | self.assertExecs(set(["echo"])) | ||
132 | |||
133 | def test_assign_subshell_backticks_quotes(self): | ||
134 | self.parseExpression('foo="`echo bar`"') | ||
135 | self.assertExecs(set(["echo"])) | ||
136 | |||
137 | def test_assign_subshell_backticks_multiple(self): | ||
138 | self.parseExpression('foo="`func1 bar` `func2`"') | ||
139 | self.assertExecs(set(["func1", "func2"])) | ||
140 | |||
141 | def test_assign_subshell_backticks_escaped_quotes(self): | ||
142 | self.parseExpression('foo="\\"fo\\"o`func1`"') | ||
143 | self.assertExecs(set(["func1"])) | ||
144 | |||
145 | def test_assign_subshell_backticks_empty(self): | ||
146 | self.parseExpression('foo="bar``foo"') | ||
147 | self.assertExecs(set()) | ||
148 | |||
109 | def test_shell_unexpanded(self): | 149 | def test_shell_unexpanded(self): |
110 | self.setEmptyVars(["QT_BASE_NAME"]) | 150 | self.setEmptyVars(["QT_BASE_NAME"]) |
111 | self.parseExpression('echo "${QT_BASE_NAME}"') | 151 | self.parseExpression('echo "${QT_BASE_NAME}"') |
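
Taken together, the new cases pin down one rule: every command executed through $(...) or backtick substitution must surface in assertExecs(), however deeply the substitution nests inside (possibly escaped) quotes, while an empty substitution contributes nothing. The same assertions, condensed into a single table copied from the tests above:

    expected_execs = {
        'foo="$(echo bar)"':                      {"echo"},
        'foo="$(func1 "$(func2 bar$(func3))")"':  {"func1", "func2", "func3"},
        'foo="$(func1 "$(func2)") $(func3)"':     {"func1", "func2", "func3"},
        'foo="\\"fo\\"o$(func1)"':                {"func1"},
        'foo="bar$()foo"':                        set(),
        "foo=`echo bar`":                         {"echo"},
        'foo="`echo bar`"':                       {"echo"},
        'foo="`func1 bar` `func2`"':              {"func1", "func2"},
        'foo="\\"fo\\"o`func1`"':                 {"func1"},
        'foo="bar``foo"':                         set(),
    }
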
diff --git a/bitbake/lib/bb/tests/fetch.py b/bitbake/lib/bb/tests/fetch.py index 85c1f79ff3..701129d138 100644 --- a/bitbake/lib/bb/tests/fetch.py +++ b/bitbake/lib/bb/tests/fetch.py | |||
@@ -511,7 +511,8 @@ class MirrorUriTest(FetcherTest): | |||
511 | mirrorvar = "http://.*/.* file:///somepath/downloads/ " \ | 511 | mirrorvar = "http://.*/.* file:///somepath/downloads/ " \ |
512 | "git://someserver.org/bitbake git://git.openembedded.org/bitbake " \ | 512 | "git://someserver.org/bitbake git://git.openembedded.org/bitbake " \ |
513 | "https://.*/.* file:///someotherpath/downloads/ " \ | 513 | "https://.*/.* file:///someotherpath/downloads/ " \ |
514 | "http://.*/.* file:///someotherpath/downloads/" | 514 | "http://.*/.* file:///someotherpath/downloads/ " \ |
515 | "svn://svn.server1.com/ svn://svn.server2.com/" | ||
515 | 516 | ||
516 | def test_urireplace(self): | 517 | def test_urireplace(self): |
517 | self.d.setVar("FILESPATH", ".") | 518 | self.d.setVar("FILESPATH", ".") |
@@ -535,6 +536,13 @@ class MirrorUriTest(FetcherTest): | |||
535 | uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d) | 536 | uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d) |
536 | self.assertEqual(uris, ['file:///someotherpath/downloads/bitbake-1.0.tar.gz']) | 537 | self.assertEqual(uris, ['file:///someotherpath/downloads/bitbake-1.0.tar.gz']) |
537 | 538 | ||
539 | def test_urilistsvn(self): | ||
540 | # Catch svn:// -> svn:// bug | ||
541 | fetcher = bb.fetch.FetchData("svn://svn.server1.com/isource/svnroot/reponame/tags/tagname;module=path_in_tagnamefolder;protocol=https;rev=2", self.d) | ||
542 | mirrors = bb.fetch2.mirror_from_string(self.mirrorvar) | ||
543 | uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d) | ||
544 | self.assertEqual(uris, ['svn://svn.server2.com/isource/svnroot/reponame/tags/tagname;module=path_in_tagnamefolder;protocol=https;rev=2']) | ||
545 | |||
538 | def test_mirror_of_mirror(self): | 546 | def test_mirror_of_mirror(self): |
539 | # Test if mirror of a mirror works | 547 | # Test if mirror of a mirror works |
540 | mirrorvar = self.mirrorvar + " http://.*/.* http://otherdownloads.yoctoproject.org/downloads/" | 548 | mirrorvar = self.mirrorvar + " http://.*/.* http://otherdownloads.yoctoproject.org/downloads/" |
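
Mirror specifications are whitespace-separated (pattern, replacement) pairs, so the added svn://svn.server1.com/ svn://svn.server2.com/ entry, together with test_urilistsvn, guards against svn URLs failing to rewrite onto their mirror. A short illustration using only names that appear in the diff:

    import bb.fetch2

    mirrors = bb.fetch2.mirror_from_string(
        "svn://svn.server1.com/ svn://svn.server2.com/"
    )
    # build_mirroruris() is then expected to rewrite the test's
    # svn://svn.server1.com/... URL onto svn.server2.com with the module,
    # protocol and rev parameters preserved, as the assertEqual verifies.
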
@@ -1421,7 +1429,7 @@ class FetchLatestVersionTest(FetcherTest): | |||
1421 | # combination version pattern | 1429 | # combination version pattern |
1422 | ("sysprof", "git://gitlab.gnome.org/GNOME/sysprof.git;protocol=https;branch=master", "cd44ee6644c3641507fb53b8a2a69137f2971219", "", "") | 1430 | ("sysprof", "git://gitlab.gnome.org/GNOME/sysprof.git;protocol=https;branch=master", "cd44ee6644c3641507fb53b8a2a69137f2971219", "", "") |
1423 | : "1.2.0", | 1431 | : "1.2.0", |
1424 | ("u-boot-mkimage", "git://git.denx.de/u-boot.git;branch=master;protocol=git", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "", "") | 1432 | ("u-boot-mkimage", "git://source.denx.de/u-boot/u-boot.git;branch=master;protocol=https", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "", "") |
1425 | : "2014.01", | 1433 | : "2014.01", |
1426 | # version pattern "yyyymmdd" | 1434 | # version pattern "yyyymmdd" |
1427 | ("mobile-broadband-provider-info", "git://gitlab.gnome.org/GNOME/mobile-broadband-provider-info.git;protocol=https;branch=master", "4ed19e11c2975105b71b956440acdb25d46a347d", "", "") | 1435 | ("mobile-broadband-provider-info", "git://gitlab.gnome.org/GNOME/mobile-broadband-provider-info.git;protocol=https;branch=master", "4ed19e11c2975105b71b956440acdb25d46a347d", "", "") |
@@ -1493,6 +1501,12 @@ class FetchLatestVersionTest(FetcherTest): | |||
1493 | : "2.8", | 1501 | : "2.8", |
1494 | } | 1502 | } |
1495 | 1503 | ||
1504 | test_crate_uris = { | ||
1505 | # basic example; version pattern "A.B.C+cargo-D.E.F" | ||
1506 | ("cargo-c", "crate://crates.io/cargo-c/0.9.18+cargo-0.69") | ||
1507 | : "0.9.29" | ||
1508 | } | ||
1509 | |||
1496 | @skipIfNoNetwork() | 1510 | @skipIfNoNetwork() |
1497 | def test_git_latest_versionstring(self): | 1511 | def test_git_latest_versionstring(self): |
1498 | for k, v in self.test_git_uris.items(): | 1512 | for k, v in self.test_git_uris.items(): |
@@ -1511,7 +1525,7 @@ class FetchLatestVersionTest(FetcherTest): | |||
1511 | 1525 | ||
1512 | def test_wget_latest_versionstring(self): | 1526 | def test_wget_latest_versionstring(self): |
1513 | testdata = os.path.dirname(os.path.abspath(__file__)) + "/fetch-testdata" | 1527 | testdata = os.path.dirname(os.path.abspath(__file__)) + "/fetch-testdata" |
1514 | server = HTTPService(testdata) | 1528 | server = HTTPService(testdata, host="127.0.0.1") |
1515 | server.start() | 1529 | server.start() |
1516 | port = server.port | 1530 | port = server.port |
1517 | try: | 1531 | try: |
@@ -1519,10 +1533,10 @@ class FetchLatestVersionTest(FetcherTest): | |||
1519 | self.d.setVar("PN", k[0]) | 1533 | self.d.setVar("PN", k[0]) |
1520 | checkuri = "" | 1534 | checkuri = "" |
1521 | if k[2]: | 1535 | if k[2]: |
1522 | checkuri = "http://localhost:%s/" % port + k[2] | 1536 | checkuri = "http://127.0.0.1:%s/" % port + k[2] |
1523 | self.d.setVar("UPSTREAM_CHECK_URI", checkuri) | 1537 | self.d.setVar("UPSTREAM_CHECK_URI", checkuri) |
1524 | self.d.setVar("UPSTREAM_CHECK_REGEX", k[3]) | 1538 | self.d.setVar("UPSTREAM_CHECK_REGEX", k[3]) |
1525 | url = "http://localhost:%s/" % port + k[1] | 1539 | url = "http://127.0.0.1:%s/" % port + k[1] |
1526 | ud = bb.fetch2.FetchData(url, self.d) | 1540 | ud = bb.fetch2.FetchData(url, self.d) |
1527 | pupver = ud.method.latest_versionstring(ud, self.d) | 1541 | pupver = ud.method.latest_versionstring(ud, self.d) |
1528 | verstring = pupver[0] | 1542 | verstring = pupver[0] |
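
Pinning both the test server (via the new host= keyword) and the request URLs to 127.0.0.1 removes a dependency on name resolution: on machines where localhost resolves to ::1 first, the client and an IPv4-only server can disagree. That rationale is inferred from the change itself rather than quoted from it. The resulting pairing:

    server = HTTPService(testdata, host="127.0.0.1")   # bind one fixed IPv4 address
    server.start()
    url = "http://127.0.0.1:%s/" % server.port + k[1]  # request that same address
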
@@ -1532,6 +1546,16 @@ class FetchLatestVersionTest(FetcherTest): | |||
1532 | finally: | 1546 | finally: |
1533 | server.stop() | 1547 | server.stop() |
1534 | 1548 | ||
1549 | @skipIfNoNetwork() | ||
1550 | def test_crate_latest_versionstring(self): | ||
1551 | for k, v in self.test_crate_uris.items(): | ||
1552 | self.d.setVar("PN", k[0]) | ||
1553 | ud = bb.fetch2.FetchData(k[1], self.d) | ||
1554 | pupver = ud.method.latest_versionstring(ud, self.d) | ||
1555 | verstring = pupver[0] | ||
1556 | self.assertTrue(verstring, msg="Could not find upstream version for %s" % k[0]) | ||
1557 | r = bb.utils.vercmp_string(v, verstring) | ||
1558 | self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring)) | ||
1535 | 1559 | ||
1536 | class FetchCheckStatusTest(FetcherTest): | 1560 | class FetchCheckStatusTest(FetcherTest): |
1537 | test_wget_uris = ["https://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz", | 1561 | test_wget_uris = ["https://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz", |
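
The new crate check mirrors the git and wget tests: it accepts vercmp_string() returning -1 or 0, meaning the pinned 0.9.29 floor must not be newer than whatever crates.io currently publishes. The return convention, as the assertion implies (cmp-style sign values; a hedged reading, not a spec quote):

    import bb.utils

    bb.utils.vercmp_string("0.9.18", "0.9.29")  # -1: first argument is older
    bb.utils.vercmp_string("0.9.29", "0.9.29")  #  0: versions are equal
    bb.utils.vercmp_string("0.9.30", "0.9.29")  #  1: first argument is newer
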
diff --git a/bitbake/lib/bb/ui/buildinfohelper.py b/bitbake/lib/bb/ui/buildinfohelper.py index 8b212b7803..4ee45d67a2 100644 --- a/bitbake/lib/bb/ui/buildinfohelper.py +++ b/bitbake/lib/bb/ui/buildinfohelper.py | |||
@@ -559,7 +559,10 @@ class ORMWrapper(object): | |||
559 | # we might have an invalid link; no way to detect this. just set it to None | 559 | # we might have an invalid link; no way to detect this. just set it to None |
560 | filetarget_obj = None | 560 | filetarget_obj = None |
561 | 561 | ||
562 | parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY) | 562 | try: |
563 | parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY) | ||
564 | except Target_File.DoesNotExist: | ||
565 | parent_obj = None | ||
563 | 566 | ||
564 | Target_File.objects.create( | 567 | Target_File.objects.create( |
565 | target = target_obj, | 568 | target = target_obj, |
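
The buildinfohelper fix is Django's standard guarded-lookup pattern: a parent directory row that was never recorded now degrades to parent_obj = None instead of raising Target_File.DoesNotExist and aborting the Toaster import. The pattern in isolation, with the model and field names taken from the diff:

    try:
        parent_obj = Target_File.objects.get(
            target=target_obj,
            path=parent_path,
            inodetype=Target_File.ITYPE_DIRECTORY,
        )
    except Target_File.DoesNotExist:
        parent_obj = None  # tolerate directories the file walker never recorded
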