Diffstat (limited to 'bitbake/lib/bb')
-rw-r--r--  bitbake/lib/bb/__init__.py            |  34
-rw-r--r--  bitbake/lib/bb/asyncrpc/__init__.py   |   2
-rw-r--r--  bitbake/lib/bb/asyncrpc/client.py     | 109
-rw-r--r--  bitbake/lib/bb/asyncrpc/serv.py       |  37
-rw-r--r--  bitbake/lib/bb/build.py               |   2
-rw-r--r--  bitbake/lib/bb/codeparser.py          |  31
-rw-r--r--  bitbake/lib/bb/cooker.py              |  28
-rw-r--r--  bitbake/lib/bb/fetch2/crate.py        |   9
-rw-r--r--  bitbake/lib/bb/fetch2/npmsw.py        |   2
-rw-r--r--  bitbake/lib/bb/fetch2/svn.py          |   3
-rw-r--r--  bitbake/lib/bb/fetch2/wget.py         |  25
-rw-r--r--  bitbake/lib/bb/parse/__init__.py      |  12
-rw-r--r--  bitbake/lib/bb/runqueue.py            | 102
-rw-r--r--  bitbake/lib/bb/siggen.py              |  62
-rw-r--r--  bitbake/lib/bb/tests/codeparser.py    |  40
-rw-r--r--  bitbake/lib/bb/tests/fetch.py         |  34
-rw-r--r--  bitbake/lib/bb/ui/buildinfohelper.py  |   5
17 files changed, 309 insertions, 228 deletions
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
index 15013540c2..574e0de5be 100644
--- a/bitbake/lib/bb/__init__.py
+++ b/bitbake/lib/bb/__init__.py
@@ -9,7 +9,7 @@
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
-__version__ = "2.9.0"
+__version__ = "2.9.1"
 
 import sys
 if sys.version_info < (3, 8, 0):
@@ -36,6 +36,7 @@ class BBHandledException(Exception):
 
 import os
 import logging
+from collections import namedtuple
 
 
 class NullHandler(logging.Handler):
@@ -103,26 +104,6 @@ class BBLoggerAdapter(logging.LoggerAdapter, BBLoggerMixin):
         self.setup_bblogger(logger.name)
         super().__init__(logger, *args, **kwargs)
 
-    if sys.version_info < (3, 6):
-        # These properties were added in Python 3.6. Add them in older versions
-        # for compatibility
-        @property
-        def manager(self):
-            return self.logger.manager
-
-        @manager.setter
-        def manager(self, value):
-            self.logger.manager = value
-
-        @property
-        def name(self):
-            return self.logger.name
-
-        def __repr__(self):
-            logger = self.logger
-            level = logger.getLevelName(logger.getEffectiveLevel())
-            return '<%s %s (%s)>' % (self.__class__.__name__, logger.name, level)
-
 logging.LoggerAdapter = BBLoggerAdapter
 
 logger = logging.getLogger("BitBake")
@@ -227,3 +208,14 @@ def deprecate_import(current, modulename, fromlist, renames = None):
 
         setattr(sys.modules[current], newname, newobj)
 
+TaskData = namedtuple("TaskData", [
+    "pn",
+    "taskname",
+    "fn",
+    "deps",
+    "provides",
+    "taskhash",
+    "unihash",
+    "hashfn",
+    "taskhash_deps",
+])
diff --git a/bitbake/lib/bb/asyncrpc/__init__.py b/bitbake/lib/bb/asyncrpc/__init__.py
index 639e1607f8..a4371643d7 100644
--- a/bitbake/lib/bb/asyncrpc/__init__.py
+++ b/bitbake/lib/bb/asyncrpc/__init__.py
@@ -5,7 +5,7 @@
 #
 
 
-from .client import AsyncClient, Client, ClientPool
+from .client import AsyncClient, Client
 from .serv import AsyncServer, AsyncServerConnection
 from .connection import DEFAULT_MAX_CHUNK
 from .exceptions import (
diff --git a/bitbake/lib/bb/asyncrpc/client.py b/bitbake/lib/bb/asyncrpc/client.py
index a350b4fb12..9be49261c0 100644
--- a/bitbake/lib/bb/asyncrpc/client.py
+++ b/bitbake/lib/bb/asyncrpc/client.py
@@ -24,6 +24,12 @@ ADDR_TYPE_UNIX = 0
 ADDR_TYPE_TCP = 1
 ADDR_TYPE_WS = 2
 
+WEBSOCKETS_MIN_VERSION = (9, 1)
+# Need websockets 10 with python 3.10+
+if sys.version_info >= (3, 10, 0):
+    WEBSOCKETS_MIN_VERSION = (10, 0)
+
+
 def parse_address(addr):
     if addr.startswith(UNIX_PREFIX):
         return (ADDR_TYPE_UNIX, (addr[len(UNIX_PREFIX) :],))
@@ -39,6 +45,7 @@ def parse_address(addr):
 
     return (ADDR_TYPE_TCP, (host, int(port)))
 
+
 class AsyncClient(object):
     def __init__(
         self,
@@ -86,8 +93,30 @@ class AsyncClient(object):
     async def connect_websocket(self, uri):
         import websockets
 
+        try:
+            version = tuple(
+                int(v)
+                for v in websockets.__version__.split(".")[
+                    0 : len(WEBSOCKETS_MIN_VERSION)
+                ]
+            )
+        except ValueError:
+            raise ImportError(
+                f"Unable to parse websockets version '{websockets.__version__}'"
+            )
+
+        if version < WEBSOCKETS_MIN_VERSION:
+            min_ver_str = ".".join(str(v) for v in WEBSOCKETS_MIN_VERSION)
+            raise ImportError(
+                f"Websockets version {websockets.__version__} is less than minimum required version {min_ver_str}"
+            )
+
         async def connect_sock():
-            websocket = await websockets.connect(uri, ping_interval=None)
+            websocket = await websockets.connect(
+                uri,
+                ping_interval=None,
+                open_timeout=self.timeout,
+            )
             return WebsocketConnection(websocket, self.timeout)
 
         self._connect_sock = connect_sock
@@ -225,85 +254,9 @@ class Client(object):
     def close(self):
         if self.loop:
             self.loop.run_until_complete(self.client.close())
-            if sys.version_info >= (3, 6):
-                self.loop.run_until_complete(self.loop.shutdown_asyncgens())
-            self.loop.close()
-        self.loop = None
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, exc_type, exc_value, traceback):
-        self.close()
-        return False
-
-
-class ClientPool(object):
-    def __init__(self, max_clients):
-        self.avail_clients = []
-        self.num_clients = 0
-        self.max_clients = max_clients
-        self.loop = None
-        self.client_condition = None
-
-    @abc.abstractmethod
-    async def _new_client(self):
-        raise NotImplementedError("Must be implemented in derived class")
-
-    def close(self):
-        if self.client_condition:
-            self.client_condition = None
-
-        if self.loop:
-            self.loop.run_until_complete(self.__close_clients())
             self.loop.run_until_complete(self.loop.shutdown_asyncgens())
             self.loop.close()
         self.loop = None
-
-    def run_tasks(self, tasks):
-        if not self.loop:
-            self.loop = asyncio.new_event_loop()
-
-        thread = Thread(target=self.__thread_main, args=(tasks,))
-        thread.start()
-        thread.join()
-
-    @contextlib.asynccontextmanager
-    async def get_client(self):
-        async with self.client_condition:
-            if self.avail_clients:
-                client = self.avail_clients.pop()
-            elif self.num_clients < self.max_clients:
-                self.num_clients += 1
-                client = await self._new_client()
-            else:
-                while not self.avail_clients:
-                    await self.client_condition.wait()
-                client = self.avail_clients.pop()
-
-        try:
-            yield client
-        finally:
-            async with self.client_condition:
-                self.avail_clients.append(client)
-                self.client_condition.notify()
-
-    def __thread_main(self, tasks):
-        async def process_task(task):
-            async with self.get_client() as client:
-                await task(client)
-
-        asyncio.set_event_loop(self.loop)
-        if not self.client_condition:
-            self.client_condition = asyncio.Condition()
-        tasks = [process_task(t) for t in tasks]
-        self.loop.run_until_complete(asyncio.gather(*tasks))
-
-    async def __close_clients(self):
-        for c in self.avail_clients:
-            await c.close()
-        self.avail_clients = []
-        self.num_clients = 0
 
     def __enter__(self):
         return self
diff --git a/bitbake/lib/bb/asyncrpc/serv.py b/bitbake/lib/bb/asyncrpc/serv.py
index a66117acad..667217c5c1 100644
--- a/bitbake/lib/bb/asyncrpc/serv.py
+++ b/bitbake/lib/bb/asyncrpc/serv.py
@@ -138,14 +138,20 @@ class StreamServer(object):
 
 
 class TCPStreamServer(StreamServer):
-    def __init__(self, host, port, handler, logger):
+    def __init__(self, host, port, handler, logger, *, reuseport=False):
         super().__init__(handler, logger)
         self.host = host
         self.port = port
+        self.reuseport = reuseport
 
     def start(self, loop):
         self.server = loop.run_until_complete(
-            asyncio.start_server(self.handle_stream_client, self.host, self.port)
+            asyncio.start_server(
+                self.handle_stream_client,
+                self.host,
+                self.port,
+                reuse_port=self.reuseport,
+            )
         )
 
         for s in self.server.sockets:
@@ -209,11 +215,12 @@ class UnixStreamServer(StreamServer):
 
 
 class WebsocketsServer(object):
-    def __init__(self, host, port, handler, logger):
+    def __init__(self, host, port, handler, logger, *, reuseport=False):
         self.host = host
         self.port = port
         self.handler = handler
         self.logger = logger
+        self.reuseport = reuseport
 
     def start(self, loop):
         import websockets.server
@@ -224,6 +231,7 @@
                 self.host,
                 self.port,
                 ping_interval=None,
+                reuse_port=self.reuseport,
             )
         )
 
@@ -262,14 +270,26 @@ class AsyncServer(object):
         self.loop = None
         self.run_tasks = []
 
-    def start_tcp_server(self, host, port):
-        self.server = TCPStreamServer(host, port, self._client_handler, self.logger)
+    def start_tcp_server(self, host, port, *, reuseport=False):
+        self.server = TCPStreamServer(
+            host,
+            port,
+            self._client_handler,
+            self.logger,
+            reuseport=reuseport,
+        )
 
     def start_unix_server(self, path):
         self.server = UnixStreamServer(path, self._client_handler, self.logger)
 
-    def start_websocket_server(self, host, port):
-        self.server = WebsocketsServer(host, port, self._client_handler, self.logger)
+    def start_websocket_server(self, host, port, reuseport=False):
+        self.server = WebsocketsServer(
+            host,
+            port,
+            self._client_handler,
+            self.logger,
+            reuseport=reuseport,
+        )
 
     async def _client_handler(self, socket):
         address = socket.address
@@ -368,8 +388,7 @@
 
         self._serve_forever(tasks)
 
-        if sys.version_info >= (3, 6):
-            self.loop.run_until_complete(self.loop.shutdown_asyncgens())
+        self.loop.run_until_complete(self.loop.shutdown_asyncgens())
         self.loop.close()
 
         queue = multiprocessing.Queue()
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
index 44d08f5c55..ab8bce3d57 100644
--- a/bitbake/lib/bb/build.py
+++ b/bitbake/lib/bb/build.py
@@ -197,6 +197,8 @@ def exec_func(func, d, dirs = None):
         for cdir in d.expand(cleandirs).split():
             bb.utils.remove(cdir, True)
             bb.utils.mkdirhier(cdir)
+            if cdir == oldcwd:
+                os.chdir(cdir)
 
     if flags and dirs is None:
         dirs = flags.get('dirs')
diff --git a/bitbake/lib/bb/codeparser.py b/bitbake/lib/bb/codeparser.py
index 2e8b7ced3c..691bdff75e 100644
--- a/bitbake/lib/bb/codeparser.py
+++ b/bitbake/lib/bb/codeparser.py
@@ -484,19 +484,34 @@ class ShellParser():
484 """ 484 """
485 485
486 words = list(words) 486 words = list(words)
487 for word in list(words): 487 for word in words:
488 wtree = pyshlex.make_wordtree(word[1]) 488 wtree = pyshlex.make_wordtree(word[1])
489 for part in wtree: 489 for part in wtree:
490 if not isinstance(part, list): 490 if not isinstance(part, list):
491 continue 491 continue
492 492
493 if part[0] in ('`', '$('): 493 candidates = [part]
494 command = pyshlex.wordtree_as_string(part[1:-1]) 494
495 self._parse_shell(command) 495 # If command is of type:
496 496 #
497 if word[0] in ("cmd_name", "cmd_word"): 497 # var="... $(cmd [...]) ..."
498 if word in words: 498 #
499 words.remove(word) 499 # Then iterate on what's between the quotes and if we find a
500 # list, make that what we check for below.
501 if len(part) >= 3 and part[0] == '"':
502 for p in part[1:-1]:
503 if isinstance(p, list):
504 candidates.append(p)
505
506 for candidate in candidates:
507 if len(candidate) >= 2:
508 if candidate[0] in ('`', '$('):
509 command = pyshlex.wordtree_as_string(candidate[1:-1])
510 self._parse_shell(command)
511
512 if word[0] in ("cmd_name", "cmd_word"):
513 if word in words:
514 words.remove(word)
500 515
501 usetoken = False 516 usetoken = False
502 for word in words: 517 for word in words:
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index 25b614f1e4..6754f986bf 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -318,9 +318,10 @@ class BBCooker:
             try:
                 with hashserv.create_client(upstream) as client:
                     client.ping()
-            except ConnectionError as e:
+            except (ConnectionError, ImportError) as e:
                 bb.warn("BB_HASHSERVE_UPSTREAM is not valid, unable to connect hash equivalence server at '%s': %s"
                          % (upstream, repr(e)))
+                upstream = None
 
         self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
         self.hashserv = hashserv.create_server(
@@ -679,14 +680,14 @@
         bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
         return taskdata, runlist
 
-    def prepareTreeData(self, pkgs_to_build, task):
+    def prepareTreeData(self, pkgs_to_build, task, halt=False):
         """
         Prepare a runqueue and taskdata object for iteration over pkgs_to_build
         """
 
         # We set halt to False here to prevent unbuildable targets raising
         # an exception when we're just generating data
-        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)
+        taskdata, runlist = self.buildTaskData(pkgs_to_build, task, halt, allowincomplete=True)
 
         return runlist, taskdata
 
@@ -700,7 +701,7 @@
         if not task.startswith("do_"):
             task = "do_%s" % task
 
-        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task)
+        runlist, taskdata = self.prepareTreeData(pkgs_to_build, task, halt=True)
         rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
         rq.rqdata.prepare()
         return self.buildDependTree(rq, taskdata)
@@ -1458,7 +1459,6 @@
 
             if t in task or getAllTaskSignatures:
                 try:
-                    rq.rqdata.prepare_task_hash(tid)
                     sig.append([pn, t, rq.rqdata.get_task_unihash(tid)])
                 except KeyError:
                     sig.append(self.getTaskSignatures(target, [t])[0])
@@ -1812,8 +1812,8 @@ class CookerCollectFiles(object):
             bb.event.fire(CookerExit(), eventdata)
 
         # We need to track where we look so that we can know when the cache is invalid. There
-        # is no nice way to do this, this is horrid. We intercept the os.listdir()
-        # (or os.scandir() for python 3.6+) calls while we run glob().
+        # is no nice way to do this, this is horrid. We intercept the os.listdir() and os.scandir()
+        # calls while we run glob().
         origlistdir = os.listdir
         if hasattr(os, 'scandir'):
             origscandir = os.scandir
@@ -2224,9 +2224,8 @@
 
         for process in self.processes:
             process.join()
-            # Added in 3.7, cleans up zombies
-            if hasattr(process, "close"):
-                process.close()
+            # clean up zombies
+            process.close()
 
         bb.codeparser.parser_cache_save()
         bb.codeparser.parser_cache_savemerge()
@@ -2236,12 +2235,13 @@
             profiles = []
             for i in self.process_names:
                 logfile = "profile-parse-%s.log" % i
-                if os.path.exists(logfile):
+                if os.path.exists(logfile) and os.path.getsize(logfile):
                     profiles.append(logfile)
 
-            pout = "profile-parse.log.processed"
-            bb.utils.process_profilelog(profiles, pout = pout)
-            print("Processed parsing statistics saved to %s" % (pout))
+            if profiles:
+                pout = "profile-parse.log.processed"
+                bb.utils.process_profilelog(profiles, pout = pout)
+                print("Processed parsing statistics saved to %s" % (pout))
 
     def final_cleanup(self):
         if self.syncthread:
diff --git a/bitbake/lib/bb/fetch2/crate.py b/bitbake/lib/bb/fetch2/crate.py
index 01d49435c3..e611736f06 100644
--- a/bitbake/lib/bb/fetch2/crate.py
+++ b/bitbake/lib/bb/fetch2/crate.py
@@ -70,6 +70,7 @@ class Crate(Wget):
         host = 'crates.io/api/v1/crates'
 
         ud.url = "https://%s/%s/%s/download" % (host, name, version)
+        ud.versionsurl = "https://%s/%s/versions" % (host, name)
         ud.parm['downloadfilename'] = "%s-%s.crate" % (name, version)
         if 'name' not in ud.parm:
             ud.parm['name'] = '%s-%s' % (name, version)
@@ -139,3 +140,11 @@
             mdpath = os.path.join(bbpath, cratepath, mdfile)
             with open(mdpath, "w") as f:
                 json.dump(metadata, f)
+
+    def latest_versionstring(self, ud, d):
+        from functools import cmp_to_key
+        json_data = json.loads(self._fetch_index(ud.versionsurl, ud, d))
+        versions = [(0, i["num"], "") for i in json_data["versions"]]
+        versions = sorted(versions, key=cmp_to_key(bb.utils.vercmp))
+
+        return (versions[-1][1], "")
diff --git a/bitbake/lib/bb/fetch2/npmsw.py b/bitbake/lib/bb/fetch2/npmsw.py
index ff5f8dc755..b55e885d7b 100644
--- a/bitbake/lib/bb/fetch2/npmsw.py
+++ b/bitbake/lib/bb/fetch2/npmsw.py
@@ -268,7 +268,7 @@ class NpmShrinkWrap(FetchMethod):
 
     def unpack(self, ud, rootdir, d):
         """Unpack the downloaded dependencies"""
-        destdir = d.getVar("S")
+        destdir = rootdir
         destsuffix = ud.parm.get("destsuffix")
         if destsuffix:
             destdir = os.path.join(rootdir, destsuffix)
diff --git a/bitbake/lib/bb/fetch2/svn.py b/bitbake/lib/bb/fetch2/svn.py
index d40e4d2909..0852108e7d 100644
--- a/bitbake/lib/bb/fetch2/svn.py
+++ b/bitbake/lib/bb/fetch2/svn.py
@@ -210,3 +210,6 @@ class Svn(FetchMethod):
 
     def _build_revision(self, ud, d):
         return ud.revision
+
+    def supports_checksum(self, urldata):
+        return False
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py
index fbfa6938ac..2e92117634 100644
--- a/bitbake/lib/bb/fetch2/wget.py
+++ b/bitbake/lib/bb/fetch2/wget.py
@@ -108,7 +108,8 @@ class Wget(FetchMethod):
 
         fetchcmd = self.basecmd
 
-        localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile) + ".tmp"
+        dldir = os.path.realpath(d.getVar("DL_DIR"))
+        localpath = os.path.join(dldir, ud.localfile) + ".tmp"
         bb.utils.mkdirhier(os.path.dirname(localpath))
         fetchcmd += " -O %s" % shlex.quote(localpath)
 
@@ -128,12 +129,21 @@
             uri = ud.url.split(";")[0]
             if os.path.exists(ud.localpath):
                 # file exists, but we didnt complete it.. trying again..
-                fetchcmd += d.expand(" -c -P ${DL_DIR} '%s'" % uri)
+                fetchcmd += " -c -P " + dldir + " '" + uri + "'"
             else:
-                fetchcmd += d.expand(" -P ${DL_DIR} '%s'" % uri)
+                fetchcmd += " -P " + dldir + " '" + uri + "'"
 
         self._runwget(ud, d, fetchcmd, False)
 
+        # Sanity check since wget can pretend it succeed when it didn't
+        # Also, this used to happen if sourceforge sent us to the mirror page
+        if not os.path.exists(localpath):
+            raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, localpath), uri)
+
+        if os.path.getsize(localpath) == 0:
+            os.remove(localpath)
+            raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (uri), uri)
+
         # Try and verify any checksum now, meaning if it isn't correct, we don't remove the
         # original file, which might be a race (imagine two recipes referencing the same
         # source, one with an incorrect checksum)
@@ -143,15 +153,6 @@
         # Our lock prevents multiple writers but mirroring code may grab incomplete files
         os.rename(localpath, localpath[:-4])
 
-        # Sanity check since wget can pretend it succeed when it didn't
-        # Also, this used to happen if sourceforge sent us to the mirror page
-        if not os.path.exists(ud.localpath):
-            raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)
-
-        if os.path.getsize(ud.localpath) == 0:
-            os.remove(ud.localpath)
-            raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (uri), uri)
-
         return True
 
     def checkstatus(self, fetch, ud, d, try_again=True):
157 def checkstatus(self, fetch, ud, d, try_again=True): 158 def checkstatus(self, fetch, ud, d, try_again=True):
diff --git a/bitbake/lib/bb/parse/__init__.py b/bitbake/lib/bb/parse/__init__.py
index a4358f1374..7ffdaa6fd7 100644
--- a/bitbake/lib/bb/parse/__init__.py
+++ b/bitbake/lib/bb/parse/__init__.py
@@ -49,20 +49,23 @@ class SkipPackage(SkipRecipe):
 __mtime_cache = {}
 def cached_mtime(f):
     if f not in __mtime_cache:
-        __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
+        res = os.stat(f)
+        __mtime_cache[f] = (res.st_mtime_ns, res.st_size, res.st_ino)
     return __mtime_cache[f]
 
 def cached_mtime_noerror(f):
     if f not in __mtime_cache:
         try:
-            __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
+            res = os.stat(f)
+            __mtime_cache[f] = (res.st_mtime_ns, res.st_size, res.st_ino)
         except OSError:
             return 0
     return __mtime_cache[f]
 
 def check_mtime(f, mtime):
     try:
-        current_mtime = os.stat(f)[stat.ST_MTIME]
+        res = os.stat(f)
+        current_mtime = (res.st_mtime_ns, res.st_size, res.st_ino)
         __mtime_cache[f] = current_mtime
     except OSError:
         current_mtime = 0
@@ -70,7 +73,8 @@ def check_mtime(f, mtime):
 
 def update_mtime(f):
     try:
-        __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
+        res = os.stat(f)
+        __mtime_cache[f] = (res.st_mtime_ns, res.st_size, res.st_ino)
     except OSError:
         if f in __mtime_cache:
             del __mtime_cache[f]
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
index bc7e18175d..3462ed4457 100644
--- a/bitbake/lib/bb/runqueue.py
+++ b/bitbake/lib/bb/runqueue.py
@@ -1273,27 +1273,41 @@ class RunQueueData:
 
         bb.parse.siggen.set_setscene_tasks(self.runq_setscene_tids)
 
+        starttime = time.time()
+        lasttime = starttime
+
         # Iterate over the task list and call into the siggen code
         dealtwith = set()
         todeal = set(self.runtaskentries)
         while todeal:
+            ready = set()
             for tid in todeal.copy():
                 if not (self.runtaskentries[tid].depends - dealtwith):
-                    dealtwith.add(tid)
-                    todeal.remove(tid)
-                    self.prepare_task_hash(tid)
-            bb.event.check_for_interrupts(self.cooker.data)
+                    self.runtaskentries[tid].taskhash_deps = bb.parse.siggen.prep_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches)
+                    # get_taskhash for a given tid *must* be called before get_unihash* below
+                    self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches)
+                    ready.add(tid)
+            unihashes = bb.parse.siggen.get_unihashes(ready)
+            for tid in ready:
+                dealtwith.add(tid)
+                todeal.remove(tid)
+                self.runtaskentries[tid].unihash = unihashes[tid]
+
+            bb.event.check_for_interrupts(self.cooker.data)
+
+            if time.time() > (lasttime + 30):
+                lasttime = time.time()
+                hashequiv_logger.verbose("Initial setup loop progress: %s of %s in %s" % (len(todeal), len(self.runtaskentries), lasttime - starttime))
+
+        endtime = time.time()
+        if (endtime-starttime > 60):
+            hashequiv_logger.verbose("Initial setup loop took: %s" % (endtime-starttime))
 
         bb.parse.siggen.writeout_file_checksum_cache()
 
         #self.dump_data()
         return len(self.runtaskentries)
 
-    def prepare_task_hash(self, tid):
-        bb.parse.siggen.prep_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches)
-        self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches)
-        self.runtaskentries[tid].unihash = bb.parse.siggen.get_unihash(tid)
-
     def dump_data(self):
         """
         Dump some debug information on the internal data structures
@@ -2438,14 +2452,17 @@ class RunQueueExecute:
         taskdepdata_cache = {}
         for task in self.rqdata.runtaskentries:
             (mc, fn, taskname, taskfn) = split_tid_mcfn(task)
-            pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
-            deps = self.rqdata.runtaskentries[task].depends
-            provides = self.rqdata.dataCaches[mc].fn_provides[taskfn]
-            taskhash = self.rqdata.runtaskentries[task].hash
-            unihash = self.rqdata.runtaskentries[task].unihash
-            deps = self.filtermcdeps(task, mc, deps)
-            hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn]
-            taskdepdata_cache[task] = [pn, taskname, fn, deps, provides, taskhash, unihash, hashfn]
+            taskdepdata_cache[task] = bb.TaskData(
+                pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn],
+                taskname = taskname,
+                fn = fn,
+                deps = self.filtermcdeps(task, mc, self.rqdata.runtaskentries[task].depends),
+                provides = self.rqdata.dataCaches[mc].fn_provides[taskfn],
+                taskhash = self.rqdata.runtaskentries[task].hash,
+                unihash = self.rqdata.runtaskentries[task].unihash,
+                hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn],
+                taskhash_deps = self.rqdata.runtaskentries[task].taskhash_deps,
+            )
 
         self.taskdepdata_cache = taskdepdata_cache
 
@@ -2460,9 +2477,11 @@
         while next:
             additional = []
             for revdep in next:
-                self.taskdepdata_cache[revdep][6] = self.rqdata.runtaskentries[revdep].unihash
+                self.taskdepdata_cache[revdep] = self.taskdepdata_cache[revdep]._replace(
+                    unihash=self.rqdata.runtaskentries[revdep].unihash
+                )
                 taskdepdata[revdep] = self.taskdepdata_cache[revdep]
-                for revdep2 in self.taskdepdata_cache[revdep][3]:
+                for revdep2 in self.taskdepdata_cache[revdep].deps:
                     if revdep2 not in taskdepdata:
                         additional.append(revdep2)
             next = additional
@@ -2531,9 +2550,6 @@
                     self.rqdata.runtaskentries[hashtid].unihash = unihash
                     bb.parse.siggen.set_unihash(hashtid, unihash)
                     toprocess.add(hashtid)
-        if torehash:
-            # Need to save after set_unihash above
-            bb.parse.siggen.save_unitaskhashes()
 
         # Work out all tasks which depend upon these
         total = set()
@@ -2556,17 +2572,28 @@
             elif self.rqdata.runtaskentries[p].depends.isdisjoint(total):
                 next.add(p)
 
+        starttime = time.time()
+        lasttime = starttime
+
         # When an item doesn't have dependencies in total, we can process it. Drop items from total when handled
         while next:
             current = next.copy()
             next = set()
+            ready = {}
             for tid in current:
                 if self.rqdata.runtaskentries[p].depends and not self.rqdata.runtaskentries[tid].depends.isdisjoint(total):
                     continue
+                # get_taskhash for a given tid *must* be called before get_unihash* below
+                ready[tid] = bb.parse.siggen.get_taskhash(tid, self.rqdata.runtaskentries[tid].depends, self.rqdata.dataCaches)
+
+            unihashes = bb.parse.siggen.get_unihashes(ready.keys())
+
+            for tid in ready:
                 orighash = self.rqdata.runtaskentries[tid].hash
-                newhash = bb.parse.siggen.get_taskhash(tid, self.rqdata.runtaskentries[tid].depends, self.rqdata.dataCaches)
+                newhash = ready[tid]
                 origuni = self.rqdata.runtaskentries[tid].unihash
-                newuni = bb.parse.siggen.get_unihash(tid)
+                newuni = unihashes[tid]
+
                 # FIXME, need to check it can come from sstate at all for determinism?
                 remapped = False
                 if newuni == origuni:
@@ -2587,6 +2614,15 @@
                     next |= self.rqdata.runtaskentries[tid].revdeps
                     total.remove(tid)
                     next.intersection_update(total)
+            bb.event.check_for_interrupts(self.cooker.data)
+
+            if time.time() > (lasttime + 30):
+                lasttime = time.time()
+                hashequiv_logger.verbose("Rehash loop slow progress: %s in %s" % (len(total), lasttime - starttime))
+
+        endtime = time.time()
+        if (endtime-starttime > 60):
+            hashequiv_logger.verbose("Rehash loop took more than 60s: %s" % (endtime-starttime))
 
         if changed:
             for mc in self.rq.worker:
@@ -2806,13 +2842,19 @@
             additional = []
             for revdep in next:
                 (mc, fn, taskname, taskfn) = split_tid_mcfn(revdep)
-                pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
                 deps = getsetscenedeps(revdep)
-                provides = self.rqdata.dataCaches[mc].fn_provides[taskfn]
-                taskhash = self.rqdata.runtaskentries[revdep].hash
-                unihash = self.rqdata.runtaskentries[revdep].unihash
-                hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn]
-                taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash, unihash, hashfn]
+
+                taskdepdata[revdep] = bb.TaskData(
+                    pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn],
+                    taskname = taskname,
+                    fn = fn,
+                    deps = deps,
+                    provides = self.rqdata.dataCaches[mc].fn_provides[taskfn],
+                    taskhash = self.rqdata.runtaskentries[revdep].hash,
+                    unihash = self.rqdata.runtaskentries[revdep].unihash,
+                    hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn],
+                    taskhash_deps = self.rqdata.runtaskentries[revdep].taskhash_deps,
+                )
                 for revdep2 in deps:
                     if revdep2 not in taskdepdata:
                         additional.append(revdep2)
diff --git a/bitbake/lib/bb/siggen.py b/bitbake/lib/bb/siggen.py
index 8ab08ec961..89b70fb6a4 100644
--- a/bitbake/lib/bb/siggen.py
+++ b/bitbake/lib/bb/siggen.py
@@ -201,9 +201,6 @@ class SignatureGenerator(object):
     def save_unitaskhashes(self):
         return
 
-    def copy_unitaskhashes(self, targetdir):
-        return
-
     def set_setscene_tasks(self, setscene_tasks):
         return
 
@@ -381,7 +378,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
             self.taints[tid] = taint
             logger.warning("%s is tainted from a forced run" % tid)
 
-        return
+        return set(dep for _, dep in self.runtaskdeps[tid])
 
     def get_taskhash(self, tid, deps, dataCaches):
 
@@ -418,9 +415,6 @@
     def save_unitaskhashes(self):
         self.unihash_cache.save(self.unitaskhashes)
 
-    def copy_unitaskhashes(self, targetdir):
-        self.unihash_cache.copyfile(targetdir)
-
     def dump_sigtask(self, mcfn, task, stampbase, runtime):
         tid = mcfn + ":" + task
         mc = bb.runqueue.mc_from_tid(mcfn)
@@ -540,7 +534,7 @@ class SignatureGeneratorUniHashMixIn(object):
     def __init__(self, data):
         self.extramethod = {}
         # NOTE: The cache only tracks hashes that exist. Hashes that don't
-        # exist are always queries from the server since it is possible for
+        # exist are always queried from the server since it is possible for
         # hashes to appear over time, but much less likely for them to
         # disappear
         self.unihash_exists_cache = set()
@@ -558,11 +552,11 @@
         super().__init__(data)
 
     def get_taskdata(self):
-        return (self.server, self.method, self.extramethod, self.max_parallel, self.username, self.password, self.env) + super().get_taskdata()
+        return (self.server, self.method, self.extramethod, self.username, self.password, self.env) + super().get_taskdata()
 
     def set_taskdata(self, data):
-        self.server, self.method, self.extramethod, self.max_parallel, self.username, self.password, self.env = data[:7]
-        super().set_taskdata(data[7:])
+        self.server, self.method, self.extramethod, self.username, self.password, self.env = data[:6]
+        super().set_taskdata(data[6:])
 
     def get_hashserv_creds(self):
         if self.username and self.password:
@@ -595,13 +589,6 @@
             self._client = hashserv.create_client(self.server, **self.get_hashserv_creds())
         yield self._client
 
-    @contextmanager
-    def client_pool(self):
-        with self._client_env():
-            if getattr(self, '_client_pool', None) is None:
-                self._client_pool = hashserv.client.ClientPool(self.server, self.max_parallel, **self.get_hashserv_creds())
-            yield self._client_pool
-
     def reset(self, data):
         self.__close_clients()
         return super().reset(data)
@@ -678,25 +665,20 @@
         if len(query) == 0:
             return {}
 
-        uncached_query = {}
+        query_keys = []
         result = {}
         for key, unihash in query.items():
             if unihash in self.unihash_exists_cache:
                 result[key] = True
             else:
-                uncached_query[key] = unihash
+                query_keys.append(key)
 
-        if self.max_parallel <= 1 or len(uncached_query) <= 1:
-            # No parallelism required. Make the query serially with the single client
+        if query_keys:
             with self.client() as client:
-                uncached_result = {
-                    key: client.unihash_exists(value) for key, value in uncached_query.items()
-                }
-        else:
-            with self.client_pool() as client_pool:
-                uncached_result = client_pool.unihashes_exist(uncached_query)
+                query_result = client.unihash_exists_batch(query[k] for k in query_keys)
 
-        for key, exists in uncached_result.items():
+        for idx, key in enumerate(query_keys):
+            exists = query_result[idx]
             if exists:
                 self.unihash_exists_cache.add(query[key])
             result[key] = exists
@@ -712,29 +694,20 @@
             unihash
         """
         result = {}
-        queries = {}
-        query_result = {}
+        query_tids = []
 
         for tid in tids:
             unihash = self.get_cached_unihash(tid)
             if unihash:
                 result[tid] = unihash
             else:
-                queries[tid] = (self._get_method(tid), self.taskhash[tid])
-
-        if len(queries) == 0:
-            return result
+                query_tids.append(tid)
 
-        if self.max_parallel <= 1 or len(queries) <= 1:
-            # No parallelism required. Make the query serially with the single client
+        if query_tids:
             with self.client() as client:
-                for tid, args in queries.items():
-                    query_result[tid] = client.get_unihash(*args)
-        else:
-            with self.client_pool() as client_pool:
-                query_result = client_pool.get_unihashes(queries)
+                unihashes = client.get_unihash_batch((self._get_method(tid), self.taskhash[tid]) for tid in query_tids)
 
-        for tid, unihash in query_result.items():
+        for idx, tid in enumerate(query_tids):
             # In the absence of being able to discover a unique hash from the
             # server, make it be equivalent to the taskhash. The unique "hash" only
             # really needs to be a unique string (not even necessarily a hash), but
@@ -749,6 +722,8 @@
             # to the server, there is a better chance that they will agree on
             # the unique hash.
             taskhash = self.taskhash[tid]
+            unihash = unihashes[idx]
+
             if unihash:
                 # A unique hash equal to the taskhash is not very interesting,
                 # so it is reported it at debug level 2. If they differ, that
@@ -895,7 +870,6 @@ class SignatureGeneratorTestEquivHash(SignatureGeneratorUniHashMixIn, SignatureG
         super().init_rundepcheck(data)
         self.server = data.getVar('BB_HASHSERVE')
         self.method = "sstate_output_hash"
-        self.max_parallel = 1
 
 def clean_checksum_file_path(file_checksum_tuple):
     f, cs = file_checksum_tuple
diff --git a/bitbake/lib/bb/tests/codeparser.py b/bitbake/lib/bb/tests/codeparser.py
index f6585fb3aa..c0d1362a0c 100644
--- a/bitbake/lib/bb/tests/codeparser.py
+++ b/bitbake/lib/bb/tests/codeparser.py
@@ -106,6 +106,46 @@ ${D}${libdir}/pkgconfig/*.pc
         self.parseExpression("foo=$(echo bar)")
         self.assertExecs(set(["echo"]))
 
+    def test_assign_subshell_expansion_quotes(self):
+        self.parseExpression('foo="$(echo bar)"')
+        self.assertExecs(set(["echo"]))
+
+    def test_assign_subshell_expansion_nested(self):
+        self.parseExpression('foo="$(func1 "$(func2 bar$(func3))")"')
+        self.assertExecs(set(["func1", "func2", "func3"]))
+
+    def test_assign_subshell_expansion_multiple(self):
+        self.parseExpression('foo="$(func1 "$(func2)") $(func3)"')
+        self.assertExecs(set(["func1", "func2", "func3"]))
+
+    def test_assign_subshell_expansion_escaped_quotes(self):
+        self.parseExpression('foo="\\"fo\\"o$(func1)"')
+        self.assertExecs(set(["func1"]))
+
+    def test_assign_subshell_expansion_empty(self):
+        self.parseExpression('foo="bar$()foo"')
+        self.assertExecs(set())
+
+    def test_assign_subshell_backticks(self):
+        self.parseExpression("foo=`echo bar`")
+        self.assertExecs(set(["echo"]))
+
+    def test_assign_subshell_backticks_quotes(self):
+        self.parseExpression('foo="`echo bar`"')
+        self.assertExecs(set(["echo"]))
+
+    def test_assign_subshell_backticks_multiple(self):
+        self.parseExpression('foo="`func1 bar` `func2`"')
+        self.assertExecs(set(["func1", "func2"]))
+
+    def test_assign_subshell_backticks_escaped_quotes(self):
+        self.parseExpression('foo="\\"fo\\"o`func1`"')
+        self.assertExecs(set(["func1"]))
+
+    def test_assign_subshell_backticks_empty(self):
+        self.parseExpression('foo="bar``foo"')
+        self.assertExecs(set())
+
     def test_shell_unexpanded(self):
         self.setEmptyVars(["QT_BASE_NAME"])
         self.parseExpression('echo "${QT_BASE_NAME}"')
diff --git a/bitbake/lib/bb/tests/fetch.py b/bitbake/lib/bb/tests/fetch.py
index 85c1f79ff3..701129d138 100644
--- a/bitbake/lib/bb/tests/fetch.py
+++ b/bitbake/lib/bb/tests/fetch.py
@@ -511,7 +511,8 @@ class MirrorUriTest(FetcherTest):
511 mirrorvar = "http://.*/.* file:///somepath/downloads/ " \ 511 mirrorvar = "http://.*/.* file:///somepath/downloads/ " \
512 "git://someserver.org/bitbake git://git.openembedded.org/bitbake " \ 512 "git://someserver.org/bitbake git://git.openembedded.org/bitbake " \
513 "https://.*/.* file:///someotherpath/downloads/ " \ 513 "https://.*/.* file:///someotherpath/downloads/ " \
514 "http://.*/.* file:///someotherpath/downloads/" 514 "http://.*/.* file:///someotherpath/downloads/ " \
515 "svn://svn.server1.com/ svn://svn.server2.com/"
515 516
516 def test_urireplace(self): 517 def test_urireplace(self):
517 self.d.setVar("FILESPATH", ".") 518 self.d.setVar("FILESPATH", ".")
@@ -535,6 +536,13 @@ class MirrorUriTest(FetcherTest):
535 uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d) 536 uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
536 self.assertEqual(uris, ['file:///someotherpath/downloads/bitbake-1.0.tar.gz']) 537 self.assertEqual(uris, ['file:///someotherpath/downloads/bitbake-1.0.tar.gz'])
537 538
539 def test_urilistsvn(self):
540 # Catch svn:// -> svn:// bug
541 fetcher = bb.fetch.FetchData("svn://svn.server1.com/isource/svnroot/reponame/tags/tagname;module=path_in_tagnamefolder;protocol=https;rev=2", self.d)
542 mirrors = bb.fetch2.mirror_from_string(self.mirrorvar)
543 uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
544 self.assertEqual(uris, ['svn://svn.server2.com/isource/svnroot/reponame/tags/tagname;module=path_in_tagnamefolder;protocol=https;rev=2'])
545
538 def test_mirror_of_mirror(self): 546 def test_mirror_of_mirror(self):
539 # Test if mirror of a mirror works 547 # Test if mirror of a mirror works
540 mirrorvar = self.mirrorvar + " http://.*/.* http://otherdownloads.yoctoproject.org/downloads/" 548 mirrorvar = self.mirrorvar + " http://.*/.* http://otherdownloads.yoctoproject.org/downloads/"
@@ -1421,7 +1429,7 @@ class FetchLatestVersionTest(FetcherTest):
1421 # combination version pattern 1429 # combination version pattern
1422 ("sysprof", "git://gitlab.gnome.org/GNOME/sysprof.git;protocol=https;branch=master", "cd44ee6644c3641507fb53b8a2a69137f2971219", "", "") 1430 ("sysprof", "git://gitlab.gnome.org/GNOME/sysprof.git;protocol=https;branch=master", "cd44ee6644c3641507fb53b8a2a69137f2971219", "", "")
1423 : "1.2.0", 1431 : "1.2.0",
1424 ("u-boot-mkimage", "git://git.denx.de/u-boot.git;branch=master;protocol=git", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "", "") 1432 ("u-boot-mkimage", "git://source.denx.de/u-boot/u-boot.git;branch=master;protocol=https", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "", "")
1425 : "2014.01", 1433 : "2014.01",
1426 # version pattern "yyyymmdd" 1434 # version pattern "yyyymmdd"
1427 ("mobile-broadband-provider-info", "git://gitlab.gnome.org/GNOME/mobile-broadband-provider-info.git;protocol=https;branch=master", "4ed19e11c2975105b71b956440acdb25d46a347d", "", "") 1435 ("mobile-broadband-provider-info", "git://gitlab.gnome.org/GNOME/mobile-broadband-provider-info.git;protocol=https;branch=master", "4ed19e11c2975105b71b956440acdb25d46a347d", "", "")
@@ -1493,6 +1501,12 @@ class FetchLatestVersionTest(FetcherTest):
1493 : "2.8", 1501 : "2.8",
1494 } 1502 }
1495 1503
1504 test_crate_uris = {
1505 # basic example; version pattern "A.B.C+cargo-D.E.F"
1506 ("cargo-c", "crate://crates.io/cargo-c/0.9.18+cargo-0.69")
1507 : "0.9.29"
1508 }
1509
1496 @skipIfNoNetwork() 1510 @skipIfNoNetwork()
1497 def test_git_latest_versionstring(self): 1511 def test_git_latest_versionstring(self):
1498 for k, v in self.test_git_uris.items(): 1512 for k, v in self.test_git_uris.items():
@@ -1511,7 +1525,7 @@ class FetchLatestVersionTest(FetcherTest):
1511 1525
1512 def test_wget_latest_versionstring(self): 1526 def test_wget_latest_versionstring(self):
1513 testdata = os.path.dirname(os.path.abspath(__file__)) + "/fetch-testdata" 1527 testdata = os.path.dirname(os.path.abspath(__file__)) + "/fetch-testdata"
1514 server = HTTPService(testdata) 1528 server = HTTPService(testdata, host="127.0.0.1")
1515 server.start() 1529 server.start()
1516 port = server.port 1530 port = server.port
1517 try: 1531 try:
@@ -1519,10 +1533,10 @@ class FetchLatestVersionTest(FetcherTest):
1519 self.d.setVar("PN", k[0]) 1533 self.d.setVar("PN", k[0])
1520 checkuri = "" 1534 checkuri = ""
1521 if k[2]: 1535 if k[2]:
1522 checkuri = "http://localhost:%s/" % port + k[2] 1536 checkuri = "http://127.0.0.1:%s/" % port + k[2]
1523 self.d.setVar("UPSTREAM_CHECK_URI", checkuri) 1537 self.d.setVar("UPSTREAM_CHECK_URI", checkuri)
1524 self.d.setVar("UPSTREAM_CHECK_REGEX", k[3]) 1538 self.d.setVar("UPSTREAM_CHECK_REGEX", k[3])
1525 url = "http://localhost:%s/" % port + k[1] 1539 url = "http://127.0.0.1:%s/" % port + k[1]
1526 ud = bb.fetch2.FetchData(url, self.d) 1540 ud = bb.fetch2.FetchData(url, self.d)
1527 pupver = ud.method.latest_versionstring(ud, self.d) 1541 pupver = ud.method.latest_versionstring(ud, self.d)
1528 verstring = pupver[0] 1542 verstring = pupver[0]
@@ -1532,6 +1546,16 @@ class FetchLatestVersionTest(FetcherTest):
1532 finally: 1546 finally:
1533 server.stop() 1547 server.stop()
1534 1548
1549 @skipIfNoNetwork()
1550 def test_crate_latest_versionstring(self):
1551 for k, v in self.test_crate_uris.items():
1552 self.d.setVar("PN", k[0])
1553 ud = bb.fetch2.FetchData(k[1], self.d)
1554 pupver = ud.method.latest_versionstring(ud, self.d)
1555 verstring = pupver[0]
1556 self.assertTrue(verstring, msg="Could not find upstream version for %s" % k[0])
1557 r = bb.utils.vercmp_string(v, verstring)
1558 self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))
1535 1559
1536class FetchCheckStatusTest(FetcherTest): 1560class FetchCheckStatusTest(FetcherTest):
1537 test_wget_uris = ["https://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz", 1561 test_wget_uris = ["https://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz",
diff --git a/bitbake/lib/bb/ui/buildinfohelper.py b/bitbake/lib/bb/ui/buildinfohelper.py
index 8b212b7803..4ee45d67a2 100644
--- a/bitbake/lib/bb/ui/buildinfohelper.py
+++ b/bitbake/lib/bb/ui/buildinfohelper.py
@@ -559,7 +559,10 @@ class ORMWrapper(object):
                 # we might have an invalid link; no way to detect this. just set it to None
                 filetarget_obj = None
 
-            parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
+            try:
+                parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
+            except Target_File.DoesNotExist:
+                parent_obj = None
 
             Target_File.objects.create(
                 target = target_obj,