Diffstat (limited to 'bitbake/lib/bb')
-rw-r--r--  bitbake/lib/bb/COW.py | 2
-rw-r--r--  bitbake/lib/bb/__init__.py | 32
-rwxr-xr-x  bitbake/lib/bb/acl.py | 215
-rw-r--r--  bitbake/lib/bb/asyncrpc/__init__.py | 16
-rw-r--r--  bitbake/lib/bb/asyncrpc/client.py | 313
-rw-r--r--  bitbake/lib/bb/asyncrpc/connection.py | 146
-rw-r--r--  bitbake/lib/bb/asyncrpc/exceptions.py | 21
-rw-r--r--  bitbake/lib/bb/asyncrpc/serv.py | 391
-rw-r--r--  bitbake/lib/bb/build.py | 270
-rw-r--r--  bitbake/lib/bb/cache.py | 340
-rw-r--r--  bitbake/lib/bb/checksum.py | 22
-rw-r--r--  bitbake/lib/bb/codeparser.py | 110
-rw-r--r--  bitbake/lib/bb/command.py | 112
-rw-r--r--  bitbake/lib/bb/compress/_pipecompress.py | 196
-rw-r--r--  bitbake/lib/bb/compress/lz4.py | 19
-rw-r--r--  bitbake/lib/bb/compress/zstd.py | 30
-rw-r--r--  bitbake/lib/bb/cooker.py | 758
-rw-r--r--  bitbake/lib/bb/cookerdata.py | 197
-rw-r--r--  bitbake/lib/bb/daemonize.py | 44
-rw-r--r--  bitbake/lib/bb/data.py | 147
-rw-r--r--  bitbake/lib/bb/data_smart.py | 268
-rw-r--r--  bitbake/lib/bb/event.py | 179
-rw-r--r--  bitbake/lib/bb/exceptions.py | 2
-rw-r--r--  bitbake/lib/bb/fetch2/README | 57
-rw-r--r--  bitbake/lib/bb/fetch2/__init__.py | 365
-rw-r--r--  bitbake/lib/bb/fetch2/az.py | 93
-rw-r--r--  bitbake/lib/bb/fetch2/crate.py | 141
-rw-r--r--  bitbake/lib/bb/fetch2/gcp.py | 102
-rw-r--r--  bitbake/lib/bb/fetch2/git.py | 294
-rw-r--r--  bitbake/lib/bb/fetch2/gitsm.py | 49
-rw-r--r--  bitbake/lib/bb/fetch2/hg.py | 1
-rw-r--r--  bitbake/lib/bb/fetch2/local.py | 16
-rw-r--r--  bitbake/lib/bb/fetch2/npm.py | 63
-rw-r--r--  bitbake/lib/bb/fetch2/npmsw.py | 94
-rw-r--r--  bitbake/lib/bb/fetch2/osc.py | 52
-rw-r--r--  bitbake/lib/bb/fetch2/perforce.py | 2
-rw-r--r--  bitbake/lib/bb/fetch2/s3.py | 41
-rw-r--r--  bitbake/lib/bb/fetch2/sftp.py | 2
-rw-r--r--  bitbake/lib/bb/fetch2/ssh.py | 47
-rw-r--r--  bitbake/lib/bb/fetch2/svn.py | 12
-rw-r--r--  bitbake/lib/bb/fetch2/wget.py | 181
-rwxr-xr-x  bitbake/lib/bb/main.py | 407
-rw-r--r--  bitbake/lib/bb/monitordisk.py | 24
-rw-r--r--  bitbake/lib/bb/msg.py | 34
-rw-r--r--  bitbake/lib/bb/parse/__init__.py | 14
-rw-r--r--  bitbake/lib/bb/parse/ast.py | 85
-rw-r--r--  bitbake/lib/bb/parse/parse_py/BBHandler.py | 72
-rw-r--r--  bitbake/lib/bb/parse/parse_py/ConfHandler.py | 38
-rw-r--r--  bitbake/lib/bb/persist_data.py | 80
-rw-r--r--  bitbake/lib/bb/process.py | 9
-rw-r--r--  bitbake/lib/bb/progress.py | 9
-rw-r--r--  bitbake/lib/bb/providers.py | 92
-rw-r--r--  bitbake/lib/bb/runqueue.py | 999
-rw-r--r--  bitbake/lib/bb/server/process.py | 380
-rw-r--r--  bitbake/lib/bb/server/xmlrpcserver.py | 3
-rw-r--r--  bitbake/lib/bb/siggen.py | 750
-rw-r--r--  bitbake/lib/bb/taskdata.py | 14
-rw-r--r--  bitbake/lib/bb/tests/codeparser.py | 66
-rw-r--r--  bitbake/lib/bb/tests/color.py | 4
-rw-r--r--  bitbake/lib/bb/tests/compression.py | 100
-rw-r--r--  bitbake/lib/bb/tests/cooker.py | 2
-rw-r--r--  bitbake/lib/bb/tests/data.py | 132
-rw-r--r--  bitbake/lib/bb/tests/event.py | 62
-rw-r--r--  bitbake/lib/bb/tests/fetch-testdata/debian/pool/main/m/minicom/index.html | 59
-rw-r--r--  bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.10/index.html | 20
-rw-r--r--  bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.9/index.html | 40
-rw-r--r--  bitbake/lib/bb/tests/fetch-testdata/software/libxml2/index.html | 19
-rw-r--r--  bitbake/lib/bb/tests/fetch.py | 1197
-rw-r--r--  bitbake/lib/bb/tests/parse.py | 179
-rw-r--r--  bitbake/lib/bb/tests/runqueue-tests/conf/bitbake.conf | 2
-rw-r--r--  bitbake/lib/bb/tests/runqueue.py | 54
-rw-r--r--  bitbake/lib/bb/tests/siggen.py | 77
-rw-r--r--  bitbake/lib/bb/tests/utils.py | 20
-rw-r--r--  bitbake/lib/bb/tinfoil.py | 32
-rw-r--r--  bitbake/lib/bb/ui/buildinfohelper.py | 96
-rw-r--r--  bitbake/lib/bb/ui/eventreplay.py | 86
-rw-r--r--  bitbake/lib/bb/ui/knotty.py | 193
-rw-r--r--  bitbake/lib/bb/ui/ncurses.py | 3
-rw-r--r--  bitbake/lib/bb/ui/taskexp.py | 7
-rwxr-xr-x  bitbake/lib/bb/ui/taskexp_ncurses.py | 1511
-rw-r--r--  bitbake/lib/bb/ui/toasterui.py | 2
-rw-r--r--  bitbake/lib/bb/ui/uievent.py | 32
-rw-r--r--  bitbake/lib/bb/ui/uihelper.py | 6
-rw-r--r--  bitbake/lib/bb/utils.py | 315
-rwxr-xr-x  bitbake/lib/bb/xattr.py | 126
85 files changed, 9871 insertions(+), 3023 deletions(-)
diff --git a/bitbake/lib/bb/COW.py b/bitbake/lib/bb/COW.py
index 23c22b65ef..76bc08a3ea 100644
--- a/bitbake/lib/bb/COW.py
+++ b/bitbake/lib/bb/COW.py
@@ -3,6 +3,8 @@
 #
 # Copyright (C) 2006 Tim Ansell
 #
+# SPDX-License-Identifier: GPL-2.0-only
+#
 # Please Note:
 # Be careful when using mutable types (ie Dict and Lists) - operations involving these are SLOW.
 # Assign a file to __warn__ to get warnings about slow operations.
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
index 84a9051c13..15013540c2 100644
--- a/bitbake/lib/bb/__init__.py
+++ b/bitbake/lib/bb/__init__.py
@@ -9,12 +9,19 @@
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
-__version__ = "1.49.2"
+__version__ = "2.9.0"
 
 import sys
-if sys.version_info < (3, 5, 0):
-    raise RuntimeError("Sorry, python 3.5.0 or later is required for this version of bitbake")
+if sys.version_info < (3, 8, 0):
+    raise RuntimeError("Sorry, python 3.8.0 or later is required for this version of bitbake")
+
+if sys.version_info < (3, 10, 0):
+    # With python 3.8 and 3.9, we see errors of "libgcc_s.so.1 must be installed for pthread_cancel to work"
+    # https://stackoverflow.com/questions/64797838/libgcc-s-so-1-must-be-installed-for-pthread-cancel-to-work
+    # https://bugs.ams1.psf.io/issue42888
+    # so ensure libgcc_s is loaded early on
+    import ctypes
+    libgcc_s = ctypes.CDLL('libgcc_s.so.1')
 
 class BBHandledException(Exception):
     """
@@ -58,8 +65,12 @@ class BBLoggerMixin(object):
         if not bb.event.worker_pid:
             if self.name in bb.msg.loggerDefaultDomains and loglevel > (bb.msg.loggerDefaultDomains[self.name]):
                 return
-            if loglevel > bb.msg.loggerDefaultLogLevel:
+            if loglevel < bb.msg.loggerDefaultLogLevel:
                 return
+
+        if not isinstance(level, int) or not isinstance(msg, str):
+            mainlogger.warning("Invalid arguments in bbdebug: %s" % repr((level, msg,) + args))
+
         return self.log(loglevel, msg, *args, **kwargs)
 
     def plain(self, msg, *args, **kwargs):
@@ -71,6 +82,13 @@ class BBLoggerMixin(object):
     def verbnote(self, msg, *args, **kwargs):
         return self.log(logging.INFO + 2, msg, *args, **kwargs)
 
+    def warnonce(self, msg, *args, **kwargs):
+        return self.log(logging.WARNING - 1, msg, *args, **kwargs)
+
+    def erroronce(self, msg, *args, **kwargs):
+        return self.log(logging.ERROR - 1, msg, *args, **kwargs)
+
+
 Logger = logging.getLoggerClass()
 class BBLogger(Logger, BBLoggerMixin):
     def __init__(self, name, *args, **kwargs):
@@ -157,9 +175,15 @@ def verbnote(*args):
 def warn(*args):
     mainlogger.warning(''.join(args))
 
+def warnonce(*args):
+    mainlogger.warnonce(''.join(args))
+
 def error(*args, **kwargs):
     mainlogger.error(''.join(args), extra=kwargs)
 
+def erroronce(*args):
+    mainlogger.erroronce(''.join(args))
+
 def fatal(*args, **kwargs):
     mainlogger.critical(''.join(args), extra=kwargs)
     raise BBHandledException()
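
The libgcc_s preload added above can be reused in any standalone Python 3.8/3.9 program that hits the "libgcc_s.so.1 must be installed for pthread_cancel to work" crash. A minimal sketch of the same pattern, mirroring the guard in the hunk above:

import sys

if sys.version_info < (3, 10, 0):
    import ctypes
    # Load libgcc_s eagerly so pthread_cancel() can resolve it later, even if
    # the dynamic loader can no longer reach the library (see links above).
    libgcc_s = ctypes.CDLL("libgcc_s.so.1")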
diff --git a/bitbake/lib/bb/acl.py b/bitbake/lib/bb/acl.py
new file mode 100755
index 0000000000..0f41b275cf
--- /dev/null
+++ b/bitbake/lib/bb/acl.py
@@ -0,0 +1,215 @@
+#! /usr/bin/env python3
+#
+# Copyright 2023 by Garmin Ltd. or its subsidiaries
+#
+# SPDX-License-Identifier: MIT
+
+
+import sys
+import ctypes
+import os
+import errno
+import pwd
+import grp
+
+libacl = ctypes.CDLL("libacl.so.1", use_errno=True)
+
+
+ACL_TYPE_ACCESS = 0x8000
+ACL_TYPE_DEFAULT = 0x4000
+
+ACL_FIRST_ENTRY = 0
+ACL_NEXT_ENTRY = 1
+
+ACL_UNDEFINED_TAG = 0x00
+ACL_USER_OBJ = 0x01
+ACL_USER = 0x02
+ACL_GROUP_OBJ = 0x04
+ACL_GROUP = 0x08
+ACL_MASK = 0x10
+ACL_OTHER = 0x20
+
+ACL_READ = 0x04
+ACL_WRITE = 0x02
+ACL_EXECUTE = 0x01
+
+acl_t = ctypes.c_void_p
+acl_entry_t = ctypes.c_void_p
+acl_permset_t = ctypes.c_void_p
+acl_perm_t = ctypes.c_uint
+
+acl_tag_t = ctypes.c_int
+
+libacl.acl_free.argtypes = [acl_t]
+
+
+def acl_free(acl):
+    libacl.acl_free(acl)
+
+
+libacl.acl_get_file.restype = acl_t
+libacl.acl_get_file.argtypes = [ctypes.c_char_p, ctypes.c_uint]
+
+
+def acl_get_file(path, typ):
+    acl = libacl.acl_get_file(os.fsencode(path), typ)
+    if acl is None:
+        err = ctypes.get_errno()
+        raise OSError(err, os.strerror(err), str(path))
+
+    return acl
+
+
+libacl.acl_get_entry.argtypes = [acl_t, ctypes.c_int, ctypes.c_void_p]
+
+
+def acl_get_entry(acl, entry_id):
+    entry = acl_entry_t()
+    ret = libacl.acl_get_entry(acl, entry_id, ctypes.byref(entry))
+    if ret < 0:
+        err = ctypes.get_errno()
+        raise OSError(err, os.strerror(err))
+
+    if ret == 0:
+        return None
+
+    return entry
+
+
+libacl.acl_get_tag_type.argtypes = [acl_entry_t, ctypes.c_void_p]
+
+
+def acl_get_tag_type(entry_d):
+    tag = acl_tag_t()
+    ret = libacl.acl_get_tag_type(entry_d, ctypes.byref(tag))
+    if ret < 0:
+        err = ctypes.get_errno()
+        raise OSError(err, os.strerror(err))
+    return tag.value
+
+
+libacl.acl_get_qualifier.restype = ctypes.c_void_p
+libacl.acl_get_qualifier.argtypes = [acl_entry_t]
+
+
+def acl_get_qualifier(entry_d):
+    ret = libacl.acl_get_qualifier(entry_d)
+    if ret is None:
+        err = ctypes.get_errno()
+        raise OSError(err, os.strerror(err))
+    return ctypes.c_void_p(ret)
+
+
+libacl.acl_get_permset.argtypes = [acl_entry_t, ctypes.c_void_p]
+
+
+def acl_get_permset(entry_d):
+    permset = acl_permset_t()
+    ret = libacl.acl_get_permset(entry_d, ctypes.byref(permset))
+    if ret < 0:
+        err = ctypes.get_errno()
+        raise OSError(err, os.strerror(err))
+
+    return permset
+
+
+libacl.acl_get_perm.argtypes = [acl_permset_t, acl_perm_t]
+
+
+def acl_get_perm(permset_d, perm):
+    ret = libacl.acl_get_perm(permset_d, perm)
+    if ret < 0:
+        err = ctypes.get_errno()
+        raise OSError(err, os.strerror(err))
+    return bool(ret)
+
+
+class Entry(object):
+    def __init__(self, tag, qualifier, mode):
+        self.tag = tag
+        self.qualifier = qualifier
+        self.mode = mode
+
+    def __str__(self):
+        typ = ""
+        qual = ""
+        if self.tag == ACL_USER:
+            typ = "user"
+            qual = pwd.getpwuid(self.qualifier).pw_name
+        elif self.tag == ACL_GROUP:
+            typ = "group"
+            qual = grp.getgrgid(self.qualifier).gr_name
+        elif self.tag == ACL_USER_OBJ:
+            typ = "user"
+        elif self.tag == ACL_GROUP_OBJ:
+            typ = "group"
+        elif self.tag == ACL_MASK:
+            typ = "mask"
+        elif self.tag == ACL_OTHER:
+            typ = "other"
+
+        r = "r" if self.mode & ACL_READ else "-"
+        w = "w" if self.mode & ACL_WRITE else "-"
+        x = "x" if self.mode & ACL_EXECUTE else "-"
+
+        return f"{typ}:{qual}:{r}{w}{x}"
+
+
+class ACL(object):
+    def __init__(self, acl):
+        self.acl = acl
+
+    def __del__(self):
+        acl_free(self.acl)
+
+    def entries(self):
+        entry_id = ACL_FIRST_ENTRY
+        while True:
+            entry = acl_get_entry(self.acl, entry_id)
+            if entry is None:
+                break
+
+            permset = acl_get_permset(entry)
+
+            mode = 0
+            for m in (ACL_READ, ACL_WRITE, ACL_EXECUTE):
+                if acl_get_perm(permset, m):
+                    mode |= m
+
+            qualifier = None
+            tag = acl_get_tag_type(entry)
+
+            if tag == ACL_USER or tag == ACL_GROUP:
+                qual = acl_get_qualifier(entry)
+                qualifier = ctypes.cast(qual, ctypes.POINTER(ctypes.c_int))[0]
+
+            yield Entry(tag, qualifier, mode)
+
+            entry_id = ACL_NEXT_ENTRY
+
+    @classmethod
+    def from_path(cls, path, typ):
+        acl = acl_get_file(path, typ)
+        return cls(acl)
+
+
+def main():
+    import argparse
+    import pwd
+    import grp
+    from pathlib import Path
+
+    parser = argparse.ArgumentParser()
+    parser.add_argument("path", help="File Path", type=Path)
+
+    args = parser.parse_args()
+
+    acl = ACL.from_path(args.path, ACL_TYPE_ACCESS)
+    for entry in acl.entries():
+        print(str(entry))
+
+    return 0
+
+
+if __name__ == "__main__":
+    sys.exit(main())
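
The new bb/acl.py is both an importable module and a script (hence the executable 100755 mode). A usage sketch as a module; the path is only an example:

from bb.acl import ACL, ACL_TYPE_ACCESS, ACL_TYPE_DEFAULT

# Print each access ACL entry, e.g. "user::rw-", "group::r--", "other::r--"
acl = ACL.from_path("/tmp", ACL_TYPE_ACCESS)
for entry in acl.entries():
    print(entry)

# Directories may also carry a default ACL that newly created children inherit
for entry in ACL.from_path("/tmp", ACL_TYPE_DEFAULT).entries():
    print(entry)

Run directly, `bitbake/lib/bb/acl.py PATH` prints the access ACL of PATH, one entry per line, as main() above does.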
diff --git a/bitbake/lib/bb/asyncrpc/__init__.py b/bitbake/lib/bb/asyncrpc/__init__.py
new file mode 100644
index 0000000000..639e1607f8
--- /dev/null
+++ b/bitbake/lib/bb/asyncrpc/__init__.py
@@ -0,0 +1,16 @@
+#
+# Copyright BitBake Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+
+from .client import AsyncClient, Client, ClientPool
+from .serv import AsyncServer, AsyncServerConnection
+from .connection import DEFAULT_MAX_CHUNK
+from .exceptions import (
+    ClientError,
+    ServerError,
+    ConnectionClosedError,
+    InvokeError,
+)
diff --git a/bitbake/lib/bb/asyncrpc/client.py b/bitbake/lib/bb/asyncrpc/client.py
new file mode 100644
index 0000000000..a350b4fb12
--- /dev/null
+++ b/bitbake/lib/bb/asyncrpc/client.py
@@ -0,0 +1,313 @@
+#
+# Copyright BitBake Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import abc
+import asyncio
+import json
+import os
+import socket
+import sys
+import re
+import contextlib
+from threading import Thread
+from .connection import StreamConnection, WebsocketConnection, DEFAULT_MAX_CHUNK
+from .exceptions import ConnectionClosedError, InvokeError
+
+UNIX_PREFIX = "unix://"
+WS_PREFIX = "ws://"
+WSS_PREFIX = "wss://"
+
+ADDR_TYPE_UNIX = 0
+ADDR_TYPE_TCP = 1
+ADDR_TYPE_WS = 2
+
+def parse_address(addr):
+    if addr.startswith(UNIX_PREFIX):
+        return (ADDR_TYPE_UNIX, (addr[len(UNIX_PREFIX) :],))
+    elif addr.startswith(WS_PREFIX) or addr.startswith(WSS_PREFIX):
+        return (ADDR_TYPE_WS, (addr,))
+    else:
+        m = re.match(r"\[(?P<host>[^\]]*)\]:(?P<port>\d+)$", addr)
+        if m is not None:
+            host = m.group("host")
+            port = m.group("port")
+        else:
+            host, port = addr.split(":")
+
+        return (ADDR_TYPE_TCP, (host, int(port)))
+
+class AsyncClient(object):
+    def __init__(
+        self,
+        proto_name,
+        proto_version,
+        logger,
+        timeout=30,
+        server_headers=False,
+        headers={},
+    ):
+        self.socket = None
+        self.max_chunk = DEFAULT_MAX_CHUNK
+        self.proto_name = proto_name
+        self.proto_version = proto_version
+        self.logger = logger
+        self.timeout = timeout
+        self.needs_server_headers = server_headers
+        self.server_headers = {}
+        self.headers = headers
+
+    async def connect_tcp(self, address, port):
+        async def connect_sock():
+            reader, writer = await asyncio.open_connection(address, port)
+            return StreamConnection(reader, writer, self.timeout, self.max_chunk)
+
+        self._connect_sock = connect_sock
+
+    async def connect_unix(self, path):
+        async def connect_sock():
+            # AF_UNIX has path length issues so chdir here to work around them
+            cwd = os.getcwd()
+            try:
+                os.chdir(os.path.dirname(path))
+                # The socket must be opened synchronously so that CWD doesn't get
+                # changed out from underneath us, so we pass it as a sock into asyncio
+                sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM, 0)
+                sock.connect(os.path.basename(path))
+            finally:
+                os.chdir(cwd)
+            reader, writer = await asyncio.open_unix_connection(sock=sock)
+            return StreamConnection(reader, writer, self.timeout, self.max_chunk)
+
+        self._connect_sock = connect_sock
+
+    async def connect_websocket(self, uri):
+        import websockets
+
+        async def connect_sock():
+            websocket = await websockets.connect(uri, ping_interval=None)
+            return WebsocketConnection(websocket, self.timeout)
+
+        self._connect_sock = connect_sock
+
+    async def setup_connection(self):
+        # Send headers
+        await self.socket.send("%s %s" % (self.proto_name, self.proto_version))
+        await self.socket.send(
+            "needs-headers: %s" % ("true" if self.needs_server_headers else "false")
+        )
+        for k, v in self.headers.items():
+            await self.socket.send("%s: %s" % (k, v))
+
+        # End of headers
+        await self.socket.send("")
+
+        self.server_headers = {}
+        if self.needs_server_headers:
+            while True:
+                line = await self.socket.recv()
+                if not line:
+                    # End headers
+                    break
+                tag, value = line.split(":", 1)
+                self.server_headers[tag.lower()] = value.strip()
+
+    async def get_header(self, tag, default):
+        await self.connect()
+        return self.server_headers.get(tag, default)
+
+    async def connect(self):
+        if self.socket is None:
+            self.socket = await self._connect_sock()
+            await self.setup_connection()
+
+    async def disconnect(self):
+        if self.socket is not None:
+            await self.socket.close()
+            self.socket = None
+
+    async def close(self):
+        await self.disconnect()
+
+    async def _send_wrapper(self, proc):
+        count = 0
+        while True:
+            try:
+                await self.connect()
+                return await proc()
+            except (
+                OSError,
+                ConnectionError,
+                ConnectionClosedError,
+                json.JSONDecodeError,
+                UnicodeDecodeError,
+            ) as e:
+                self.logger.warning("Error talking to server: %s" % e)
+                if count >= 3:
+                    if not isinstance(e, ConnectionError):
+                        raise ConnectionError(str(e))
+                    raise e
+                await self.close()
+                count += 1
+
+    def check_invoke_error(self, msg):
+        if isinstance(msg, dict) and "invoke-error" in msg:
+            raise InvokeError(msg["invoke-error"]["message"])
+
+    async def invoke(self, msg):
+        async def proc():
+            await self.socket.send_message(msg)
+            return await self.socket.recv_message()
+
+        result = await self._send_wrapper(proc)
+        self.check_invoke_error(result)
+        return result
+
+    async def ping(self):
+        return await self.invoke({"ping": {}})
+
+    async def __aenter__(self):
+        return self
+
+    async def __aexit__(self, exc_type, exc_value, traceback):
+        await self.close()
+
+
+class Client(object):
+    def __init__(self):
+        self.client = self._get_async_client()
+        self.loop = asyncio.new_event_loop()
+
+        # Override any pre-existing loop.
+        # Without this, the PR server export selftest triggers a hang
+        # when running with Python 3.7. The drawback is that there is
+        # potential for issues if the PR and hash equiv (or some new)
+        # clients need to both be instantiated in the same process.
+        # This should be revisited if/when Python 3.9 becomes the
+        # minimum required version for BitBake, as it seems not
+        # required (but harmless) with it.
+        asyncio.set_event_loop(self.loop)
+
+        self._add_methods("connect_tcp", "ping")
+
+    @abc.abstractmethod
+    def _get_async_client(self):
+        pass
+
+    def _get_downcall_wrapper(self, downcall):
+        def wrapper(*args, **kwargs):
+            return self.loop.run_until_complete(downcall(*args, **kwargs))
+
+        return wrapper
+
+    def _add_methods(self, *methods):
+        for m in methods:
+            downcall = getattr(self.client, m)
+            setattr(self, m, self._get_downcall_wrapper(downcall))
+
+    def connect_unix(self, path):
+        self.loop.run_until_complete(self.client.connect_unix(path))
+        self.loop.run_until_complete(self.client.connect())
+
+    @property
+    def max_chunk(self):
+        return self.client.max_chunk
+
+    @max_chunk.setter
+    def max_chunk(self, value):
+        self.client.max_chunk = value
+
+    def disconnect(self):
+        self.loop.run_until_complete(self.client.close())
+
+    def close(self):
+        if self.loop:
+            self.loop.run_until_complete(self.client.close())
+            if sys.version_info >= (3, 6):
+                self.loop.run_until_complete(self.loop.shutdown_asyncgens())
+            self.loop.close()
+        self.loop = None
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        self.close()
+        return False
+
+
+class ClientPool(object):
+    def __init__(self, max_clients):
+        self.avail_clients = []
+        self.num_clients = 0
+        self.max_clients = max_clients
+        self.loop = None
+        self.client_condition = None
+
+    @abc.abstractmethod
+    async def _new_client(self):
+        raise NotImplementedError("Must be implemented in derived class")
+
+    def close(self):
+        if self.client_condition:
+            self.client_condition = None
+
+        if self.loop:
+            self.loop.run_until_complete(self.__close_clients())
+            self.loop.run_until_complete(self.loop.shutdown_asyncgens())
+            self.loop.close()
+            self.loop = None
+
+    def run_tasks(self, tasks):
+        if not self.loop:
+            self.loop = asyncio.new_event_loop()
+
+        thread = Thread(target=self.__thread_main, args=(tasks,))
+        thread.start()
+        thread.join()
+
+    @contextlib.asynccontextmanager
+    async def get_client(self):
+        async with self.client_condition:
+            if self.avail_clients:
+                client = self.avail_clients.pop()
+            elif self.num_clients < self.max_clients:
+                self.num_clients += 1
+                client = await self._new_client()
+            else:
+                while not self.avail_clients:
+                    await self.client_condition.wait()
+                client = self.avail_clients.pop()
+
+        try:
+            yield client
+        finally:
+            async with self.client_condition:
+                self.avail_clients.append(client)
+                self.client_condition.notify()
+
+    def __thread_main(self, tasks):
+        async def process_task(task):
+            async with self.get_client() as client:
+                await task(client)
+
+        asyncio.set_event_loop(self.loop)
+        if not self.client_condition:
+            self.client_condition = asyncio.Condition()
+        tasks = [process_task(t) for t in tasks]
+        self.loop.run_until_complete(asyncio.gather(*tasks))
+
+    async def __close_clients(self):
+        for c in self.avail_clients:
+            await c.close()
+        self.avail_clients = []
+        self.num_clients = 0
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        self.close()
+        return False
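
parse_address() accepts three syntaxes: unix:// paths, ws:// (or wss://) URIs, and host:port with square-bracketing for IPv6 hosts. A quick illustration of the mapping (the addresses are examples only, and bitbake's lib directory must be on sys.path):

from bb.asyncrpc.client import parse_address, ADDR_TYPE_UNIX, ADDR_TYPE_TCP, ADDR_TYPE_WS

assert parse_address("unix:///run/server.sock") == (ADDR_TYPE_UNIX, ("/run/server.sock",))
assert parse_address("ws://example.com:8686") == (ADDR_TYPE_WS, ("ws://example.com:8686",))
assert parse_address("hostname:8686") == (ADDR_TYPE_TCP, ("hostname", 8686))
assert parse_address("[::1]:8686") == (ADDR_TYPE_TCP, ("::1", 8686))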
diff --git a/bitbake/lib/bb/asyncrpc/connection.py b/bitbake/lib/bb/asyncrpc/connection.py
new file mode 100644
index 0000000000..7f0cf6ba96
--- /dev/null
+++ b/bitbake/lib/bb/asyncrpc/connection.py
@@ -0,0 +1,146 @@
+#
+# Copyright BitBake Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import asyncio
+import itertools
+import json
+from datetime import datetime
+from .exceptions import ClientError, ConnectionClosedError
+
+
+# The Python async server defaults to a 64K receive buffer, so we hardcode our
+# maximum chunk size. It would be better if the client and server reported to
+# each other what the maximum chunk sizes were, but that will slow down the
+# connection setup with a round trip delay so I'd rather not do that unless it
+# is necessary
+DEFAULT_MAX_CHUNK = 32 * 1024
+
+
+def chunkify(msg, max_chunk):
+    if len(msg) < max_chunk - 1:
+        yield "".join((msg, "\n"))
+    else:
+        yield "".join((json.dumps({"chunk-stream": None}), "\n"))
+
+        args = [iter(msg)] * (max_chunk - 1)
+        for m in map("".join, itertools.zip_longest(*args, fillvalue="")):
+            yield "".join(itertools.chain(m, "\n"))
+        yield "\n"
+
+
+def json_serialize(obj):
+    if isinstance(obj, datetime):
+        return obj.isoformat()
+    raise TypeError("Type %s not serializeable" % type(obj))
+
+
+class StreamConnection(object):
+    def __init__(self, reader, writer, timeout, max_chunk=DEFAULT_MAX_CHUNK):
+        self.reader = reader
+        self.writer = writer
+        self.timeout = timeout
+        self.max_chunk = max_chunk
+
+    @property
+    def address(self):
+        return self.writer.get_extra_info("peername")
+
+    async def send_message(self, msg):
+        for c in chunkify(json.dumps(msg, default=json_serialize), self.max_chunk):
+            self.writer.write(c.encode("utf-8"))
+        await self.writer.drain()
+
+    async def recv_message(self):
+        l = await self.recv()
+
+        m = json.loads(l)
+        if not m:
+            return m
+
+        if "chunk-stream" in m:
+            lines = []
+            while True:
+                l = await self.recv()
+                if not l:
+                    break
+                lines.append(l)
+
+            m = json.loads("".join(lines))
+
+        return m
+
+    async def send(self, msg):
+        self.writer.write(("%s\n" % msg).encode("utf-8"))
+        await self.writer.drain()
+
+    async def recv(self):
+        if self.timeout < 0:
+            line = await self.reader.readline()
+        else:
+            try:
+                line = await asyncio.wait_for(self.reader.readline(), self.timeout)
+            except asyncio.TimeoutError:
+                raise ConnectionError("Timed out waiting for data")
+
+        if not line:
+            raise ConnectionClosedError("Connection closed")
+
+        line = line.decode("utf-8")
+
+        if not line.endswith("\n"):
+            raise ConnectionError("Bad message %r" % (line))
+
+        return line.rstrip()
+
+    async def close(self):
+        self.reader = None
+        if self.writer is not None:
+            self.writer.close()
+            self.writer = None
+
+
+class WebsocketConnection(object):
+    def __init__(self, socket, timeout):
+        self.socket = socket
+        self.timeout = timeout
+
+    @property
+    def address(self):
+        return ":".join(str(s) for s in self.socket.remote_address)
+
+    async def send_message(self, msg):
+        await self.send(json.dumps(msg, default=json_serialize))
+
+    async def recv_message(self):
+        m = await self.recv()
+        return json.loads(m)
+
+    async def send(self, msg):
+        import websockets.exceptions
+
+        try:
+            await self.socket.send(msg)
+        except websockets.exceptions.ConnectionClosed:
+            raise ConnectionClosedError("Connection closed")
+
+    async def recv(self):
+        import websockets.exceptions
+
+        try:
+            if self.timeout < 0:
+                return await self.socket.recv()
+
+            try:
+                return await asyncio.wait_for(self.socket.recv(), self.timeout)
+            except asyncio.TimeoutError:
+                raise ConnectionError("Timed out waiting for data")
+        except websockets.exceptions.ConnectionClosed:
+            raise ConnectionClosedError("Connection closed")
+
+    async def close(self):
+        if self.socket is not None:
+            await self.socket.close()
+            self.socket = None
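
chunkify() keeps each transmitted line under max_chunk: short payloads go out as one newline-terminated line, while larger ones are announced with a {"chunk-stream": null} marker, sliced into max_chunk - 1 character lines, and terminated by an empty line, which recv_message() above reassembles. A small demonstration (max_chunk=10 is artificially small for the example):

from bb.asyncrpc.connection import chunkify

print(list(chunkify('{"a": 1}', 10)))
# ['{"a": 1}\n']

print(list(chunkify('{"key": "0123456789"}', 10)))
# ['{"chunk-stream": null}\n', '{"key": "\n', '012345678\n', '9"}\n', '\n']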
diff --git a/bitbake/lib/bb/asyncrpc/exceptions.py b/bitbake/lib/bb/asyncrpc/exceptions.py
new file mode 100644
index 0000000000..ae1043a38b
--- /dev/null
+++ b/bitbake/lib/bb/asyncrpc/exceptions.py
@@ -0,0 +1,21 @@
+#
+# Copyright BitBake Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+
+class ClientError(Exception):
+    pass
+
+
+class InvokeError(Exception):
+    pass
+
+
+class ServerError(Exception):
+    pass
+
+
+class ConnectionClosedError(Exception):
+    pass
diff --git a/bitbake/lib/bb/asyncrpc/serv.py b/bitbake/lib/bb/asyncrpc/serv.py
new file mode 100644
index 0000000000..a66117acad
--- /dev/null
+++ b/bitbake/lib/bb/asyncrpc/serv.py
@@ -0,0 +1,391 @@
+#
+# Copyright BitBake Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import abc
+import asyncio
+import json
+import os
+import signal
+import socket
+import sys
+import multiprocessing
+import logging
+from .connection import StreamConnection, WebsocketConnection
+from .exceptions import ClientError, ServerError, ConnectionClosedError, InvokeError
+
+
+class ClientLoggerAdapter(logging.LoggerAdapter):
+    def process(self, msg, kwargs):
+        return f"[Client {self.extra['address']}] {msg}", kwargs
+
+
+class AsyncServerConnection(object):
+    # If a handler returns this object (e.g. `return self.NO_RESPONSE`), no
+    # return message will automatically be sent back to the client
+    NO_RESPONSE = object()
+
+    def __init__(self, socket, proto_name, logger):
+        self.socket = socket
+        self.proto_name = proto_name
+        self.handlers = {
+            "ping": self.handle_ping,
+        }
+        self.logger = ClientLoggerAdapter(
+            logger,
+            {
+                "address": socket.address,
+            },
+        )
+        self.client_headers = {}
+
+    async def close(self):
+        await self.socket.close()
+
+    async def handle_headers(self, headers):
+        return {}
+
+    async def process_requests(self):
+        try:
+            self.logger.info("Client %r connected" % (self.socket.address,))
+
+            # Read protocol and version
+            client_protocol = await self.socket.recv()
+            if not client_protocol:
+                return
+
+            (client_proto_name, client_proto_version) = client_protocol.split()
+            if client_proto_name != self.proto_name:
+                self.logger.debug("Rejecting invalid protocol %s" % (self.proto_name))
+                return
+
+            self.proto_version = tuple(int(v) for v in client_proto_version.split("."))
+            if not self.validate_proto_version():
+                self.logger.debug(
+                    "Rejecting invalid protocol version %s" % (client_proto_version)
+                )
+                return
+
+            # Read headers
+            self.client_headers = {}
+            while True:
+                header = await self.socket.recv()
+                if not header:
+                    # Empty line. End of headers
+                    break
+                tag, value = header.split(":", 1)
+                self.client_headers[tag.lower()] = value.strip()
+
+            if self.client_headers.get("needs-headers", "false") == "true":
+                for k, v in (await self.handle_headers(self.client_headers)).items():
+                    await self.socket.send("%s: %s" % (k, v))
+                await self.socket.send("")
+
+            # Handle messages
+            while True:
+                d = await self.socket.recv_message()
+                if d is None:
+                    break
+                try:
+                    response = await self.dispatch_message(d)
+                except InvokeError as e:
+                    await self.socket.send_message(
+                        {"invoke-error": {"message": str(e)}}
+                    )
+                    break
+
+                if response is not self.NO_RESPONSE:
+                    await self.socket.send_message(response)
+
+        except ConnectionClosedError as e:
+            self.logger.info(str(e))
+        except (ClientError, ConnectionError) as e:
+            self.logger.error(str(e))
+        finally:
+            await self.close()
+
+    async def dispatch_message(self, msg):
+        for k in self.handlers.keys():
+            if k in msg:
+                self.logger.debug("Handling %s" % k)
+                return await self.handlers[k](msg[k])
+
+        raise ClientError("Unrecognized command %r" % msg)
+
+    async def handle_ping(self, request):
+        return {"alive": True}
+
+
+class StreamServer(object):
+    def __init__(self, handler, logger):
+        self.handler = handler
+        self.logger = logger
+        self.closed = False
+
+    async def handle_stream_client(self, reader, writer):
+        # writer.transport.set_write_buffer_limits(0)
+        socket = StreamConnection(reader, writer, -1)
+        if self.closed:
+            await socket.close()
+            return
+
+        await self.handler(socket)
+
+    async def stop(self):
+        self.closed = True
+
+
+class TCPStreamServer(StreamServer):
+    def __init__(self, host, port, handler, logger):
+        super().__init__(handler, logger)
+        self.host = host
+        self.port = port
+
+    def start(self, loop):
+        self.server = loop.run_until_complete(
+            asyncio.start_server(self.handle_stream_client, self.host, self.port)
+        )
+
+        for s in self.server.sockets:
+            self.logger.debug("Listening on %r" % (s.getsockname(),))
+            # Newer python does this automatically. Do it manually here for
+            # maximum compatibility
+            s.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1)
+            s.setsockopt(socket.SOL_TCP, socket.TCP_QUICKACK, 1)
+
+            # Enable keep alives. This prevents broken client connections
+            # from persisting on the server for long periods of time.
+            s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+            s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 30)
+            s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 15)
+            s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 4)
+
+        name = self.server.sockets[0].getsockname()
+        if self.server.sockets[0].family == socket.AF_INET6:
+            self.address = "[%s]:%d" % (name[0], name[1])
+        else:
+            self.address = "%s:%d" % (name[0], name[1])
+
+        return [self.server.wait_closed()]
+
+    async def stop(self):
+        await super().stop()
+        self.server.close()
+
+    def cleanup(self):
+        pass
+
+
+class UnixStreamServer(StreamServer):
+    def __init__(self, path, handler, logger):
+        super().__init__(handler, logger)
+        self.path = path
+
+    def start(self, loop):
+        cwd = os.getcwd()
+        try:
+            # Work around path length limits in AF_UNIX
+            os.chdir(os.path.dirname(self.path))
+            self.server = loop.run_until_complete(
+                asyncio.start_unix_server(
+                    self.handle_stream_client, os.path.basename(self.path)
+                )
+            )
+        finally:
+            os.chdir(cwd)
+
+        self.logger.debug("Listening on %r" % self.path)
+        self.address = "unix://%s" % os.path.abspath(self.path)
+        return [self.server.wait_closed()]
+
+    async def stop(self):
+        await super().stop()
+        self.server.close()
+
+    def cleanup(self):
+        os.unlink(self.path)
+
+
+class WebsocketsServer(object):
+    def __init__(self, host, port, handler, logger):
+        self.host = host
+        self.port = port
+        self.handler = handler
+        self.logger = logger
+
+    def start(self, loop):
+        import websockets.server
+
+        self.server = loop.run_until_complete(
+            websockets.server.serve(
+                self.client_handler,
+                self.host,
+                self.port,
+                ping_interval=None,
+            )
+        )
+
+        for s in self.server.sockets:
+            self.logger.debug("Listening on %r" % (s.getsockname(),))
+
+            # Enable keep alives. This prevents broken client connections
+            # from persisting on the server for long periods of time.
+            s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+            s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 30)
+            s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 15)
+            s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 4)
+
+        name = self.server.sockets[0].getsockname()
+        if self.server.sockets[0].family == socket.AF_INET6:
+            self.address = "ws://[%s]:%d" % (name[0], name[1])
+        else:
+            self.address = "ws://%s:%d" % (name[0], name[1])
+
+        return [self.server.wait_closed()]
+
+    async def stop(self):
+        self.server.close()
+
+    def cleanup(self):
+        pass
+
+    async def client_handler(self, websocket):
+        socket = WebsocketConnection(websocket, -1)
+        await self.handler(socket)
+
+
+class AsyncServer(object):
+    def __init__(self, logger):
+        self.logger = logger
+        self.loop = None
+        self.run_tasks = []
+
+    def start_tcp_server(self, host, port):
+        self.server = TCPStreamServer(host, port, self._client_handler, self.logger)
+
+    def start_unix_server(self, path):
+        self.server = UnixStreamServer(path, self._client_handler, self.logger)
+
+    def start_websocket_server(self, host, port):
+        self.server = WebsocketsServer(host, port, self._client_handler, self.logger)
+
+    async def _client_handler(self, socket):
+        address = socket.address
+        try:
+            client = self.accept_client(socket)
+            await client.process_requests()
+        except Exception as e:
+            import traceback
+
+            self.logger.error(
+                "Error from client %s: %s" % (address, str(e)), exc_info=True
+            )
+            traceback.print_exc()
+        finally:
+            self.logger.debug("Client %s disconnected", address)
+            await socket.close()
+
+    @abc.abstractmethod
+    def accept_client(self, socket):
+        pass
+
+    async def stop(self):
+        self.logger.debug("Stopping server")
+        await self.server.stop()
+
+    def start(self):
+        tasks = self.server.start(self.loop)
+        self.address = self.server.address
+        return tasks
+
+    def signal_handler(self):
+        self.logger.debug("Got exit signal")
+        self.loop.create_task(self.stop())
+
+    def _serve_forever(self, tasks):
+        try:
+            self.loop.add_signal_handler(signal.SIGTERM, self.signal_handler)
+            self.loop.add_signal_handler(signal.SIGINT, self.signal_handler)
+            self.loop.add_signal_handler(signal.SIGQUIT, self.signal_handler)
+            signal.pthread_sigmask(signal.SIG_UNBLOCK, [signal.SIGTERM])
+
+            self.loop.run_until_complete(asyncio.gather(*tasks))
+
+            self.logger.debug("Server shutting down")
+        finally:
+            self.server.cleanup()
+
+    def serve_forever(self):
+        """
+        Serve requests in the current process
+        """
+        self._create_loop()
+        tasks = self.start()
+        self._serve_forever(tasks)
+        self.loop.close()
+
+    def _create_loop(self):
+        # Create loop and override any loop that may have existed in
+        # a parent process. It is possible that the usecases of
+        # serve_forever might be constrained enough to allow using
+        # get_event_loop here, but better safe than sorry for now.
+        self.loop = asyncio.new_event_loop()
+        asyncio.set_event_loop(self.loop)
+
+    def serve_as_process(self, *, prefunc=None, args=(), log_level=None):
+        """
+        Serve requests in a child process
+        """
+
+        def run(queue):
+            # Create loop and override any loop that may have existed
+            # in a parent process. Without doing this and instead
+            # using get_event_loop, at the very minimum the hashserv
+            # unit tests will hang when running the second test.
+            # This happens since get_event_loop in the spawned server
+            # process for the second testcase ends up with the loop
+            # from the hashserv client created in the unit test process
+            # when running the first testcase. The problem is somewhat
+            # more general, though, as any potential use of asyncio in
+            # Cooker could create a loop that needs to replaced in this
+            # new process.
+            self._create_loop()
+            try:
+                self.address = None
+                tasks = self.start()
+            finally:
+                # Always put the server address to wake up the parent task
+                queue.put(self.address)
+                queue.close()
+
+            if prefunc is not None:
+                prefunc(self, *args)
+
+            if log_level is not None:
+                self.logger.setLevel(log_level)
+
+            self._serve_forever(tasks)
+
+            if sys.version_info >= (3, 6):
+                self.loop.run_until_complete(self.loop.shutdown_asyncgens())
+            self.loop.close()
+
+        queue = multiprocessing.Queue()
+
+        # Temporarily block SIGTERM. The server process will inherit this
+        # block which will ensure it doesn't receive the SIGTERM until the
+        # handler is ready for it
+        mask = signal.pthread_sigmask(signal.SIG_BLOCK, [signal.SIGTERM])
+        try:
+            self.process = multiprocessing.Process(target=run, args=(queue,))
+            self.process.start()
+
+            self.address = queue.get()
+            queue.close()
+            queue.join_thread()
+
+            return self.process
+        finally:
+            signal.pthread_sigmask(signal.SIG_SETMASK, mask)
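
serve_as_process() blocks SIGTERM before spawning so the child cannot receive it before _serve_forever() has installed its handlers and unblocked the signal. The same pattern in isolation, as a standalone sketch with a hypothetical worker (not BitBake code):

import signal
import sys
import time
import multiprocessing

def worker():
    # The child inherits the SIGTERM block from the parent; install a handler
    # first, then unblock, so the signal can never arrive unhandled.
    signal.signal(signal.SIGTERM, lambda signum, frame: sys.exit(0))
    signal.pthread_sigmask(signal.SIG_UNBLOCK, [signal.SIGTERM])
    time.sleep(60)

mask = signal.pthread_sigmask(signal.SIG_BLOCK, [signal.SIGTERM])
try:
    proc = multiprocessing.Process(target=worker)
    proc.start()  # any SIGTERM sent now is held until the child unblocks it
finally:
    signal.pthread_sigmask(signal.SIG_SETMASK, mask)  # restore the parent mask
proc.terminate()
proc.join()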
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
index f4f897e41a..44d08f5c55 100644
--- a/bitbake/lib/bb/build.py
+++ b/bitbake/lib/bb/build.py
@@ -20,10 +20,12 @@ import itertools
 import time
 import re
 import stat
+import datetime
 import bb
 import bb.msg
 import bb.process
 import bb.progress
+from io import StringIO
 from bb import data, event, utils
 
 bblogger = logging.getLogger('BitBake')
@@ -176,7 +178,9 @@ class StdoutNoopContextManager:
 
     @property
     def name(self):
-        return sys.stdout.name
+        if "name" in dir(sys.stdout):
+            return sys.stdout.name
+        return "<mem>"
 
 
 def exec_func(func, d, dirs = None):
@@ -295,9 +299,25 @@ def exec_func_python(func, d, runfile, cwd=None):
         lineno = int(d.getVarFlag(func, "lineno", False))
         bb.methodpool.insert_method(func, text, fn, lineno - 1)
 
-        comp = utils.better_compile(code, func, "exec_python_func() autogenerated")
-        utils.better_exec(comp, {"d": d}, code, "exec_python_func() autogenerated")
+        if verboseStdoutLogging:
+            sys.stdout.flush()
+            sys.stderr.flush()
+            currout = sys.stdout
+            currerr = sys.stderr
+            sys.stderr = sys.stdout = execio = StringIO()
+        comp = utils.better_compile(code, func, "exec_func_python() autogenerated")
+        utils.better_exec(comp, {"d": d}, code, "exec_func_python() autogenerated")
     finally:
+        if verboseStdoutLogging:
+            execio.flush()
+            logger.plain("%s" % execio.getvalue())
+            sys.stdout = currout
+            sys.stderr = currerr
+            execio.close()
+        # We want any stdout/stderr to be printed before any other log messages to make debugging
+        # more accurate. In some cases we seem to lose stdout/stderr entirely in logging tests without this.
+        sys.stdout.flush()
+        sys.stderr.flush()
         bb.debug(2, "Python function %s finished" % func)
 
     if cwd and olddir:
@@ -436,7 +456,11 @@ exit $ret
     if fakerootcmd:
         cmd = [fakerootcmd, runfile]
 
-    if verboseStdoutLogging:
+    # We only want to output to logger via LogTee if stdout is sys.__stdout__ (which will either
+    # be real stdout or subprocess PIPE or similar). In other cases we are being run "recursively",
+    # ie. inside another function, in which case stdout is already being captured so we don't
+    # want to Tee here as output would be printed twice, and out of order.
+    if verboseStdoutLogging and sys.stdout == sys.__stdout__:
         logfile = LogTee(logger, StdoutNoopContextManager())
     else:
         logfile = StdoutNoopContextManager()
@@ -565,10 +589,8 @@ exit $ret
 def _task_data(fn, task, d):
     localdata = bb.data.createCopy(d)
     localdata.setVar('BB_FILENAME', fn)
-    localdata.setVar('BB_CURRENTTASK', task[3:])
     localdata.setVar('OVERRIDES', 'task-%s:%s' %
                      (task[3:].replace('_', '-'), d.getVar('OVERRIDES', False)))
-    localdata.finalize()
     bb.data.expandKeys(localdata)
     return localdata
 
@@ -579,7 +601,7 @@ def _exec_task(fn, task, d, quieterr):
     running it with its own local metadata, and with some useful variables set.
     """
     if not d.getVarFlag(task, 'task', False):
-        event.fire(TaskInvalid(task, d), d)
+        event.fire(TaskInvalid(task, fn, d), d)
         logger.error("No such task: %s" % task)
         return 1
 
@@ -615,7 +637,8 @@ def _exec_task(fn, task, d, quieterr):
     logorder = os.path.join(tempdir, 'log.task_order')
     try:
         with open(logorder, 'a') as logorderfile:
-            logorderfile.write('{0} ({1}): {2}\n'.format(task, os.getpid(), logbase))
+            timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S.%f")
+            logorderfile.write('{0} {1} ({2}): {3}\n'.format(timestamp, task, os.getpid(), logbase))
     except OSError:
         logger.exception("Opening log file '%s'", logorder)
         pass
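
With the change above, each log.task_order entry gains a microsecond-resolution timestamp, so the file records when tasks ran, not just their order. Illustrative output (the task, pid, and log names below are made up):

import datetime
print(datetime.datetime.now().strftime("%Y%m%d-%H%M%S.%f"))
# 20240321-143005.123456

# Resulting log.task_order line:
# 20240321-143005.123456 do_compile (12345): log.do_compile.12345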
@@ -682,47 +705,55 @@ def _exec_task(fn, task, d, quieterr):
     try:
         try:
             event.fire(TaskStarted(task, fn, logfn, flags, localdata), localdata)
-        except (bb.BBHandledException, SystemExit):
-            return 1
 
-        try:
             for func in (prefuncs or '').split():
                 exec_func(func, localdata)
             exec_func(task, localdata)
             for func in (postfuncs or '').split():
                 exec_func(func, localdata)
-        except bb.BBHandledException:
-            event.fire(TaskFailed(task, fn, logfn, localdata, True), localdata)
-            return 1
-        except Exception as exc:
-            if quieterr:
-                event.fire(TaskFailedSilent(task, fn, logfn, localdata), localdata)
-            else:
-                errprinted = errchk.triggered
-                logger.error(str(exc))
-                event.fire(TaskFailed(task, fn, logfn, localdata, errprinted), localdata)
-            return 1
-    finally:
-        sys.stdout.flush()
-        sys.stderr.flush()
-
-        bblogger.removeHandler(handler)
-
-        # Restore the backup fds
-        os.dup2(osi[0], osi[1])
-        os.dup2(oso[0], oso[1])
-        os.dup2(ose[0], ose[1])
-
-        # Close the backup fds
-        os.close(osi[0])
-        os.close(oso[0])
-        os.close(ose[0])
+        finally:
+            # Need to flush and close the logs before sending events where the
+            # UI may try to look at the logs.
+            sys.stdout.flush()
+            sys.stderr.flush()
+
+            bblogger.removeHandler(handler)
+
+            # Restore the backup fds
+            os.dup2(osi[0], osi[1])
+            os.dup2(oso[0], oso[1])
+            os.dup2(ose[0], ose[1])
+
+            # Close the backup fds
+            os.close(osi[0])
+            os.close(oso[0])
+            os.close(ose[0])
+
+            logfile.close()
+            if os.path.exists(logfn) and os.path.getsize(logfn) == 0:
+                logger.debug2("Zero size logfn %s, removing", logfn)
+                bb.utils.remove(logfn)
+                bb.utils.remove(loglink)
+    except (Exception, SystemExit) as exc:
+        handled = False
+        if isinstance(exc, bb.BBHandledException):
+            handled = True
+
+        if quieterr:
+            if not handled:
+                logger.warning(repr(exc))
+            event.fire(TaskFailedSilent(task, fn, logfn, localdata), localdata)
+        else:
+            errprinted = errchk.triggered
+            # If the output is already on stdout, we've printed the information in the
+            # logs once already so don't duplicate
+            if verboseStdoutLogging or handled:
+                errprinted = True
+            if not handled:
+                logger.error(repr(exc))
+            event.fire(TaskFailed(task, fn, logfn, localdata, errprinted), localdata)
+        return 1
 
-    logfile.close()
-    if os.path.exists(logfn) and os.path.getsize(logfn) == 0:
-        logger.debug2("Zero size logfn %s, removing", logfn)
-        bb.utils.remove(logfn)
-        bb.utils.remove(loglink)
     event.fire(TaskSucceeded(task, fn, logfn, localdata), localdata)
 
     if not localdata.getVarFlag(task, 'nostamp', False) and not localdata.getVarFlag(task, 'selfstamp', False):
@@ -760,132 +791,92 @@ def exec_task(fn, task, d, profile = False):
         event.fire(failedevent, d)
         return 1
 
-def stamp_internal(taskname, d, file_name, baseonly=False, noextra=False):
-    """
-    Internal stamp helper function
-    Makes sure the stamp directory exists
-    Returns the stamp path+filename
-
-    In the bitbake core, d can be a CacheData and file_name will be set.
-    When called in task context, d will be a data store, file_name will not be set
-    """
-    taskflagname = taskname
-    if taskname.endswith("_setscene") and taskname != "do_setscene":
-        taskflagname = taskname.replace("_setscene", "")
-
-    if file_name:
-        stamp = d.stamp[file_name]
-        extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
-    else:
-        stamp = d.getVar('STAMP')
-        file_name = d.getVar('BB_FILENAME')
-        extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info') or ""
-
-    if baseonly:
-        return stamp
-    if noextra:
-        extrainfo = ""
-
-    if not stamp:
-        return
-
-    stamp = bb.parse.siggen.stampfile(stamp, file_name, taskname, extrainfo)
-
-    stampdir = os.path.dirname(stamp)
-    if cached_mtime_noerror(stampdir) == 0:
-        bb.utils.mkdirhier(stampdir)
-
-    return stamp
-
-def stamp_cleanmask_internal(taskname, d, file_name):
-    """
-    Internal stamp helper function to generate stamp cleaning mask
-    Returns the stamp path+filename
-
-    In the bitbake core, d can be a CacheData and file_name will be set.
-    When called in task context, d will be a data store, file_name will not be set
-    """
-    taskflagname = taskname
-    if taskname.endswith("_setscene") and taskname != "do_setscene":
-        taskflagname = taskname.replace("_setscene", "")
-
-    if file_name:
-        stamp = d.stampclean[file_name]
-        extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
-    else:
-        stamp = d.getVar('STAMPCLEAN')
-        file_name = d.getVar('BB_FILENAME')
-        extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info') or ""
-
-    if not stamp:
-        return []
-
-    cleanmask = bb.parse.siggen.stampcleanmask(stamp, file_name, taskname, extrainfo)
-
-    return [cleanmask, cleanmask.replace(taskflagname, taskflagname + "_setscene")]
-
-def make_stamp(task, d, file_name = None):
-    """
-    Creates/updates a stamp for a given task
-    (d can be a data dict or dataCache)
-    """
-    cleanmask = stamp_cleanmask_internal(task, d, file_name)
-    for mask in cleanmask:
-        for name in glob.glob(mask):
-            # Preserve sigdata files in the stamps directory
-            if "sigdata" in name or "sigbasedata" in name:
-                continue
-            # Preserve taint files in the stamps directory
-            if name.endswith('.taint'):
-                continue
-            os.unlink(name)
-
-    stamp = stamp_internal(task, d, file_name)
-    # Remove the file and recreate to force timestamp
-    # change on broken NFS filesystems
-    if stamp:
-        bb.utils.remove(stamp)
-        open(stamp, "w").close()
-
-    # If we're in task context, write out a signature file for each task
-    # as it completes
-    if not task.endswith("_setscene") and task != "do_setscene" and not file_name:
-        stampbase = stamp_internal(task, d, None, True)
-        file_name = d.getVar('BB_FILENAME')
-        bb.parse.siggen.dump_sigtask(file_name, task, stampbase, True)
-
-def del_stamp(task, d, file_name = None):
-    """
-    Removes a stamp for a given task
-    (d can be a data dict or dataCache)
-    """
-    stamp = stamp_internal(task, d, file_name)
-    bb.utils.remove(stamp)
-
-def write_taint(task, d, file_name = None):
-    """
-    Creates a "taint" file which will force the specified task and its
-    dependents to be re-run the next time by influencing the value of its
-    taskhash.
-    (d can be a data dict or dataCache)
-    """
-    import uuid
-    if file_name:
-        taintfn = d.stamp[file_name] + '.' + task + '.taint'
-    else:
-        taintfn = d.getVar('STAMP') + '.' + task + '.taint'
-    bb.utils.mkdirhier(os.path.dirname(taintfn))
-    # The specific content of the taint file is not really important,
-    # we just need it to be random, so a random UUID is used
-    with open(taintfn, 'w') as taintf:
-        taintf.write(str(uuid.uuid4()))
-
-def stampfile(taskname, d, file_name = None, noextra=False):
-    """
-    Return the stamp for a given task
-    (d can be a data dict or dataCache)
-    """
-    return stamp_internal(taskname, d, file_name, noextra=noextra)
+def _get_cleanmask(taskname, mcfn):
+    """
+    Internal stamp helper function to generate stamp cleaning mask
+    Returns the stamp path+filename
+
+    In the bitbake core, d can be a CacheData and file_name will be set.
+    When called in task context, d will be a data store, file_name will not be set
+    """
+    cleanmask = bb.parse.siggen.stampcleanmask_mcfn(taskname, mcfn)
+    taskflagname = taskname.replace("_setscene", "")
+    if cleanmask:
+        return [cleanmask, cleanmask.replace(taskflagname, taskflagname + "_setscene")]
+    return []
+
+def clean_stamp_mcfn(task, mcfn):
+    cleanmask = _get_cleanmask(task, mcfn)
+    for mask in cleanmask:
+        for name in glob.glob(mask):
+            # Preserve sigdata files in the stamps directory
+            if "sigdata" in name or "sigbasedata" in name:
+                continue
+            # Preserve taint files in the stamps directory
+            if name.endswith('.taint'):
+                continue
+            os.unlink(name)
+
+def clean_stamp(task, d):
+    mcfn = d.getVar('BB_FILENAME')
+    clean_stamp_mcfn(task, mcfn)
+
+def make_stamp_mcfn(task, mcfn):
+
+    basestamp = bb.parse.siggen.stampfile_mcfn(task, mcfn)
+
+    stampdir = os.path.dirname(basestamp)
+    if cached_mtime_noerror(stampdir) == 0:
+        bb.utils.mkdirhier(stampdir)
+
+    clean_stamp_mcfn(task, mcfn)
+
+    # Remove the file and recreate to force timestamp
+    # change on broken NFS filesystems
+    if basestamp:
+        bb.utils.remove(basestamp)
+        open(basestamp, "w").close()
+
+def make_stamp(task, d):
+    """
+    Creates/updates a stamp for a given task
+    """
+    mcfn = d.getVar('BB_FILENAME')
+
+    make_stamp_mcfn(task, mcfn)
+
+    # If we're in task context, write out a signature file for each task
+    # as it completes
+    if not task.endswith("_setscene"):
+        stampbase = bb.parse.siggen.stampfile_base(mcfn)
+        bb.parse.siggen.dump_sigtask(mcfn, task, stampbase, True)
+
+
+def find_stale_stamps(task, mcfn):
+    current = bb.parse.siggen.stampfile_mcfn(task, mcfn)
+    current2 = bb.parse.siggen.stampfile_mcfn(task + "_setscene", mcfn)
+    cleanmask = _get_cleanmask(task, mcfn)
+    found = []
+    for mask in cleanmask:
+        for name in glob.glob(mask):
+            if "sigdata" in name or "sigbasedata" in name:
+                continue
+            if name.endswith('.taint'):
+                continue
+            if name == current or name == current2:
+                continue
+            logger.debug2("Stampfile %s does not match %s or %s" % (name, current, current2))
+            found.append(name)
+    return found
+
+def write_taint(task, d):
+    """
+    Creates a "taint" file which will force the specified task and its
+    dependents to be re-run the next time by influencing the value of its
+    taskhash.
+    """
+    mcfn = d.getVar('BB_FILENAME')
+    bb.parse.siggen.invalidate_task(task, mcfn)
 
 def add_tasks(tasklist, d):
     task_deps = d.getVar('_task_deps', False)
@@ -910,6 +901,11 @@ def add_tasks(tasklist, d):
             task_deps[name] = {}
         if name in flags:
             deptask = d.expand(flags[name])
+            if name in ['noexec', 'fakeroot', 'nostamp']:
+                if deptask != '1':
+                    bb.warn("In a future version of BitBake, setting the '{}' flag to something other than '1' "
+                            "will result in the flag not being set. See YP bug #13808.".format(name))
+
             task_deps[name][task] = deptask
     getTask('mcdepends')
     getTask('depends')
@@ -1008,6 +1004,8 @@ def tasksbetween(task_start, task_end, d):
     def follow_chain(task, endtask, chain=None):
         if not chain:
             chain = []
+        if task in chain:
+            bb.fatal("Circular task dependencies as %s depends on itself via the chain %s" % (task, " -> ".join(chain)))
         chain.append(task)
         for othertask in tasks:
             if othertask == task:
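
The exec_func_python() hunk earlier in this diff captures a Python task's stdout/stderr in a StringIO while the function body runs, then replays the captured text through the logger so output stays ordered relative to log messages. The core pattern, reduced to a standalone sketch:

import sys
from io import StringIO

currout, currerr = sys.stdout, sys.stderr
sys.stderr = sys.stdout = execio = StringIO()
try:
    print("output captured while the task function runs")
finally:
    # Restore the real streams before touching them again
    sys.stdout, sys.stderr = currout, currerr
    captured = execio.getvalue()
    execio.close()

print("replayed through the logger:", captured, end="")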
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index aea2b8bc11..18d5574a31 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -19,14 +19,16 @@
 import os
 import logging
 import pickle
-from collections import defaultdict, Mapping
+from collections import defaultdict
+from collections.abc import Mapping
 import bb.utils
 from bb import PrefixLoggerAdapter
 import re
+import shutil
 
 logger = logging.getLogger("BitBake.Cache")
 
-__cache_version__ = "154"
+__cache_version__ = "155"
 
 def getCacheFile(path, filename, mc, data_hash):
     mcspec = ''
@@ -53,12 +55,12 @@ class RecipeInfoCommon(object):
 
     @classmethod
     def pkgvar(cls, var, packages, metadata):
-        return dict((pkg, cls.depvar("%s_%s" % (var, pkg), metadata))
+        return dict((pkg, cls.depvar("%s:%s" % (var, pkg), metadata))
                     for pkg in packages)
 
     @classmethod
     def taskvar(cls, var, tasks, metadata):
-        return dict((task, cls.getvar("%s_task-%s" % (var, task), metadata))
+        return dict((task, cls.getvar("%s:task-%s" % (var, task), metadata))
                     for task in tasks)
 
     @classmethod
@@ -103,7 +105,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
 
         self.tasks = metadata.getVar('__BBTASKS', False)
 
-        self.basetaskhashes = self.taskvar('BB_BASEHASH', self.tasks, metadata)
+        self.basetaskhashes = metadata.getVar('__siggen_basehashes', False) or {}
         self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)
 
         self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}
@@ -126,6 +128,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
126 self.inherits = self.getvar('__inherit_cache', metadata, expand=False) 128 self.inherits = self.getvar('__inherit_cache', metadata, expand=False)
127 self.fakerootenv = self.getvar('FAKEROOTENV', metadata) 129 self.fakerootenv = self.getvar('FAKEROOTENV', metadata)
128 self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata) 130 self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata)
131 self.fakerootlogs = self.getvar('FAKEROOTLOGS', metadata)
129 self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata) 132 self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata)
130 self.extradepsfunc = self.getvar('calculate_extra_depends', metadata) 133 self.extradepsfunc = self.getvar('calculate_extra_depends', metadata)
131 134
@@ -163,6 +166,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
163 cachedata.fakerootenv = {} 166 cachedata.fakerootenv = {}
164 cachedata.fakerootnoenv = {} 167 cachedata.fakerootnoenv = {}
165 cachedata.fakerootdirs = {} 168 cachedata.fakerootdirs = {}
169 cachedata.fakerootlogs = {}
166 cachedata.extradepsfunc = {} 170 cachedata.extradepsfunc = {}
167 171
168 def add_cacheData(self, cachedata, fn): 172 def add_cacheData(self, cachedata, fn):
@@ -212,7 +216,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
212 216
213 # Collect files we may need for possible world-dep 217 # Collect files we may need for possible world-dep
214 # calculations 218 # calculations
215 if not self.not_world: 219 if not bb.utils.to_boolean(self.not_world):
216 cachedata.possible_world.append(fn) 220 cachedata.possible_world.append(fn)
217 #else: 221 #else:
218 # logger.debug2("EXCLUDE FROM WORLD: %s", fn) 222 # logger.debug2("EXCLUDE FROM WORLD: %s", fn)
@@ -231,17 +235,116 @@ class CoreRecipeInfo(RecipeInfoCommon):
231 cachedata.fakerootenv[fn] = self.fakerootenv 235 cachedata.fakerootenv[fn] = self.fakerootenv
232 cachedata.fakerootnoenv[fn] = self.fakerootnoenv 236 cachedata.fakerootnoenv[fn] = self.fakerootnoenv
233 cachedata.fakerootdirs[fn] = self.fakerootdirs 237 cachedata.fakerootdirs[fn] = self.fakerootdirs
238 cachedata.fakerootlogs[fn] = self.fakerootlogs
234 cachedata.extradepsfunc[fn] = self.extradepsfunc 239 cachedata.extradepsfunc[fn] = self.extradepsfunc
235 240
241
242class SiggenRecipeInfo(RecipeInfoCommon):
243 __slots__ = ()
244
245 classname = "SiggenRecipeInfo"
246 cachefile = "bb_cache_" + classname + ".dat"
247 # we don't want to show this information in graph files so don't set cachefields
248 #cachefields = []
249
250 def __init__(self, filename, metadata):
251 self.siggen_gendeps = metadata.getVar("__siggen_gendeps", False)
252 self.siggen_varvals = metadata.getVar("__siggen_varvals", False)
253 self.siggen_taskdeps = metadata.getVar("__siggen_taskdeps", False)
254
255 @classmethod
256 def init_cacheData(cls, cachedata):
257 cachedata.siggen_taskdeps = {}
258 cachedata.siggen_gendeps = {}
259 cachedata.siggen_varvals = {}
260
261 def add_cacheData(self, cachedata, fn):
262 cachedata.siggen_gendeps[fn] = self.siggen_gendeps
263 cachedata.siggen_varvals[fn] = self.siggen_varvals
264 cachedata.siggen_taskdeps[fn] = self.siggen_taskdeps
265
266 # The siggen variable data is large and impacts:
267 # - bitbake's overall memory usage
268 # - the amount of data sent over IPC between parsing processes and the server
269 # - the size of the cache files on disk
270 # - the size of "sigdata" hash information files on disk
271 # The data consists of strings (some large) or frozenset lists of variables
272 # As such, we a) deduplicate the data here and b) pass references to the object on second
273 # access (e.g. over IPC or saving into pickle).
274
275 store = {}
276 save_map = {}
277 save_count = 1
278 restore_map = {}
279 restore_count = {}
280
281 @classmethod
282 def reset(cls):
283 # Needs to be called before starting new streamed data in a given process
284 # (e.g. writing out the cache again)
285 cls.save_map = {}
286 cls.save_count = 1
287 cls.restore_map = {}
288
289 @classmethod
290 def _save(cls, deps):
291 ret = []
292 if not deps:
293 return deps
294 for dep in deps:
295 fs = deps[dep]
296 if fs is None:
297 ret.append((dep, None, None))
298 elif fs in cls.save_map:
299 ret.append((dep, None, cls.save_map[fs]))
300 else:
301 cls.save_map[fs] = cls.save_count
302 ret.append((dep, fs, cls.save_count))
303 cls.save_count = cls.save_count + 1
304 return ret
305
306 @classmethod
307 def _restore(cls, deps, pid):
308 ret = {}
309 if not deps:
310 return deps
311 if pid not in cls.restore_map:
312 cls.restore_map[pid] = {}
313 map = cls.restore_map[pid]
314 for dep, fs, mapnum in deps:
315 if fs is None and mapnum is None:
316 ret[dep] = None
317 elif fs is None:
318 ret[dep] = map[mapnum]
319 else:
320 try:
321 fs = cls.store[fs]
322 except KeyError:
323 cls.store[fs] = fs
324 map[mapnum] = fs
325 ret[dep] = fs
326 return ret
327
328 def __getstate__(self):
329 ret = {}
330 for key in ["siggen_gendeps", "siggen_taskdeps", "siggen_varvals"]:
331 ret[key] = self._save(self.__dict__[key])
332 ret['pid'] = os.getpid()
333 return ret
334
335 def __setstate__(self, state):
336 pid = state['pid']
337 for key in ["siggen_gendeps", "siggen_taskdeps", "siggen_varvals"]:
338 setattr(self, key, self._restore(state[key], pid))
339
340
236def virtualfn2realfn(virtualfn): 341def virtualfn2realfn(virtualfn):
237 """ 342 """
238 Convert a virtual file name to a real one + the associated subclass keyword 343 Convert a virtual file name to a real one + the associated subclass keyword
239 """ 344 """
240 mc = "" 345 mc = ""
241 if virtualfn.startswith('mc:') and virtualfn.count(':') >= 2: 346 if virtualfn.startswith('mc:') and virtualfn.count(':') >= 2:
242 elems = virtualfn.split(':') 347 (_, mc, virtualfn) = virtualfn.split(':', 2)
243 mc = elems[1]
244 virtualfn = ":".join(elems[2:])
245 348
246 fn = virtualfn 349 fn = virtualfn
247 cls = "" 350 cls = ""
@@ -264,7 +367,7 @@ def realfn2virtual(realfn, cls, mc):
264 367
265def variant2virtual(realfn, variant): 368def variant2virtual(realfn, variant):
266 """ 369 """
267 Convert a real filename + the associated subclass keyword to a virtual filename 370 Convert a real filename + a variant to a virtual filename
268 """ 371 """
269 if variant == "": 372 if variant == "":
270 return realfn 373 return realfn
@@ -275,96 +378,18 @@ def variant2virtual(realfn, variant):
275 return "mc:" + elems[1] + ":" + realfn 378 return "mc:" + elems[1] + ":" + realfn
276 return "virtual:" + variant + ":" + realfn 379 return "virtual:" + variant + ":" + realfn
277 380
278def parse_recipe(bb_data, bbfile, appends, mc=''): 381#
279 """ 382# Cooker calls cacheValid on its recipe list, then either calls loadCached
280 Parse a recipe 383# from its main thread or parses from separate processes to generate an
281 """ 384# up-to-date cache
282 385#
283 chdir_back = False 386class Cache(object):
284
285 bb_data.setVar("__BBMULTICONFIG", mc)
286
287 # expand tmpdir to include this topdir
288 bb_data.setVar('TMPDIR', bb_data.getVar('TMPDIR') or "")
289 bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
290 oldpath = os.path.abspath(os.getcwd())
291 bb.parse.cached_mtime_noerror(bbfile_loc)
292
293 # The ConfHandler first looks if there is a TOPDIR and if not
294 # then it would call getcwd().
295 # Previously, we chdir()ed to bbfile_loc, called the handler
296 # and finally chdir()ed back, a couple of thousand times. We now
297 # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
298 if not bb_data.getVar('TOPDIR', False):
299 chdir_back = True
300 bb_data.setVar('TOPDIR', bbfile_loc)
301 try:
302 if appends:
303 bb_data.setVar('__BBAPPEND', " ".join(appends))
304 bb_data = bb.parse.handle(bbfile, bb_data)
305 if chdir_back:
306 os.chdir(oldpath)
307 return bb_data
308 except:
309 if chdir_back:
310 os.chdir(oldpath)
311 raise
312
313
314
315class NoCache(object):
316
317 def __init__(self, databuilder):
318 self.databuilder = databuilder
319 self.data = databuilder.data
320
321 def loadDataFull(self, virtualfn, appends):
322 """
323 Return a complete set of data for fn.
324 To do this, we need to parse the file.
325 """
326 logger.debug("Parsing %s (full)" % virtualfn)
327 (fn, virtual, mc) = virtualfn2realfn(virtualfn)
328 bb_data = self.load_bbfile(virtualfn, appends, virtonly=True)
329 return bb_data[virtual]
330
331 def load_bbfile(self, bbfile, appends, virtonly = False, mc=None):
332 """
333 Load and parse one .bb build file
334 Return the data and whether parsing resulted in the file being skipped
335 """
336
337 if virtonly:
338 (bbfile, virtual, mc) = virtualfn2realfn(bbfile)
339 bb_data = self.databuilder.mcdata[mc].createCopy()
340 bb_data.setVar("__ONLYFINALISE", virtual or "default")
341 datastores = parse_recipe(bb_data, bbfile, appends, mc)
342 return datastores
343
344 if mc is not None:
345 bb_data = self.databuilder.mcdata[mc].createCopy()
346 return parse_recipe(bb_data, bbfile, appends, mc)
347
348 bb_data = self.data.createCopy()
349 datastores = parse_recipe(bb_data, bbfile, appends)
350
351 for mc in self.databuilder.mcdata:
352 if not mc:
353 continue
354 bb_data = self.databuilder.mcdata[mc].createCopy()
355 newstores = parse_recipe(bb_data, bbfile, appends, mc)
356 for ns in newstores:
357 datastores["mc:%s:%s" % (mc, ns)] = newstores[ns]
358
359 return datastores
360
361class Cache(NoCache):
362 """ 387 """
363 BitBake Cache implementation 388 BitBake Cache implementation
364 """ 389 """
365 def __init__(self, databuilder, mc, data_hash, caches_array): 390 def __init__(self, databuilder, mc, data_hash, caches_array):
366 super().__init__(databuilder) 391 self.databuilder = databuilder
367 data = databuilder.data 392 self.data = databuilder.data
368 393
369 # Pass caches_array information into Cache Constructor 394 # Pass caches_array information into Cache Constructor
370 # It will be used later for deciding whether we 395 # It will be used later for deciding whether we
@@ -372,7 +397,7 @@ class Cache(NoCache):
372 self.mc = mc 397 self.mc = mc
373 self.logger = PrefixLoggerAdapter("Cache: %s: " % (mc if mc else "default"), logger) 398 self.logger = PrefixLoggerAdapter("Cache: %s: " % (mc if mc else "default"), logger)
374 self.caches_array = caches_array 399 self.caches_array = caches_array
375 self.cachedir = data.getVar("CACHE") 400 self.cachedir = self.data.getVar("CACHE")
376 self.clean = set() 401 self.clean = set()
377 self.checked = set() 402 self.checked = set()
378 self.depends_cache = {} 403 self.depends_cache = {}
@@ -382,20 +407,12 @@ class Cache(NoCache):
382 self.filelist_regex = re.compile(r'(?:(?<=:True)|(?<=:False))\s+') 407 self.filelist_regex = re.compile(r'(?:(?<=:True)|(?<=:False))\s+')
383 408
384 if self.cachedir in [None, '']: 409 if self.cachedir in [None, '']:
385 self.has_cache = False 410 bb.fatal("Please ensure CACHE is set to the cache directory for BitBake to use")
386 self.logger.info("Not using a cache. "
387 "Set CACHE = <directory> to enable.")
388 return
389
390 self.has_cache = True
391 411
392 def getCacheFile(self, cachefile): 412 def getCacheFile(self, cachefile):
393 return getCacheFile(self.cachedir, cachefile, self.mc, self.data_hash) 413 return getCacheFile(self.cachedir, cachefile, self.mc, self.data_hash)
394 414
395 def prepare_cache(self, progress): 415 def prepare_cache(self, progress):
396 if not self.has_cache:
397 return 0
398
399 loaded = 0 416 loaded = 0
400 417
401 self.cachefile = self.getCacheFile("bb_cache.dat") 418 self.cachefile = self.getCacheFile("bb_cache.dat")
@@ -434,9 +451,6 @@ class Cache(NoCache):
434 return loaded 451 return loaded
435 452
436 def cachesize(self): 453 def cachesize(self):
437 if not self.has_cache:
438 return 0
439
440 cachesize = 0 454 cachesize = 0
441 for cache_class in self.caches_array: 455 for cache_class in self.caches_array:
442 cachefile = self.getCacheFile(cache_class.cachefile) 456 cachefile = self.getCacheFile(cache_class.cachefile)
@@ -498,11 +512,11 @@ class Cache(NoCache):
498 512
499 return len(self.depends_cache) 513 return len(self.depends_cache)
500 514
501 def parse(self, filename, appends): 515 def parse(self, filename, appends, layername):
502 """Parse the specified filename, returning the recipe information""" 516 """Parse the specified filename, returning the recipe information"""
503 self.logger.debug("Parsing %s", filename) 517 self.logger.debug("Parsing %s", filename)
504 infos = [] 518 infos = []
505 datastores = self.load_bbfile(filename, appends, mc=self.mc) 519 datastores = self.databuilder.parseRecipeVariants(filename, appends, mc=self.mc, layername=layername)
506 depends = [] 520 depends = []
507 variants = [] 521 variants = []
508 # Process the "real" fn last so we can store variants list 522 # Process the "real" fn last so we can store variants list
@@ -524,43 +538,19 @@ class Cache(NoCache):
524 538
525 return infos 539 return infos
526 540
527 def load(self, filename, appends): 541 def loadCached(self, filename, appends):
528 """Obtain the recipe information for the specified filename, 542 """Obtain the recipe information for the specified filename,
529 using cached values if available, otherwise parsing. 543 using cached values.
530 544 """
531 Note that if it does parse to obtain the info, it will not
532 automatically add the information to the cache or to your
533 CacheData. Use the add or add_info method to do so after
534 running this, or use loadData instead."""
535 cached = self.cacheValid(filename, appends)
536 if cached:
537 infos = []
538 # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
539 info_array = self.depends_cache[filename]
540 for variant in info_array[0].variants:
541 virtualfn = variant2virtual(filename, variant)
542 infos.append((virtualfn, self.depends_cache[virtualfn]))
543 else:
544 return self.parse(filename, appends, configdata, self.caches_array)
545
546 return cached, infos
547
548 def loadData(self, fn, appends, cacheData):
549 """Load the recipe info for the specified filename,
550 parsing and adding to the cache if necessary, and adding
551 the recipe information to the supplied CacheData instance."""
552 skipped, virtuals = 0, 0
553 545
554 cached, infos = self.load(fn, appends) 546 infos = []
555 for virtualfn, info_array in infos: 547 # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
556 if info_array[0].skipped: 548 info_array = self.depends_cache[filename]
557 self.logger.debug("Skipping %s: %s", virtualfn, info_array[0].skipreason) 549 for variant in info_array[0].variants:
558 skipped += 1 550 virtualfn = variant2virtual(filename, variant)
559 else: 551 infos.append((virtualfn, self.depends_cache[virtualfn]))
560 self.add_info(virtualfn, info_array, cacheData, not cached)
561 virtuals += 1
562 552
563 return cached, skipped, virtuals 553 return infos
564 554
565 def cacheValid(self, fn, appends): 555 def cacheValid(self, fn, appends):
566 """ 556 """
@@ -569,10 +559,6 @@ class Cache(NoCache):
569 """ 559 """
570 if fn not in self.checked: 560 if fn not in self.checked:
571 self.cacheValidUpdate(fn, appends) 561 self.cacheValidUpdate(fn, appends)
572
573 # Is cache enabled?
574 if not self.has_cache:
575 return False
576 if fn in self.clean: 562 if fn in self.clean:
577 return True 563 return True
578 return False 564 return False
@@ -582,10 +568,6 @@ class Cache(NoCache):
582 Is the cache valid for fn? 568 Is the cache valid for fn?
583 Make thorough (slower) checks including timestamps. 569 Make thorough (slower) checks including timestamps.
584 """ 570 """
585 # Is cache enabled?
586 if not self.has_cache:
587 return False
588
589 self.checked.add(fn) 571 self.checked.add(fn)
590 572
591 # File isn't in depends_cache 573 # File isn't in depends_cache
@@ -636,7 +618,7 @@ class Cache(NoCache):
636 for f in flist: 618 for f in flist:
637 if not f: 619 if not f:
638 continue 620 continue
639 f, exist = f.split(":") 621 f, exist = f.rsplit(":", 1)
640 if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)): 622 if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
641 self.logger.debug2("%s's file checksum list file %s changed", 623 self.logger.debug2("%s's file checksum list file %s changed",
642 fn, f) 624 fn, f)
@@ -692,10 +674,6 @@ class Cache(NoCache):
692 Save the cache 674 Save the cache
693 Called from the parser when complete (or exiting) 675 Called from the parser when complete (or exiting)
694 """ 676 """
695
696 if not self.has_cache:
697 return
698
699 if self.cacheclean: 677 if self.cacheclean:
700 self.logger.debug2("Cache is clean, not saving.") 678 self.logger.debug2("Cache is clean, not saving.")
701 return 679 return
@@ -716,6 +694,7 @@ class Cache(NoCache):
716 p.dump(info) 694 p.dump(info)
717 695
718 del self.depends_cache 696 del self.depends_cache
697 SiggenRecipeInfo.reset()
719 698
720 @staticmethod 699 @staticmethod
721 def mtime(cachefile): 700 def mtime(cachefile):
@@ -738,26 +717,11 @@ class Cache(NoCache):
738 if watcher: 717 if watcher:
739 watcher(info_array[0].file_depends) 718 watcher(info_array[0].file_depends)
740 719
741 if not self.has_cache:
742 return
743
744 if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache: 720 if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache:
745 if parsed: 721 if parsed:
746 self.cacheclean = False 722 self.cacheclean = False
747 self.depends_cache[filename] = info_array 723 self.depends_cache[filename] = info_array
748 724
749 def add(self, file_name, data, cacheData, parsed=None):
750 """
751 Save data we need into the cache
752 """
753
754 realfn = virtualfn2realfn(file_name)[0]
755
756 info_array = []
757 for cache_class in self.caches_array:
758 info_array.append(cache_class(realfn, data))
759 self.add_info(file_name, info_array, cacheData, parsed)
760
761class MulticonfigCache(Mapping): 725class MulticonfigCache(Mapping):
762 def __init__(self, databuilder, data_hash, caches_array): 726 def __init__(self, databuilder, data_hash, caches_array):
763 def progress(p): 727 def progress(p):
@@ -794,6 +758,7 @@ class MulticonfigCache(Mapping):
794 loaded = 0 758 loaded = 0
795 759
796 for c in self.__caches.values(): 760 for c in self.__caches.values():
761 SiggenRecipeInfo.reset()
797 loaded += c.prepare_cache(progress) 762 loaded += c.prepare_cache(progress)
798 previous_progress = current_progress 763 previous_progress = current_progress
799 764
@@ -871,11 +836,10 @@ class MultiProcessCache(object):
871 self.cachedata = self.create_cachedata() 836 self.cachedata = self.create_cachedata()
872 self.cachedata_extras = self.create_cachedata() 837 self.cachedata_extras = self.create_cachedata()
873 838
874 def init_cache(self, d, cache_file_name=None): 839 def init_cache(self, cachedir, cache_file_name=None):
875 cachedir = (d.getVar("PERSISTENT_DIR") or 840 if not cachedir:
876 d.getVar("CACHE"))
877 if cachedir in [None, '']:
878 return 841 return
842
879 bb.utils.mkdirhier(cachedir) 843 bb.utils.mkdirhier(cachedir)
880 self.cachefile = os.path.join(cachedir, 844 self.cachefile = os.path.join(cachedir,
881 cache_file_name or self.__class__.cache_file_name) 845 cache_file_name or self.__class__.cache_file_name)
@@ -906,6 +870,10 @@ class MultiProcessCache(object):
906 if not self.cachefile: 870 if not self.cachefile:
907 return 871 return
908 872
873 have_data = any(self.cachedata_extras)
874 if not have_data:
875 return
876
909 glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True) 877 glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)
910 878
911 i = os.getpid() 879 i = os.getpid()
@@ -940,6 +908,8 @@ class MultiProcessCache(object):
940 908
941 data = self.cachedata 909 data = self.cachedata
942 910
911 have_data = False
912
943 for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]: 913 for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
944 f = os.path.join(os.path.dirname(self.cachefile), f) 914 f = os.path.join(os.path.dirname(self.cachefile), f)
945 try: 915 try:
@@ -954,12 +924,14 @@ class MultiProcessCache(object):
954 os.unlink(f) 924 os.unlink(f)
955 continue 925 continue
956 926
927 have_data = True
957 self.merge_data(extradata, data) 928 self.merge_data(extradata, data)
958 os.unlink(f) 929 os.unlink(f)
959 930
960 with open(self.cachefile, "wb") as f: 931 if have_data:
961 p = pickle.Pickler(f, -1) 932 with open(self.cachefile, "wb") as f:
962 p.dump([data, self.__class__.CACHE_VERSION]) 933 p = pickle.Pickler(f, -1)
934 p.dump([data, self.__class__.CACHE_VERSION])
963 935
964 bb.utils.unlockfile(glf) 936 bb.utils.unlockfile(glf)
965 937
@@ -1015,3 +987,11 @@ class SimpleCache(object):
1015 p.dump([data, self.cacheversion]) 987 p.dump([data, self.cacheversion])
1016 988
1017 bb.utils.unlockfile(glf) 989 bb.utils.unlockfile(glf)
990
991 def copyfile(self, target):
992 if not self.cachefile:
993 return
994
995 glf = bb.utils.lockfile(self.cachefile + ".lock")
996 shutil.copy(self.cachefile, target)
997 bb.utils.unlockfile(glf)
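
The _save()/_restore() pair in SiggenRecipeInfo above is an interning scheme: the first occurrence of a frozenset travels in full alongside a numeric id, and later occurrences travel as the id alone, which keeps IPC traffic and pickle size down. A simplified sketch of the exchange (class and method names are illustrative, and the None-value case is omitted):

    class Interner:
        def __init__(self):
            self.save_map = {}     # value -> id (sender side)
            self.restore_map = {}  # id -> value (receiver side)
            self.count = 1

        def save(self, value):
            # Returns (payload, id); payload is None for repeat values.
            if value in self.save_map:
                return (None, self.save_map[value])
            self.save_map[value] = self.count
            self.count += 1
            return (value, self.save_map[value])

        def restore(self, payload, ref):
            # First sighting stores the payload; later refs reuse it.
            if payload is not None:
                self.restore_map[ref] = payload
            return self.restore_map[ref]
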
diff --git a/bitbake/lib/bb/checksum.py b/bitbake/lib/bb/checksum.py
index 1d50a26426..557793d366 100644
--- a/bitbake/lib/bb/checksum.py
+++ b/bitbake/lib/bb/checksum.py
@@ -11,10 +11,13 @@ import os
11import stat 11import stat
12import bb.utils 12import bb.utils
13import logging 13import logging
14import re
14from bb.cache import MultiProcessCache 15from bb.cache import MultiProcessCache
15 16
16logger = logging.getLogger("BitBake.Cache") 17logger = logging.getLogger("BitBake.Cache")
17 18
19filelist_regex = re.compile(r'(?:(?<=:True)|(?<=:False))\s+')
20
18# mtime cache (non-persistent) 21# mtime cache (non-persistent)
19# based upon the assumption that files do not change during bitbake run 22# based upon the assumption that files do not change during bitbake run
20class FileMtimeCache(object): 23class FileMtimeCache(object):
@@ -50,6 +53,7 @@ class FileChecksumCache(MultiProcessCache):
50 MultiProcessCache.__init__(self) 53 MultiProcessCache.__init__(self)
51 54
52 def get_checksum(self, f): 55 def get_checksum(self, f):
56 f = os.path.normpath(f)
53 entry = self.cachedata[0].get(f) 57 entry = self.cachedata[0].get(f)
54 cmtime = self.mtime_cache.cached_mtime(f) 58 cmtime = self.mtime_cache.cached_mtime(f)
55 if entry: 59 if entry:
@@ -84,22 +88,36 @@ class FileChecksumCache(MultiProcessCache):
84 return None 88 return None
85 return checksum 89 return checksum
86 90
91 #
92 # Changing the format of file-checksums is problematic as both OE and Bitbake have
93 # knowledge of them. We need to encode a new piece of data, the portion of the path
94 # we care about from a checksum perspective. This means a file that moves to a different subdirectory
95 # is reflected in the task hashes. To do this, we do something horrible and put a "/./" into
96 # the path. The filesystem handles it but it gives us a marker to know which subsection
97 # of the path to cache.
98 #
87 def checksum_dir(pth): 99 def checksum_dir(pth):
88 # Handle directories recursively 100 # Handle directories recursively
89 if pth == "/": 101 if pth == "/":
90 bb.fatal("Refusing to checksum /") 102 bb.fatal("Refusing to checksum /")
103 pth = pth.rstrip("/")
91 dirchecksums = [] 104 dirchecksums = []
92 for root, dirs, files in os.walk(pth, topdown=True): 105 for root, dirs, files in os.walk(pth, topdown=True):
93 [dirs.remove(d) for d in list(dirs) if d in localdirsexclude] 106 [dirs.remove(d) for d in list(dirs) if d in localdirsexclude]
94 for name in files: 107 for name in files:
95 fullpth = os.path.join(root, name) 108 fullpth = os.path.join(root, name).replace(pth, os.path.join(pth, "."))
96 checksum = checksum_file(fullpth) 109 checksum = checksum_file(fullpth)
97 if checksum: 110 if checksum:
98 dirchecksums.append((fullpth, checksum)) 111 dirchecksums.append((fullpth, checksum))
99 return dirchecksums 112 return dirchecksums
100 113
101 checksums = [] 114 checksums = []
102 for pth in filelist.split(): 115 for pth in filelist_regex.split(filelist):
116 if not pth:
117 continue
118 pth = pth.strip()
119 if not pth:
120 continue
103 exist = pth.split(":")[1] 121 exist = pth.split(":")[1]
104 if exist == "False": 122 if exist == "False":
105 continue 123 continue
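
Given the "/./" marker described in the comment above, a consumer can recover the portion of a path that matters for hashing with a plain string split. A sketch of the idea (the helper name and paths are made-up examples):

    def split_checksum_path(path):
        # Everything before "/./" is location-specific scaffolding;
        # everything after it is the part worth tracking in the hash.
        if "/./" in path:
            prefix, tracked = path.split("/./", 1)
            return prefix, tracked
        return None, path

    # split_checksum_path("/work/src/./lib/foo.c") -> ("/work/src", "lib/foo.c")
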
diff --git a/bitbake/lib/bb/codeparser.py b/bitbake/lib/bb/codeparser.py
index 25a7ac69d3..2e8b7ced3c 100644
--- a/bitbake/lib/bb/codeparser.py
+++ b/bitbake/lib/bb/codeparser.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright BitBake Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -25,6 +27,7 @@ import ast
25import sys 27import sys
26import codegen 28import codegen
27import logging 29import logging
30import inspect
28import bb.pysh as pysh 31import bb.pysh as pysh
29import bb.utils, bb.data 32import bb.utils, bb.data
30import hashlib 33import hashlib
@@ -56,10 +59,40 @@ def check_indent(codestr):
56 59
57 return codestr 60 return codestr
58 61
59# A custom getstate/setstate using tuples is actually worth 15% cachesize by 62modulecode_deps = {}
60# avoiding duplication of the attribute names!
61 63
64def add_module_functions(fn, functions, namespace):
65 import os
66 fstat = os.stat(fn)
67 fixedhash = fn + ":" + str(fstat.st_size) + ":" + str(fstat.st_mtime)
68 for f in functions:
69 name = "%s.%s" % (namespace, f)
70 parser = PythonParser(name, logger)
71 try:
72 parser.parse_python(None, filename=fn, lineno=1, fixedhash=fixedhash+f)
73 #bb.warn("Cached %s" % f)
74 except KeyError:
75 lines, lineno = inspect.getsourcelines(functions[f])
76 src = "".join(lines)
77 parser.parse_python(src, filename=fn, lineno=lineno, fixedhash=fixedhash+f)
78 #bb.warn("Not cached %s" % f)
79 execs = parser.execs.copy()
80 # Expand internal module exec references
81 for e in parser.execs:
82 if e in functions:
83 execs.remove(e)
84 execs.add(namespace + "." + e)
85 modulecode_deps[name] = [parser.references.copy(), execs, parser.var_execs.copy(), parser.contains.copy()]
86 #bb.warn("%s: %s\nRefs:%s Execs: %s %s %s" % (name, fn, parser.references, parser.execs, parser.var_execs, parser.contains))
87
88def update_module_dependencies(d):
89 for mod in modulecode_deps:
90 excludes = set((d.getVarFlag(mod, "vardepsexclude") or "").split())
91 if excludes:
92 modulecode_deps[mod] = [modulecode_deps[mod][0] - excludes, modulecode_deps[mod][1] - excludes, modulecode_deps[mod][2] - excludes, modulecode_deps[mod][3]]
62 93
94# A custom getstate/setstate using tuples is actually worth 15% cachesize by
95# avoiding duplication of the attribute names!
63class SetCache(object): 96class SetCache(object):
64 def __init__(self): 97 def __init__(self):
65 self.setcache = {} 98 self.setcache = {}
@@ -152,12 +185,12 @@ class CodeParserCache(MultiProcessCache):
152 self.shellcachelines[h] = cacheline 185 self.shellcachelines[h] = cacheline
153 return cacheline 186 return cacheline
154 187
155 def init_cache(self, d): 188 def init_cache(self, cachedir):
156 # Check if we already have the caches 189 # Check if we already have the caches
157 if self.pythoncache: 190 if self.pythoncache:
158 return 191 return
159 192
160 MultiProcessCache.init_cache(self, d) 193 MultiProcessCache.init_cache(self, cachedir)
161 194
162 # cachedata gets re-assigned in the parent 195 # cachedata gets re-assigned in the parent
163 self.pythoncache = self.cachedata[0] 196 self.pythoncache = self.cachedata[0]
@@ -169,8 +202,8 @@ class CodeParserCache(MultiProcessCache):
169 202
170codeparsercache = CodeParserCache() 203codeparsercache = CodeParserCache()
171 204
172def parser_cache_init(d): 205def parser_cache_init(cachedir):
173 codeparsercache.init_cache(d) 206 codeparsercache.init_cache(cachedir)
174 207
175def parser_cache_save(): 208def parser_cache_save():
176 codeparsercache.save_extras() 209 codeparsercache.save_extras()
@@ -195,6 +228,10 @@ class BufferedLogger(Logger):
195 self.target.handle(record) 228 self.target.handle(record)
196 self.buffer = [] 229 self.buffer = []
197 230
231class DummyLogger():
232 def flush(self):
233 return
234
198class PythonParser(): 235class PythonParser():
199 getvars = (".getVar", ".appendVar", ".prependVar", "oe.utils.conditional") 236 getvars = (".getVar", ".appendVar", ".prependVar", "oe.utils.conditional")
200 getvarflags = (".getVarFlag", ".appendVarFlag", ".prependVarFlag") 237 getvarflags = (".getVarFlag", ".appendVarFlag", ".prependVarFlag")
@@ -212,26 +249,26 @@ class PythonParser():
212 funcstr = codegen.to_source(func) 249 funcstr = codegen.to_source(func)
213 argstr = codegen.to_source(arg) 250 argstr = codegen.to_source(arg)
214 except TypeError: 251 except TypeError:
215 self.log.debug(2, 'Failed to convert function and argument to source form') 252 self.log.debug2('Failed to convert function and argument to source form')
216 else: 253 else:
217 self.log.debug(1, self.unhandled_message % (funcstr, argstr)) 254 self.log.debug(self.unhandled_message % (funcstr, argstr))
218 255
219 def visit_Call(self, node): 256 def visit_Call(self, node):
220 name = self.called_node_name(node.func) 257 name = self.called_node_name(node.func)
221 if name and (name.endswith(self.getvars) or name.endswith(self.getvarflags) or name in self.containsfuncs or name in self.containsanyfuncs): 258 if name and (name.endswith(self.getvars) or name.endswith(self.getvarflags) or name in self.containsfuncs or name in self.containsanyfuncs):
222 if isinstance(node.args[0], ast.Str): 259 if isinstance(node.args[0], ast.Constant) and isinstance(node.args[0].value, str):
223 varname = node.args[0].s 260 varname = node.args[0].value
224 if name in self.containsfuncs and isinstance(node.args[1], ast.Str): 261 if name in self.containsfuncs and isinstance(node.args[1], ast.Constant):
225 if varname not in self.contains: 262 if varname not in self.contains:
226 self.contains[varname] = set() 263 self.contains[varname] = set()
227 self.contains[varname].add(node.args[1].s) 264 self.contains[varname].add(node.args[1].value)
228 elif name in self.containsanyfuncs and isinstance(node.args[1], ast.Str): 265 elif name in self.containsanyfuncs and isinstance(node.args[1], ast.Constant):
229 if varname not in self.contains: 266 if varname not in self.contains:
230 self.contains[varname] = set() 267 self.contains[varname] = set()
231 self.contains[varname].update(node.args[1].s.split()) 268 self.contains[varname].update(node.args[1].value.split())
232 elif name.endswith(self.getvarflags): 269 elif name.endswith(self.getvarflags):
233 if isinstance(node.args[1], ast.Str): 270 if isinstance(node.args[1], ast.Constant):
234 self.references.add('%s[%s]' % (varname, node.args[1].s)) 271 self.references.add('%s[%s]' % (varname, node.args[1].value))
235 else: 272 else:
236 self.warn(node.func, node.args[1]) 273 self.warn(node.func, node.args[1])
237 else: 274 else:
@@ -239,8 +276,8 @@ class PythonParser():
239 else: 276 else:
240 self.warn(node.func, node.args[0]) 277 self.warn(node.func, node.args[0])
241 elif name and name.endswith(".expand"): 278 elif name and name.endswith(".expand"):
242 if isinstance(node.args[0], ast.Str): 279 if isinstance(node.args[0], ast.Constant):
243 value = node.args[0].s 280 value = node.args[0].value
244 d = bb.data.init() 281 d = bb.data.init()
245 parser = d.expandWithRefs(value, self.name) 282 parser = d.expandWithRefs(value, self.name)
246 self.references |= parser.references 283 self.references |= parser.references
@@ -250,8 +287,8 @@ class PythonParser():
250 self.contains[varname] = set() 287 self.contains[varname] = set()
251 self.contains[varname] |= parser.contains[varname] 288 self.contains[varname] |= parser.contains[varname]
252 elif name in self.execfuncs: 289 elif name in self.execfuncs:
253 if isinstance(node.args[0], ast.Str): 290 if isinstance(node.args[0], ast.Constant):
254 self.var_execs.add(node.args[0].s) 291 self.var_execs.add(node.args[0].value)
255 else: 292 else:
256 self.warn(node.func, node.args[0]) 293 self.warn(node.func, node.args[0])
257 elif name and isinstance(node.func, (ast.Name, ast.Attribute)): 294 elif name and isinstance(node.func, (ast.Name, ast.Attribute)):
@@ -276,16 +313,24 @@ class PythonParser():
276 self.contains = {} 313 self.contains = {}
277 self.execs = set() 314 self.execs = set()
278 self.references = set() 315 self.references = set()
279 self.log = BufferedLogger('BitBake.Data.PythonParser', logging.DEBUG, log) 316 self._log = log
317 # Defer init as expensive
318 self.log = DummyLogger()
280 319
281 self.unhandled_message = "in call of %s, argument '%s' is not a string literal" 320 self.unhandled_message = "in call of %s, argument '%s' is not a string literal"
282 self.unhandled_message = "while parsing %s, %s" % (name, self.unhandled_message) 321 self.unhandled_message = "while parsing %s, %s" % (name, self.unhandled_message)
283 322
284 def parse_python(self, node, lineno=0, filename="<string>"): 323 # For the python module code it is expensive to have the function text, so it
285 if not node or not node.strip(): 324 # uses a different fixedhash to cache against. We can take the hit on obtaining the
325 # text if it isn't in the cache.
326 def parse_python(self, node, lineno=0, filename="<string>", fixedhash=None):
327 if not fixedhash and (not node or not node.strip()):
286 return 328 return
287 329
288 h = bbhash(str(node)) 330 if fixedhash:
331 h = fixedhash
332 else:
333 h = bbhash(str(node))
289 334
290 if h in codeparsercache.pythoncache: 335 if h in codeparsercache.pythoncache:
291 self.references = set(codeparsercache.pythoncache[h].refs) 336 self.references = set(codeparsercache.pythoncache[h].refs)
@@ -303,6 +348,12 @@ class PythonParser():
303 self.contains[i] = set(codeparsercache.pythoncacheextras[h].contains[i]) 348 self.contains[i] = set(codeparsercache.pythoncacheextras[h].contains[i])
304 return 349 return
305 350
351 if fixedhash and not node:
352 raise KeyError
353
354 # Need to parse so take the hit on the real log buffer
355 self.log = BufferedLogger('BitBake.Data.PythonParser', logging.DEBUG, self._log)
356
306 # We can't add to the linenumbers for compile, we can pad to the correct number of blank lines though 357 # We can't add to the linenumbers for compile, we can pad to the correct number of blank lines though
307 node = "\n" * int(lineno) + node 358 node = "\n" * int(lineno) + node
308 code = compile(check_indent(str(node)), filename, "exec", 359 code = compile(check_indent(str(node)), filename, "exec",
@@ -321,7 +372,11 @@ class ShellParser():
321 self.funcdefs = set() 372 self.funcdefs = set()
322 self.allexecs = set() 373 self.allexecs = set()
323 self.execs = set() 374 self.execs = set()
324 self.log = BufferedLogger('BitBake.Data.%s' % name, logging.DEBUG, log) 375 self._name = name
376 self._log = log
377 # Defer init as expensive
378 self.log = DummyLogger()
379
325 self.unhandled_template = "unable to handle non-literal command '%s'" 380 self.unhandled_template = "unable to handle non-literal command '%s'"
326 self.unhandled_template = "while parsing %s, %s" % (name, self.unhandled_template) 381 self.unhandled_template = "while parsing %s, %s" % (name, self.unhandled_template)
327 382
@@ -340,6 +395,9 @@ class ShellParser():
340 self.execs = set(codeparsercache.shellcacheextras[h].execs) 395 self.execs = set(codeparsercache.shellcacheextras[h].execs)
341 return self.execs 396 return self.execs
342 397
398 # Need to parse so take the hit on the real log buffer
399 self.log = BufferedLogger('BitBake.Data.%s' % self._name, logging.DEBUG, self._log)
400
343 self._parse_shell(value) 401 self._parse_shell(value)
344 self.execs = set(cmd for cmd in self.allexecs if cmd not in self.funcdefs) 402 self.execs = set(cmd for cmd in self.allexecs if cmd not in self.funcdefs)
345 403
@@ -450,7 +508,7 @@ class ShellParser():
450 508
451 cmd = word[1] 509 cmd = word[1]
452 if cmd.startswith("$"): 510 if cmd.startswith("$"):
453 self.log.debug(1, self.unhandled_template % cmd) 511 self.log.debug(self.unhandled_template % cmd)
454 elif cmd == "eval": 512 elif cmd == "eval":
455 command = " ".join(word for _, word in words[1:]) 513 command = " ".join(word for _, word in words[1:])
456 self._parse_shell(command) 514 self._parse_shell(command)
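
The fixedhash used by add_module_functions above avoids reading function bodies on every parse: the file's path, size and mtime stand in for the text, and the source is only fetched via inspect on a cache miss. A sketch of building such a key (the helper name is hypothetical):

    import os

    def module_fixedhash(fn, funcname):
        # Path, size and mtime all change when the file changes, so
        # together they are a safe stand-in for hashing the text itself.
        fstat = os.stat(fn)
        return "%s:%s:%s%s" % (fn, fstat.st_size, fstat.st_mtime, funcname)
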
diff --git a/bitbake/lib/bb/command.py b/bitbake/lib/bb/command.py
index dd77cdd6e2..1fcb9bf14c 100644
--- a/bitbake/lib/bb/command.py
+++ b/bitbake/lib/bb/command.py
@@ -20,6 +20,7 @@ Commands are queued in a CommandQueue
20 20
21from collections import OrderedDict, defaultdict 21from collections import OrderedDict, defaultdict
22 22
23import io
23import bb.event 24import bb.event
24import bb.cooker 25import bb.cooker
25import bb.remotedata 26import bb.remotedata
@@ -50,23 +51,32 @@ class Command:
50 """ 51 """
51 A queue of asynchronous commands for bitbake 52 A queue of asynchronous commands for bitbake
52 """ 53 """
53 def __init__(self, cooker): 54 def __init__(self, cooker, process_server):
54 self.cooker = cooker 55 self.cooker = cooker
55 self.cmds_sync = CommandsSync() 56 self.cmds_sync = CommandsSync()
56 self.cmds_async = CommandsAsync() 57 self.cmds_async = CommandsAsync()
57 self.remotedatastores = None 58 self.remotedatastores = None
58 59
59 # FIXME Add lock for this 60 self.process_server = process_server
61 # Access with locking using process_server.{get/set/clear}_async_cmd()
60 self.currentAsyncCommand = None 62 self.currentAsyncCommand = None
61 63
62 def runCommand(self, commandline, ro_only = False): 64 def runCommand(self, commandline, process_server, ro_only=False):
63 command = commandline.pop(0) 65 command = commandline.pop(0)
64 66
65 # Ensure cooker is ready for commands 67 # Ensure cooker is ready for commands
66 if command != "updateConfig" and command != "setFeatures": 68 if command not in ["updateConfig", "setFeatures", "ping"]:
67 self.cooker.init_configdata() 69 try:
68 if not self.remotedatastores: 70 self.cooker.init_configdata()
69 self.remotedatastores = bb.remotedata.RemoteDatastores(self.cooker) 71 if not self.remotedatastores:
72 self.remotedatastores = bb.remotedata.RemoteDatastores(self.cooker)
73 except (Exception, SystemExit) as exc:
74 import traceback
75 if isinstance(exc, bb.BBHandledException):
76 # We need to start returning real exceptions here. Until we do, we can't
77 # tell if an exception is an instance of bb.BBHandledException
78 return None, "bb.BBHandledException()\n" + traceback.format_exc()
79 return None, traceback.format_exc()
70 80
71 if hasattr(CommandsSync, command): 81 if hasattr(CommandsSync, command):
72 # Can run synchronous commands straight away 82 # Can run synchronous commands straight away
@@ -75,7 +85,6 @@ class Command:
75 if not hasattr(command_method, 'readonly') or not getattr(command_method, 'readonly'): 85 if not hasattr(command_method, 'readonly') or not getattr(command_method, 'readonly'):
76 return None, "Not able to execute not readonly commands in readonly mode" 86 return None, "Not able to execute not readonly commands in readonly mode"
77 try: 87 try:
78 self.cooker.process_inotify_updates()
79 if getattr(command_method, 'needconfig', True): 88 if getattr(command_method, 'needconfig', True):
80 self.cooker.updateCacheSync() 89 self.cooker.updateCacheSync()
81 result = command_method(self, commandline) 90 result = command_method(self, commandline)
@@ -90,24 +99,23 @@ class Command:
90 return None, traceback.format_exc() 99 return None, traceback.format_exc()
91 else: 100 else:
92 return result, None 101 return result, None
93 if self.currentAsyncCommand is not None:
94 return None, "Busy (%s in progress)" % self.currentAsyncCommand[0]
95 if command not in CommandsAsync.__dict__: 102 if command not in CommandsAsync.__dict__:
96 return None, "No such command" 103 return None, "No such command"
97 self.currentAsyncCommand = (command, commandline) 104 if not process_server.set_async_cmd((command, commandline)):
98 self.cooker.idleCallBackRegister(self.cooker.runCommands, self.cooker) 105 return None, "Busy (%s in progress)" % self.process_server.get_async_cmd()[0]
106 self.cooker.idleCallBackRegister(self.runAsyncCommand, process_server)
99 return True, None 107 return True, None
100 108
101 def runAsyncCommand(self): 109 def runAsyncCommand(self, _, process_server, halt):
102 try: 110 try:
103 self.cooker.process_inotify_updates()
104 if self.cooker.state in (bb.cooker.state.error, bb.cooker.state.shutdown, bb.cooker.state.forceshutdown): 111 if self.cooker.state in (bb.cooker.state.error, bb.cooker.state.shutdown, bb.cooker.state.forceshutdown):
105 # updateCache will trigger a shutdown of the parser 112 # updateCache will trigger a shutdown of the parser
106 # and then raise BBHandledException triggering an exit 113 # and then raise BBHandledException triggering an exit
107 self.cooker.updateCache() 114 self.cooker.updateCache()
108 return False 115 return bb.server.process.idleFinish("Cooker in error state")
109 if self.currentAsyncCommand is not None: 116 cmd = process_server.get_async_cmd()
110 (command, options) = self.currentAsyncCommand 117 if cmd is not None:
118 (command, options) = cmd
111 commandmethod = getattr(CommandsAsync, command) 119 commandmethod = getattr(CommandsAsync, command)
112 needcache = getattr( commandmethod, "needcache" ) 120 needcache = getattr( commandmethod, "needcache" )
113 if needcache and self.cooker.state != bb.cooker.state.running: 121 if needcache and self.cooker.state != bb.cooker.state.running:
@@ -117,24 +125,21 @@ class Command:
117 commandmethod(self.cmds_async, self, options) 125 commandmethod(self.cmds_async, self, options)
118 return False 126 return False
119 else: 127 else:
120 return False 128 return bb.server.process.idleFinish("Nothing to do, no async command?")
121 except KeyboardInterrupt as exc: 129 except KeyboardInterrupt as exc:
122 self.finishAsyncCommand("Interrupted") 130 return bb.server.process.idleFinish("Interrupted")
123 return False
124 except SystemExit as exc: 131 except SystemExit as exc:
125 arg = exc.args[0] 132 arg = exc.args[0]
126 if isinstance(arg, str): 133 if isinstance(arg, str):
127 self.finishAsyncCommand(arg) 134 return bb.server.process.idleFinish(arg)
128 else: 135 else:
129 self.finishAsyncCommand("Exited with %s" % arg) 136 return bb.server.process.idleFinish("Exited with %s" % arg)
130 return False
131 except Exception as exc: 137 except Exception as exc:
132 import traceback 138 import traceback
133 if isinstance(exc, bb.BBHandledException): 139 if isinstance(exc, bb.BBHandledException):
134 self.finishAsyncCommand("") 140 return bb.server.process.idleFinish("")
135 else: 141 else:
136 self.finishAsyncCommand(traceback.format_exc()) 142 return bb.server.process.idleFinish(traceback.format_exc())
137 return False
138 143
139 def finishAsyncCommand(self, msg=None, code=None): 144 def finishAsyncCommand(self, msg=None, code=None):
140 if msg or msg == "": 145 if msg or msg == "":
@@ -143,8 +148,8 @@ class Command:
143 bb.event.fire(CommandExit(code), self.cooker.data) 148 bb.event.fire(CommandExit(code), self.cooker.data)
144 else: 149 else:
145 bb.event.fire(CommandCompleted(), self.cooker.data) 150 bb.event.fire(CommandCompleted(), self.cooker.data)
146 self.currentAsyncCommand = None
147 self.cooker.finishcommand() 151 self.cooker.finishcommand()
152 self.process_server.clear_async_cmd()
148 153
149 def reset(self): 154 def reset(self):
150 if self.remotedatastores: 155 if self.remotedatastores:
@@ -157,6 +162,14 @@ class CommandsSync:
157 These must not influence any running synchronous command. 162 These must not influence any running synchronous command.
158 """ 163 """
159 164
165 def ping(self, command, params):
166 """
167 Allow a UI to check the server is still alive
168 """
169 return "Still alive!"
170 ping.needconfig = False
171 ping.readonly = True
172
160 def stateShutdown(self, command, params): 173 def stateShutdown(self, command, params):
161 """ 174 """
162 Trigger cooker 'shutdown' mode 175 Trigger cooker 'shutdown' mode
@@ -294,6 +307,11 @@ class CommandsSync:
294 return ret 307 return ret
295 getLayerPriorities.readonly = True 308 getLayerPriorities.readonly = True
296 309
310 def revalidateCaches(self, command, params):
311 """Called by UI clients when metadata may have changed"""
312 command.cooker.revalidateCaches()
313 revalidateCaches.needconfig = False
314
297 def getRecipes(self, command, params): 315 def getRecipes(self, command, params):
298 try: 316 try:
299 mc = params[0] 317 mc = params[0]
@@ -500,6 +518,17 @@ class CommandsSync:
500 d = command.remotedatastores[dsindex].varhistory 518 d = command.remotedatastores[dsindex].varhistory
501 return getattr(d, method)(*args, **kwargs) 519 return getattr(d, method)(*args, **kwargs)
502 520
521 def dataStoreConnectorVarHistCmdEmit(self, command, params):
522 dsindex = params[0]
523 var = params[1]
524 oval = params[2]
525 val = params[3]
526 d = command.remotedatastores[params[4]]
527
528 o = io.StringIO()
529 command.remotedatastores[dsindex].varhistory.emit(var, oval, val, o, d)
530 return o.getvalue()
531
503 def dataStoreConnectorIncHistCmd(self, command, params): 532 def dataStoreConnectorIncHistCmd(self, command, params):
504 dsindex = params[0] 533 dsindex = params[0]
505 method = params[1] 534 method = params[1]
@@ -521,8 +550,8 @@ class CommandsSync:
521 and return a datastore object representing the environment 550 and return a datastore object representing the environment
522 for the recipe. 551 for the recipe.
523 """ 552 """
524 fn = params[0] 553 virtualfn = params[0]
525 mc = bb.runqueue.mc_from_tid(fn) 554 (fn, cls, mc) = bb.cache.virtualfn2realfn(virtualfn)
526 appends = params[1] 555 appends = params[1]
527 appendlist = params[2] 556 appendlist = params[2]
528 if len(params) > 3: 557 if len(params) > 3:
@@ -537,6 +566,7 @@ class CommandsSync:
537 appendfiles = command.cooker.collections[mc].get_file_appends(fn) 566 appendfiles = command.cooker.collections[mc].get_file_appends(fn)
538 else: 567 else:
539 appendfiles = [] 568 appendfiles = []
569 layername = command.cooker.collections[mc].calc_bbfile_priority(fn)[2]
540 # We are calling bb.cache locally here rather than on the server, 570 # We are calling bb.cache locally here rather than on the server,
541 # but that's OK because it doesn't actually need anything from 571 # but that's OK because it doesn't actually need anything from
542 # the server barring the global datastore (which we have a remote 572 # the server barring the global datastore (which we have a remote
@@ -544,11 +574,10 @@ class CommandsSync:
544 if config_data: 574 if config_data:
545 # We have to use a different function here if we're passing in a datastore 575 # We have to use a different function here if we're passing in a datastore
546 # NOTE: we took a copy above, so we don't do it here again 576 # NOTE: we took a copy above, so we don't do it here again
547 envdata = bb.cache.parse_recipe(config_data, fn, appendfiles, mc)[''] 577 envdata = command.cooker.databuilder._parse_recipe(config_data, fn, appendfiles, mc, layername)[cls]
548 else: 578 else:
549 # Use the standard path 579 # Use the standard path
550 parser = bb.cache.NoCache(command.cooker.databuilder) 580 envdata = command.cooker.databuilder.parseRecipe(virtualfn, appendfiles, layername)
551 envdata = parser.loadDataFull(fn, appendfiles)
552 idx = command.remotedatastores.store(envdata) 581 idx = command.remotedatastores.store(envdata)
553 return DataStoreConnectionHandle(idx) 582 return DataStoreConnectionHandle(idx)
554 parseRecipeFile.readonly = True 583 parseRecipeFile.readonly = True
@@ -647,6 +676,16 @@ class CommandsAsync:
647 command.finishAsyncCommand() 676 command.finishAsyncCommand()
648 findFilesMatchingInDir.needcache = False 677 findFilesMatchingInDir.needcache = False
649 678
679 def testCookerCommandEvent(self, command, params):
680 """
681 Dummy command used by OEQA selftest to test tinfoil without IO
682 """
683 pattern = params[0]
684
685 command.cooker.testCookerCommandEvent(pattern)
686 command.finishAsyncCommand()
687 testCookerCommandEvent.needcache = False
688
650 def findConfigFilePath(self, command, params): 689 def findConfigFilePath(self, command, params):
651 """ 690 """
652 Find the path of the requested configuration file 691 Find the path of the requested configuration file
@@ -711,7 +750,7 @@ class CommandsAsync:
711 """ 750 """
712 event = params[0] 751 event = params[0]
713 bb.event.fire(eval(event), command.cooker.data) 752 bb.event.fire(eval(event), command.cooker.data)
714 command.currentAsyncCommand = None 753 process_server.clear_async_cmd()
715 triggerEvent.needcache = False 754 triggerEvent.needcache = False
716 755
717 def resetCooker(self, command, params): 756 def resetCooker(self, command, params):
@@ -738,7 +777,14 @@ class CommandsAsync:
738 (mc, pn) = bb.runqueue.split_mc(params[0]) 777 (mc, pn) = bb.runqueue.split_mc(params[0])
739 taskname = params[1] 778 taskname = params[1]
740 sigs = params[2] 779 sigs = params[2]
780 bb.siggen.check_siggen_version(bb.siggen)
741 res = bb.siggen.find_siginfo(pn, taskname, sigs, command.cooker.databuilder.mcdata[mc]) 781 res = bb.siggen.find_siginfo(pn, taskname, sigs, command.cooker.databuilder.mcdata[mc])
742 bb.event.fire(bb.event.FindSigInfoResult(res), command.cooker.databuilder.mcdata[mc]) 782 bb.event.fire(bb.event.FindSigInfoResult(res), command.cooker.databuilder.mcdata[mc])
743 command.finishAsyncCommand() 783 command.finishAsyncCommand()
744 findSigInfo.needcache = False 784 findSigInfo.needcache = False
785
786 def getTaskSignatures(self, command, params):
787 res = command.cooker.getTaskSignatures(params[0], params[1])
788 bb.event.fire(bb.event.GetTaskSignatureResult(res), command.cooker.data)
789 command.finishAsyncCommand()
790 getTaskSignatures.needcache = True
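
The per-command policy flags seen throughout command.py (needconfig, needcache, readonly) are plain function attributes set immediately after each handler, which the dispatcher then reads back with getattr(). A minimal sketch of the pattern (class and command names are illustrative):

    class Commands:
        def ping(self, params):
            return "Still alive!"
        ping.readonly = True

        def rebuild(self, params):
            return "rebuilding"

    def dispatch(cmds, name, params, ro_only):
        handler = getattr(cmds, name)
        if ro_only and not getattr(handler, "readonly", False):
            return None, "command not available in readonly mode"
        return handler(params), None

    # dispatch(Commands(), "ping", [], ro_only=True) -> ("Still alive!", None)
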
diff --git a/bitbake/lib/bb/compress/_pipecompress.py b/bitbake/lib/bb/compress/_pipecompress.py
new file mode 100644
index 0000000000..4a403d62cf
--- /dev/null
+++ b/bitbake/lib/bb/compress/_pipecompress.py
@@ -0,0 +1,196 @@
1#
2# Copyright BitBake Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6# Helper library to implement streaming compression and decompression using an
7# external process
8#
9# This library should not be used directly by end users; a wrapper library for the
10# specific compression tool should be created
11
12import builtins
13import io
14import os
15import subprocess
16
17
18def open_wrap(
19 cls, filename, mode="rb", *, encoding=None, errors=None, newline=None, **kwargs
20):
21 """
22 Open a compressed file in binary or text mode.
23
24 Users should not call this directly. A specific compression library can use
25 this helper to provide its own "open" command
26
27 The filename argument can be an actual filename (a str or bytes object), or
28 an existing file object to read from or write to.
29
30 The mode argument can be "r", "rb", "w", "wb", "x", "xb", "a" or "ab" for
31 binary mode, or "rt", "wt", "xt" or "at" for text mode. The default mode is
32 "rb".
33
34 For binary mode, this function is equivalent to the cls constructor:
35 cls(filename, mode). In this case, the encoding, errors and newline
36 arguments must not be provided.
37
38 For text mode, a cls object is created, and wrapped in an
39 io.TextIOWrapper instance with the specified encoding, error handling
40 behavior, and line ending(s).
41 """
42 if "t" in mode:
43 if "b" in mode:
44 raise ValueError("Invalid mode: %r" % (mode,))
45 else:
46 if encoding is not None:
47 raise ValueError("Argument 'encoding' not supported in binary mode")
48 if errors is not None:
49 raise ValueError("Argument 'errors' not supported in binary mode")
50 if newline is not None:
51 raise ValueError("Argument 'newline' not supported in binary mode")
52
53 file_mode = mode.replace("t", "")
54 if isinstance(filename, (str, bytes, os.PathLike, int)):
55 binary_file = cls(filename, file_mode, **kwargs)
56 elif hasattr(filename, "read") or hasattr(filename, "write"):
57 binary_file = cls(None, file_mode, fileobj=filename, **kwargs)
58 else:
59 raise TypeError("filename must be a str or bytes object, or a file")
60
61 if "t" in mode:
62 return io.TextIOWrapper(
63 binary_file, encoding, errors, newline, write_through=True
64 )
65 else:
66 return binary_file
67
68
69class CompressionError(OSError):
70 pass
71
72
73class PipeFile(io.RawIOBase):
74 """
75 Class that implements generically piping to/from a compression program
76
77 Derived classes should add the function get_compress() and get_decompress()
78 that return the required commands. Input will be piped into stdin and the
79 (de)compressed output should be written to stdout, e.g.:
80
81 class FooFile(PipeFile):
82 def get_decompress(self):
83 return ["fooc", "--decompress", "--stdout"]
84
85 def get_compress(self):
86 return ["fooc", "--compress", "--stdout"]
87
88 """
89
90 READ = 0
91 WRITE = 1
92
93 def __init__(self, filename=None, mode="rb", *, stderr=None, fileobj=None):
94 if "t" in mode or "U" in mode:
95 raise ValueError("Invalid mode: {!r}".format(mode))
96
97 if "b" not in mode:
98 mode += "b"
99
100 if mode.startswith("r"):
101 self.mode = self.READ
102 elif mode.startswith("w"):
103 self.mode = self.WRITE
104 else:
105 raise ValueError("Invalid mode %r" % mode)
106
107 if fileobj is not None:
108 self.fileobj = fileobj
109 else:
110 self.fileobj = builtins.open(filename, mode or "rb")
111
112 if self.mode == self.READ:
113 self.p = subprocess.Popen(
114 self.get_decompress(),
115 stdin=self.fileobj,
116 stdout=subprocess.PIPE,
117 stderr=stderr,
118 close_fds=True,
119 )
120 self.pipe = self.p.stdout
121 else:
122 self.p = subprocess.Popen(
123 self.get_compress(),
124 stdin=subprocess.PIPE,
125 stdout=self.fileobj,
126 stderr=stderr,
127 close_fds=True,
128 )
129 self.pipe = self.p.stdin
130
131 self.__closed = False
132
133 def _check_process(self):
134 if self.p is None:
135 return
136
137 returncode = self.p.wait()
138 if returncode:
139 raise CompressionError("Process died with %d" % returncode)
140 self.p = None
141
142 def close(self):
143 if self.closed:
144 return
145
146 self.pipe.close()
147 if self.p is not None:
148 self._check_process()
149 self.fileobj.close()
150
151 self.__closed = True
152
153 @property
154 def closed(self):
155 return self.__closed
156
157 def fileno(self):
158 return self.pipe.fileno()
159
160 def flush(self):
161 self.pipe.flush()
162
163 def isatty(self):
164 return self.pipe.isatty()
165
166 def readable(self):
167 return self.mode == self.READ
168
169 def writable(self):
170 return self.mode == self.WRITE
171
172 def readinto(self, b):
173 if self.mode != self.READ:
174 import errno
175
176 raise OSError(
177 errno.EBADF, "read() on write-only %s object" % self.__class__.__name__
178 )
179 size = self.pipe.readinto(b)
180 if size == 0:
181 self._check_process()
182 return size
183
184 def write(self, data):
185 if self.mode != self.WRITE:
186 import errno
187
188 raise OSError(
189 errno.EBADF, "write() on read-only %s object" % self.__class__.__name__
190 )
191 data = self.pipe.write(data)
192
193 if not data:
194 self._check_process()
195
196 return data
diff --git a/bitbake/lib/bb/compress/lz4.py b/bitbake/lib/bb/compress/lz4.py
new file mode 100644
index 0000000000..88b0989322
--- /dev/null
+++ b/bitbake/lib/bb/compress/lz4.py
@@ -0,0 +1,19 @@
1#
2# Copyright BitBake Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import bb.compress._pipecompress
8
9
10def open(*args, **kwargs):
11 return bb.compress._pipecompress.open_wrap(LZ4File, *args, **kwargs)
12
13
14class LZ4File(bb.compress._pipecompress.PipeFile):
15 def get_compress(self):
16 return ["lz4c", "-z", "-c"]
17
18 def get_decompress(self):
19 return ["lz4c", "-d", "-c"]
diff --git a/bitbake/lib/bb/compress/zstd.py b/bitbake/lib/bb/compress/zstd.py
new file mode 100644
index 0000000000..cdbbe9d60f
--- /dev/null
+++ b/bitbake/lib/bb/compress/zstd.py
@@ -0,0 +1,30 @@
1#
2# Copyright BitBake Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import bb.compress._pipecompress
8import shutil
9
10
11def open(*args, **kwargs):
12 return bb.compress._pipecompress.open_wrap(ZstdFile, *args, **kwargs)
13
14
15class ZstdFile(bb.compress._pipecompress.PipeFile):
16 def __init__(self, *args, num_threads=1, compresslevel=3, **kwargs):
17 self.num_threads = num_threads
18 self.compresslevel = compresslevel
19 super().__init__(*args, **kwargs)
20
21 def _get_zstd(self):
22 if self.num_threads == 1 or not shutil.which("pzstd"):
23 return ["zstd"]
24 return ["pzstd", "-p", "%d" % self.num_threads]
25
26 def get_compress(self):
27 return self._get_zstd() + ["-c", "-%d" % self.compresslevel]
28
29 def get_decompress(self):
30 return self._get_zstd() + ["-d", "-c"]
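Note that _get_zstd() only selects pzstd when num_threads > 1 and the binary is actually installed, so callers can request threads unconditionally and degrade gracefully to single-threaded zstd. Assuming open_wrap() forwards keyword arguments to the file class, as the ZstdFile constructor implies:

    import bb.compress.zstd

    # Uses "pzstd -p 4" when available, otherwise plain "zstd"
    with bb.compress.zstd.open("/tmp/demo.zst", "wb", num_threads=4, compresslevel=19) as f:
        f.write(b"payload")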
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index f4ab797edf..c5bfef55d6 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -13,7 +13,6 @@ import sys, os, glob, os.path, re, time
13import itertools 13import itertools
14import logging 14import logging
15import multiprocessing 15import multiprocessing
16import sre_constants
17import threading 16import threading
18from io import StringIO, UnsupportedOperation 17from io import StringIO, UnsupportedOperation
19from contextlib import closing 18from contextlib import closing
@@ -23,7 +22,6 @@ from bb import utils, data, parse, event, cache, providers, taskdata, runqueue,
23import queue 22import queue
24import signal 23import signal
25import prserv.serv 24import prserv.serv
26import pyinotify
27import json 25import json
28import pickle 26import pickle
29import codecs 27import codecs
@@ -81,7 +79,7 @@ class SkippedPackage:
81 79
82 80
83class CookerFeatures(object): 81class CookerFeatures(object):
84 _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3)) 82 _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS, RECIPE_SIGGEN_INFO] = list(range(4))
85 83
86 def __init__(self): 84 def __init__(self):
87 self._features=set() 85 self._features=set()
@@ -104,12 +102,15 @@ class CookerFeatures(object):
104 102
105class EventWriter: 103class EventWriter:
106 def __init__(self, cooker, eventfile): 104 def __init__(self, cooker, eventfile):
107 self.file_inited = None
108 self.cooker = cooker 105 self.cooker = cooker
109 self.eventfile = eventfile 106 self.eventfile = eventfile
110 self.event_queue = [] 107 self.event_queue = []
111 108
112 def write_event(self, event): 109 def write_variables(self):
110 with open(self.eventfile, "a") as f:
111 f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))
112
113 def send(self, event):
113 with open(self.eventfile, "a") as f: 114 with open(self.eventfile, "a") as f:
114 try: 115 try:
115 str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8') 116 str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
@@ -119,28 +120,6 @@ class EventWriter:
119 import traceback 120 import traceback
120 print(err, traceback.format_exc()) 121 print(err, traceback.format_exc())
121 122
122 def send(self, event):
123 if self.file_inited:
124 # we have the file, just write the event
125 self.write_event(event)
126 else:
127 # init on bb.event.BuildStarted
128 name = "%s.%s" % (event.__module__, event.__class__.__name__)
129 if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
130 with open(self.eventfile, "w") as f:
131 f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))
132
133 self.file_inited = True
134
135 # write pending events
136 for evt in self.event_queue:
137 self.write_event(evt)
138
139 # also write the current event
140 self.write_event(event)
141 else:
142 # queue all events until the file is inited
143 self.event_queue.append(event)
144 123
145#============================================================================# 124#============================================================================#
146# BBCooker 125# BBCooker
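This hunk drops the lazy init-on-BuildStarted queueing: write_variables() now emits the "allvariables" JSON record explicitly (it is called from the build-start paths later in this diff), and send() appends one base64-encoded pickle per event. Decoding is therefore the exact inverse of the encoding shown above; a consumer along the lines of bb/ui/eventreplay.py might use:

    import codecs
    import pickle

    # str_event is the base64 text produced by send() above
    def decode_event(str_event):
        return pickle.loads(codecs.decode(str_event.encode("utf-8"), "base64"))

Consumers should skip the leading "allvariables" record written by write_variables() before decoding events.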
@@ -150,8 +129,10 @@ class BBCooker:
150 Manages one bitbake build run 129 Manages one bitbake build run
151 """ 130 """
152 131
153 def __init__(self, featureSet=None, idleCallBackRegister=None): 132 def __init__(self, featureSet=None, server=None):
154 self.recipecaches = None 133 self.recipecaches = None
134 self.baseconfig_valid = False
135 self.parsecache_valid = False
155 self.eventlog = None 136 self.eventlog = None
156 self.skiplist = {} 137 self.skiplist = {}
157 self.featureset = CookerFeatures() 138 self.featureset = CookerFeatures()
@@ -159,34 +140,22 @@ class BBCooker:
159 for f in featureSet: 140 for f in featureSet:
160 self.featureset.setFeature(f) 141 self.featureset.setFeature(f)
161 142
143 self.orig_syspath = sys.path.copy()
144 self.orig_sysmodules = [*sys.modules]
145
162 self.configuration = bb.cookerdata.CookerConfiguration() 146 self.configuration = bb.cookerdata.CookerConfiguration()
163 147
164 self.idleCallBackRegister = idleCallBackRegister 148 self.process_server = server
149 self.idleCallBackRegister = None
150 self.waitIdle = None
151 if server:
152 self.idleCallBackRegister = server.register_idle_function
153 self.waitIdle = server.wait_for_idle
165 154
166 bb.debug(1, "BBCooker starting %s" % time.time()) 155 bb.debug(1, "BBCooker starting %s" % time.time())
167 sys.stdout.flush() 156
168 157 self.configwatched = {}
169 self.configwatcher = pyinotify.WatchManager() 158 self.parsewatched = {}
170 bb.debug(1, "BBCooker pyinotify1 %s" % time.time())
171 sys.stdout.flush()
172
173 self.configwatcher.bbseen = set()
174 self.configwatcher.bbwatchedfiles = set()
175 self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
176 bb.debug(1, "BBCooker pyinotify2 %s" % time.time())
177 sys.stdout.flush()
178 self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
179 pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
180 pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
181 self.watcher = pyinotify.WatchManager()
182 bb.debug(1, "BBCooker pyinotify3 %s" % time.time())
183 sys.stdout.flush()
184 self.watcher.bbseen = set()
185 self.watcher.bbwatchedfiles = set()
186 self.notifier = pyinotify.Notifier(self.watcher, self.notifications)
187
188 bb.debug(1, "BBCooker pyinotify complete %s" % time.time())
189 sys.stdout.flush()
190 159
191 # If being called by something like tinfoil, we need to clean cached data 160 # If being called by something like tinfoil, we need to clean cached data
192 # which may now be invalid 161 # which may now be invalid
@@ -197,14 +166,6 @@ class BBCooker:
197 self.hashserv = None 166 self.hashserv = None
198 self.hashservaddr = None 167 self.hashservaddr = None
199 168
200 self.inotify_modified_files = []
201
202 def _process_inotify_updates(server, cooker, abort):
203 cooker.process_inotify_updates()
204 return 1.0
205
206 self.idleCallBackRegister(_process_inotify_updates, self)
207
208 # TOSTOP must not be set or our children will hang when they output 169 # TOSTOP must not be set or our children will hang when they output
209 try: 170 try:
210 fd = sys.stdout.fileno() 171 fd = sys.stdout.fileno()
@@ -218,7 +179,7 @@ class BBCooker:
218 except UnsupportedOperation: 179 except UnsupportedOperation:
219 pass 180 pass
220 181
221 self.command = bb.command.Command(self) 182 self.command = bb.command.Command(self, self.process_server)
222 self.state = state.initial 183 self.state = state.initial
223 184
224 self.parser = None 185 self.parser = None
@@ -228,84 +189,37 @@ class BBCooker:
228 signal.signal(signal.SIGHUP, self.sigterm_exception) 189 signal.signal(signal.SIGHUP, self.sigterm_exception)
229 190
230 bb.debug(1, "BBCooker startup complete %s" % time.time()) 191 bb.debug(1, "BBCooker startup complete %s" % time.time())
231 sys.stdout.flush()
232 192
233 def init_configdata(self): 193 def init_configdata(self):
234 if not hasattr(self, "data"): 194 if not hasattr(self, "data"):
235 self.initConfigurationData() 195 self.initConfigurationData()
236 bb.debug(1, "BBCooker parsed base configuration %s" % time.time()) 196 bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
237 sys.stdout.flush()
238 self.handlePRServ() 197 self.handlePRServ()
239 198
240 def process_inotify_updates(self): 199 def _baseconfig_set(self, value):
241 for n in [self.confignotifier, self.notifier]: 200 if value and not self.baseconfig_valid:
242 if n.check_events(timeout=0): 201 bb.server.process.serverlog("Base config valid")
243 # read notified events and enqueue them 203 self.baseconfig_valid = value
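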
244 n.read_events() 203 bb.server.process.serverlog("Base config invalidated")
245 n.process_events() 204 self.baseconfig_valid = value
246 205
247 def config_notifications(self, event): 206 def _parsecache_set(self, value):
248 if event.maskname == "IN_Q_OVERFLOW": 207 if value and not self.parsecache_valid:
249 bb.warn("inotify event queue overflowed, invalidating caches.") 208 bb.server.process.serverlog("Parse cache valid")
250 self.parsecache_valid = False 209 elif not value and self.parsecache_valid:
251 self.baseconfig_valid = False 210 bb.server.process.serverlog("Parse cache invalidated")
252 bb.parse.clear_cache() 211 self.parsecache_valid = value
253 return 212
254 if not event.pathname in self.configwatcher.bbwatchedfiles: 213 def add_filewatch(self, deps, configwatcher=False):
255 return 214 if configwatcher:
256 if not event.pathname in self.inotify_modified_files: 215 watcher = self.configwatched
257 self.inotify_modified_files.append(event.pathname) 216 else:
258 self.baseconfig_valid = False 217 watcher = self.parsewatched
259
260 def notifications(self, event):
261 if event.maskname == "IN_Q_OVERFLOW":
262 bb.warn("inotify event queue overflowed, invalidating caches.")
263 self.parsecache_valid = False
264 bb.parse.clear_cache()
265 return
266 if event.pathname.endswith("bitbake-cookerdaemon.log") \
267 or event.pathname.endswith("bitbake.lock"):
268 return
269 if not event.pathname in self.inotify_modified_files:
270 self.inotify_modified_files.append(event.pathname)
271 self.parsecache_valid = False
272 218
273 def add_filewatch(self, deps, watcher=None, dirs=False):
274 if not watcher:
275 watcher = self.watcher
276 for i in deps: 219 for i in deps:
277 watcher.bbwatchedfiles.add(i[0]) 220 f = i[0]
278 if dirs: 221 mtime = i[1]
279 f = i[0] 222 watcher[f] = mtime
280 else:
281 f = os.path.dirname(i[0])
282 if f in watcher.bbseen:
283 continue
284 watcher.bbseen.add(f)
285 watchtarget = None
286 while True:
287 # We try and add watches for files that don't exist but if they did, would influence
288 # the parser. The parent directory of these files may not exist, in which case we need
289 # to watch any parent that does exist for changes.
290 try:
291 watcher.add_watch(f, self.watchmask, quiet=False)
292 if watchtarget:
293 watcher.bbwatchedfiles.add(watchtarget)
294 break
295 except pyinotify.WatchManagerError as e:
296 if 'ENOENT' in str(e):
297 watchtarget = f
298 f = os.path.dirname(f)
299 if f in watcher.bbseen:
300 break
301 watcher.bbseen.add(f)
302 continue
303 if 'ENOSPC' in str(e):
304 providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
305 providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
306 providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
307 providerlog.error("Root privilege is required to modify max_user_watches.")
308 raise
309 223
310 def sigterm_exception(self, signum, stackframe): 224 def sigterm_exception(self, signum, stackframe):
311 if signum == signal.SIGTERM: 225 if signum == signal.SIGTERM:
@@ -313,6 +227,7 @@ class BBCooker:
313 elif signum == signal.SIGHUP: 227 elif signum == signal.SIGHUP:
314 bb.warn("Cooker received SIGHUP, shutting down...") 228 bb.warn("Cooker received SIGHUP, shutting down...")
315 self.state = state.forceshutdown 229 self.state = state.forceshutdown
230 bb.event._should_exit.set()
316 231
317 def setFeatures(self, features): 232 def setFeatures(self, features):
318 # we only accept a new feature set if we're in state initial, so we can reset without problems 233 # we only accept a new feature set if we're in state initial, so we can reset without problems
@@ -330,6 +245,13 @@ class BBCooker:
330 self.state = state.initial 245 self.state = state.initial
331 self.caches_array = [] 246 self.caches_array = []
332 247
248 sys.path = self.orig_syspath.copy()
249 for mod in [*sys.modules]:
250 if mod not in self.orig_sysmodules:
251 del sys.modules[mod]
252
253 self.configwatched = {}
254
333 # Need to preserve BB_CONSOLELOG over resets 255 # Need to preserve BB_CONSOLELOG over resets
334 consolelog = None 256 consolelog = None
335 if hasattr(self, "data"): 257 if hasattr(self, "data"):
@@ -338,12 +260,12 @@ class BBCooker:
338 if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset: 260 if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
339 self.enableDataTracking() 261 self.enableDataTracking()
340 262
341 all_extra_cache_names = [] 263 caches_name_array = ['bb.cache:CoreRecipeInfo']
342 # We hardcode all known cache types in a single place, here. 264 # We hardcode all known cache types in a single place, here.
343 if CookerFeatures.HOB_EXTRA_CACHES in self.featureset: 265 if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
344 all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo") 266 caches_name_array.append("bb.cache_extra:HobRecipeInfo")
345 267 if CookerFeatures.RECIPE_SIGGEN_INFO in self.featureset:
346 caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names 268 caches_name_array.append("bb.cache:SiggenRecipeInfo")
347 269
348 # At least CoreRecipeInfo will be loaded, so caches_array will never be empty! 270 # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
349 # This is the entry point, no further check needed! 271 # This is the entry point, no further check needed!
@@ -362,6 +284,10 @@ class BBCooker:
362 self.data_hash = self.databuilder.data_hash 284 self.data_hash = self.databuilder.data_hash
363 self.extraconfigdata = {} 285 self.extraconfigdata = {}
364 286
287 eventlog = self.data.getVar("BB_DEFAULT_EVENTLOG")
288 if not self.configuration.writeeventlog and eventlog:
289 self.setupEventLog(eventlog)
290
365 if consolelog: 291 if consolelog:
366 self.data.setVar("BB_CONSOLELOG", consolelog) 292 self.data.setVar("BB_CONSOLELOG", consolelog)
367 293
@@ -371,31 +297,42 @@ class BBCooker:
371 self.disableDataTracking() 297 self.disableDataTracking()
372 298
373 for mc in self.databuilder.mcdata.values(): 299 for mc in self.databuilder.mcdata.values():
374 mc.renameVar("__depends", "__base_depends") 300 self.add_filewatch(mc.getVar("__base_depends", False), configwatcher=True)
375 self.add_filewatch(mc.getVar("__base_depends", False), self.configwatcher)
376 301
377 self.baseconfig_valid = True 302 self._baseconfig_set(True)
378 self.parsecache_valid = False 303 self._parsecache_set(False)
379 304
380 def handlePRServ(self): 305 def handlePRServ(self):
381 # Setup a PR Server based on the new configuration 306 # Setup a PR Server based on the new configuration
382 try: 307 try:
383 self.prhost = prserv.serv.auto_start(self.data) 308 self.prhost = prserv.serv.auto_start(self.data)
384 except prserv.serv.PRServiceConfigError as e: 309 except prserv.serv.PRServiceConfigError as e:
385 bb.fatal("Unable to start PR Server, exitting") 310 bb.fatal("Unable to start PR Server, exiting, check the bitbake-cookerdaemon.log")
386 311
387 if self.data.getVar("BB_HASHSERVE") == "auto": 312 if self.data.getVar("BB_HASHSERVE") == "auto":
388 # Create a new hash server bound to a unix domain socket 313 # Create a new hash server bound to a unix domain socket
389 if not self.hashserv: 314 if not self.hashserv:
390 dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db" 315 dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
316 upstream = self.data.getVar("BB_HASHSERVE_UPSTREAM") or None
317 if upstream:
318 import socket
319 try:
320 sock = socket.create_connection(upstream.split(":"), 5)
321 sock.close()
322 except socket.error as e:
323 bb.warn("BB_HASHSERVE_UPSTREAM is not valid, unable to connect hash equivalence server at '%s': %s"
324 % (upstream, repr(e)))
325
391 self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR") 326 self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
392 self.hashserv = hashserv.create_server(self.hashservaddr, dbfile, sync=False) 327 self.hashserv = hashserv.create_server(
393 self.hashserv.process = multiprocessing.Process(target=self.hashserv.serve_forever) 328 self.hashservaddr,
394 self.hashserv.process.start() 329 dbfile,
395 self.data.setVar("BB_HASHSERVE", self.hashservaddr) 330 sync=False,
396 self.databuilder.origdata.setVar("BB_HASHSERVE", self.hashservaddr) 331 upstream=upstream,
397 self.databuilder.data.setVar("BB_HASHSERVE", self.hashservaddr) 332 )
333 self.hashserv.serve_as_process(log_level=logging.WARNING)
398 for mc in self.databuilder.mcdata: 334 for mc in self.databuilder.mcdata:
335 self.databuilder.mcorigdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)
399 self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservaddr) 336 self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)
400 337
401 bb.parse.init_parser(self.data) 338 bb.parse.init_parser(self.data)
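Besides the friendlier PR server failure message, this hunk probes BB_HASHSERVE_UPSTREAM with a five-second TCP connect before use, passes the upstream into hashserv.create_server(), and replaces the hand-rolled multiprocessing.Process with serve_as_process(). A configuration sketch; the upstream host and port are illustrative only:

    # local.conf: run a local hash equivalence server on a unix socket,
    # mirroring a shared upstream server
    BB_HASHSERVE = "auto"
    BB_HASHSERVE_UPSTREAM = "hashserv.example.com:8686"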
@@ -410,6 +347,29 @@ class BBCooker:
410 if hasattr(self, "data"): 347 if hasattr(self, "data"):
411 self.data.disableTracking() 348 self.data.disableTracking()
412 349
350 def revalidateCaches(self):
351 bb.parse.clear_cache()
352
353 clean = True
354 for f in self.configwatched:
355 if not bb.parse.check_mtime(f, self.configwatched[f]):
356 bb.server.process.serverlog("Found %s changed, invalid cache" % f)
357 self._baseconfig_set(False)
358 self._parsecache_set(False)
359 clean = False
360 break
361
362 if clean:
363 for f in self.parsewatched:
364 if not bb.parse.check_mtime(f, self.parsewatched[f]):
365 bb.server.process.serverlog("Found %s changed, invalid cache" % f)
366 self._parsecache_set(False)
367 clean = False
368 break
369
370 if not clean:
371 bb.parse.BBHandler.cached_statements = {}
372
413 def parseConfiguration(self): 373 def parseConfiguration(self):
414 self.updateCacheSync() 374 self.updateCacheSync()
415 375
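With pyinotify gone, invalidation is pull-based: add_filewatch() records an mtime per file, and revalidateCaches() compares each recorded value via bb.parse.check_mtime(), invalidating the base config and/or parse caches on any mismatch. An illustrative stand-in for the comparison (the real helper lives in bb.parse, outside this excerpt):

    import os

    def check_mtime(f, cached_mtime):
        # Missing files count as changed, mirroring cached_mtime_noerror()
        try:
            current = os.stat(f).st_mtime
        except OSError:
            current = 0
        return current == cached_mtime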
@@ -428,8 +388,24 @@ class BBCooker:
428 self.recipecaches[mc] = bb.cache.CacheData(self.caches_array) 388 self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)
429 389
430 self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS")) 390 self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))
431 391 self.collections = {}
432 self.parsecache_valid = False 392 for mc in self.multiconfigs:
393 self.collections[mc] = CookerCollectFiles(self.bbfile_config_priorities, mc)
394
395 self._parsecache_set(False)
396
397 def setupEventLog(self, eventlog):
398 if self.eventlog and self.eventlog[0] != eventlog:
399 bb.event.unregister_UIHhandler(self.eventlog[1])
400 self.eventlog = None
401 if not self.eventlog or self.eventlog[0] != eventlog:
402 # we log all events to a file if so directed
403 # register the log file writer as UI Handler
404 if not os.path.exists(os.path.dirname(eventlog)):
405 bb.utils.mkdirhier(os.path.dirname(eventlog))
406 writer = EventWriter(self, eventlog)
407 EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
408 self.eventlog = (eventlog, bb.event.register_UIHhandler(EventLogWriteHandler(writer)), writer)
433 409
434 def updateConfigOpts(self, options, environment, cmdline): 410 def updateConfigOpts(self, options, environment, cmdline):
435 self.ui_cmdline = cmdline 411 self.ui_cmdline = cmdline
@@ -450,14 +426,7 @@ class BBCooker:
450 setattr(self.configuration, o, options[o]) 426 setattr(self.configuration, o, options[o])
451 427
452 if self.configuration.writeeventlog: 428 if self.configuration.writeeventlog:
453 if self.eventlog and self.eventlog[0] != self.configuration.writeeventlog: 429 self.setupEventLog(self.configuration.writeeventlog)
454 bb.event.unregister_UIHhandler(self.eventlog[1])
455 if not self.eventlog or self.eventlog[0] != self.configuration.writeeventlog:
456 # we log all events to a file if so directed
457 # register the log file writer as UI Handler
458 writer = EventWriter(self, self.configuration.writeeventlog)
459 EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
460 self.eventlog = (self.configuration.writeeventlog, bb.event.register_UIHhandler(EventLogWriteHandler(writer)))
461 430
462 bb.msg.loggerDefaultLogLevel = self.configuration.default_loglevel 431 bb.msg.loggerDefaultLogLevel = self.configuration.default_loglevel
463 bb.msg.loggerDefaultDomains = self.configuration.debug_domains 432 bb.msg.loggerDefaultDomains = self.configuration.debug_domains
@@ -487,37 +456,37 @@ class BBCooker:
487 # Now update all the variables not in the datastore to match 456 # Now update all the variables not in the datastore to match
488 self.configuration.env = environment 457 self.configuration.env = environment
489 458
459 self.revalidateCaches()
490 if not clean: 460 if not clean:
491 logger.debug("Base environment change, triggering reparse") 461 logger.debug("Base environment change, triggering reparse")
492 self.reset() 462 self.reset()
493 463
494 def runCommands(self, server, data, abort):
495 """
496 Run any queued asynchronous command
497 This is done by the idle handler so it runs in true context rather than
498 tied to any UI.
499 """
500
501 return self.command.runAsyncCommand()
502
503 def showVersions(self): 464 def showVersions(self):
504 465
505 (latest_versions, preferred_versions) = self.findProviders() 466 (latest_versions, preferred_versions, required) = self.findProviders()
506 467
507 logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version") 468 logger.plain("%-35s %25s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version", "Required Version")
508 logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================") 469 logger.plain("%-35s %25s %25s %25s\n", "===========", "==============", "=================", "================")
509 470
510 for p in sorted(self.recipecaches[''].pkg_pn): 471 for p in sorted(self.recipecaches[''].pkg_pn):
511 pref = preferred_versions[p] 472 preferred = preferred_versions[p]
512 latest = latest_versions[p] 473 latest = latest_versions[p]
474 requiredstr = ""
475 preferredstr = ""
476 if required[p]:
477 if preferred[0] is not None:
478 requiredstr = preferred[0][0] + ":" + preferred[0][1] + '-' + preferred[0][2]
479 else:
480 bb.fatal("REQUIRED_VERSION of package %s not available" % p)
481 else:
482 preferredstr = preferred[0][0] + ":" + preferred[0][1] + '-' + preferred[0][2]
513 483
514 prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
515 lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2] 484 lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]
516 485
517 if pref == latest: 486 if preferred == latest:
518 prefstr = "" 487 preferredstr = ""
519 488
520 logger.plain("%-35s %25s %25s", p, lateststr, prefstr) 489 logger.plain("%-35s %25s %25s %25s", p, lateststr, preferredstr, requiredstr)
521 490
522 def showEnvironment(self, buildfile=None, pkgs_to_build=None): 491 def showEnvironment(self, buildfile=None, pkgs_to_build=None):
523 """ 492 """
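showVersions() now prints a fourth "Required Version" column and calls bb.fatal() when a REQUIRED_VERSION pin cannot be satisfied. A configuration sketch, assuming the usual per-recipe override syntax (recipe name and version are illustrative):

    # local.conf: fail the build unless exactly this version is available
    REQUIRED_VERSION:pn-example-recipe = "1.2.3"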
@@ -533,6 +502,8 @@ class BBCooker:
533 if not orig_tracking: 502 if not orig_tracking:
534 self.enableDataTracking() 503 self.enableDataTracking()
535 self.reset() 504 self.reset()
505 # reset() resets to the UI requested value so we have to redo this
506 self.enableDataTracking()
536 507
537 def mc_base(p): 508 def mc_base(p):
538 if p.startswith('mc:'): 509 if p.startswith('mc:'):
@@ -556,21 +527,21 @@ class BBCooker:
556 if pkgs_to_build[0] in set(ignore.split()): 527 if pkgs_to_build[0] in set(ignore.split()):
557 bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0]) 528 bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])
558 529
559 taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True) 530 taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.halt, allowincomplete=True)
560 531
561 mc = runlist[0][0] 532 mc = runlist[0][0]
562 fn = runlist[0][3] 533 fn = runlist[0][3]
563 534
564 if fn: 535 if fn:
565 try: 536 try:
566 bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array) 537 layername = self.collections[mc].calc_bbfile_priority(fn)[2]
567 envdata = bb_caches[mc].loadDataFull(fn, self.collections[mc].get_file_appends(fn)) 538 envdata = self.databuilder.parseRecipe(fn, self.collections[mc].get_file_appends(fn), layername)
568 except Exception as e: 539 except Exception as e:
569 parselog.exception("Unable to read %s", fn) 540 parselog.exception("Unable to read %s", fn)
570 raise 541 raise
571 else: 542 else:
572 if not mc in self.databuilder.mcdata: 543 if not mc in self.databuilder.mcdata:
573 bb.fatal('Not multiconfig named "%s" found' % mc) 544 bb.fatal('No multiconfig named "%s" found' % mc)
574 envdata = self.databuilder.mcdata[mc] 545 envdata = self.databuilder.mcdata[mc]
575 data.expandKeys(envdata) 546 data.expandKeys(envdata)
576 parse.ast.runAnonFuncs(envdata) 547 parse.ast.runAnonFuncs(envdata)
@@ -585,7 +556,7 @@ class BBCooker:
585 data.emit_env(env, envdata, True) 556 data.emit_env(env, envdata, True)
586 logger.plain(env.getvalue()) 557 logger.plain(env.getvalue())
587 558
588 # emit the metadata which isnt valid shell 559 # emit the metadata which isn't valid shell
589 for e in sorted(envdata.keys()): 560 for e in sorted(envdata.keys()):
590 if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False): 561 if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
591 logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False)) 562 logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))
@@ -594,7 +565,7 @@ class BBCooker:
594 self.disableDataTracking() 565 self.disableDataTracking()
595 self.reset() 566 self.reset()
596 567
597 def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False): 568 def buildTaskData(self, pkgs_to_build, task, halt, allowincomplete=False):
598 """ 569 """
599 Prepare a runqueue and taskdata object for iteration over pkgs_to_build 570 Prepare a runqueue and taskdata object for iteration over pkgs_to_build
600 """ 571 """
@@ -641,7 +612,7 @@ class BBCooker:
641 localdata = {} 612 localdata = {}
642 613
643 for mc in self.multiconfigs: 614 for mc in self.multiconfigs:
644 taskdata[mc] = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete) 615 taskdata[mc] = bb.taskdata.TaskData(halt, skiplist=self.skiplist, allowincomplete=allowincomplete)
645 localdata[mc] = data.createCopy(self.databuilder.mcdata[mc]) 616 localdata[mc] = data.createCopy(self.databuilder.mcdata[mc])
646 bb.data.expandKeys(localdata[mc]) 617 bb.data.expandKeys(localdata[mc])
647 618
@@ -690,19 +661,18 @@ class BBCooker:
690 taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc]) 661 taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
691 mcdeps |= set(taskdata[mc].get_mcdepends()) 662 mcdeps |= set(taskdata[mc].get_mcdepends())
692 new = False 663 new = False
693 for mc in self.multiconfigs: 664 for k in mcdeps:
694 for k in mcdeps: 665 if k in seen:
695 if k in seen: 666 continue
696 continue 667 l = k.split(':')
697 l = k.split(':') 668 depmc = l[2]
698 depmc = l[2] 669 if depmc not in self.multiconfigs:
699 if depmc not in self.multiconfigs: 670 bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (k,depmc))
700 bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (k,depmc)) 671 else:
701 else: 672 logger.debug("Adding providers for multiconfig dependency %s" % l[3])
702 logger.debug("Adding providers for multiconfig dependency %s" % l[3]) 673 taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
703 taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3]) 674 seen.add(k)
704 seen.add(k) 675 new = True
705 new = True
706 676
707 for mc in self.multiconfigs: 677 for mc in self.multiconfigs:
708 taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc]) 678 taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
@@ -715,7 +685,7 @@ class BBCooker:
715 Prepare a runqueue and taskdata object for iteration over pkgs_to_build 685 Prepare a runqueue and taskdata object for iteration over pkgs_to_build
716 """ 686 """
717 687
718 # We set abort to False here to prevent unbuildable targets raising 688 # We set halt to False here to prevent unbuildable targets raising
719 # an exception when we're just generating data 689 # an exception when we're just generating data
720 taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True) 690 taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True)
721 691
@@ -792,7 +762,9 @@ class BBCooker:
792 for dep in rq.rqdata.runtaskentries[tid].depends: 762 for dep in rq.rqdata.runtaskentries[tid].depends:
793 (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep) 763 (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
794 deppn = self.recipecaches[depmc].pkg_fn[deptaskfn] 764 deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
795 depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep))) 765 if depmc:
766 depmc = "mc:" + depmc + ":"
767 depend_tree["tdepends"][dotname].append("%s%s.%s" % (depmc, deppn, bb.runqueue.taskname_from_tid(dep)))
796 if taskfn not in seen_fns: 768 if taskfn not in seen_fns:
797 seen_fns.append(taskfn) 769 seen_fns.append(taskfn)
798 packages = [] 770 packages = []
@@ -1056,6 +1028,11 @@ class BBCooker:
1056 if matches: 1028 if matches:
1057 bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data) 1029 bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
1058 1030
1031 def testCookerCommandEvent(self, filepattern):
1032 # Dummy command used by OEQA selftest to test tinfoil without IO
1033 matches = ["A", "B"]
1034 bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
1035
1059 def findProviders(self, mc=''): 1036 def findProviders(self, mc=''):
1060 return bb.providers.findProviders(self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn) 1037 return bb.providers.findProviders(self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
1061 1038
@@ -1063,10 +1040,16 @@ class BBCooker:
1063 if pn in self.recipecaches[mc].providers: 1040 if pn in self.recipecaches[mc].providers:
1064 filenames = self.recipecaches[mc].providers[pn] 1041 filenames = self.recipecaches[mc].providers[pn]
1065 eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.databuilder.mcdata[mc], self.recipecaches[mc]) 1042 eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.databuilder.mcdata[mc], self.recipecaches[mc])
1066 filename = eligible[0] 1043 if eligible is not None:
1044 filename = eligible[0]
1045 else:
1046 filename = None
1067 return None, None, None, filename 1047 return None, None, None, filename
1068 elif pn in self.recipecaches[mc].pkg_pn: 1048 elif pn in self.recipecaches[mc].pkg_pn:
1069 return bb.providers.findBestProvider(pn, self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn) 1049 (latest, latest_f, preferred_ver, preferred_file, required) = bb.providers.findBestProvider(pn, self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
1050 if required and preferred_file is None:
1051 return None, None, None, None
1052 return (latest, latest_f, preferred_ver, preferred_file)
1070 else: 1053 else:
1071 return None, None, None, None 1054 return None, None, None, None
1072 1055
@@ -1211,15 +1194,15 @@ class BBCooker:
1211 except bb.utils.VersionStringException as vse: 1194 except bb.utils.VersionStringException as vse:
1212 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse))) 1195 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1213 if not res: 1196 if not res:
1214 parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver) 1197 parselog.debug3("Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
1215 continue 1198 continue
1216 else: 1199 else:
1217 parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec) 1200 parselog.debug3("Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
1218 continue 1201 continue
1219 parselog.debug(3,"Layer '%s' recommends layer '%s', so we are adding it", c, rec) 1202 parselog.debug3("Layer '%s' recommends layer '%s', so we are adding it", c, rec)
1220 collection_depends[c].append(rec) 1203 collection_depends[c].append(rec)
1221 else: 1204 else:
1222 parselog.debug(3,"Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec) 1205 parselog.debug3("Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)
1223 1206
1224 # Recursively work out collection priorities based on dependencies 1207 # Recursively work out collection priorities based on dependencies
1225 def calc_layer_priority(collection): 1208 def calc_layer_priority(collection):
@@ -1231,7 +1214,7 @@ class BBCooker:
1231 if depprio > max_depprio: 1214 if depprio > max_depprio:
1232 max_depprio = depprio 1215 max_depprio = depprio
1233 max_depprio += 1 1216 max_depprio += 1
1234 parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio) 1217 parselog.debug("Calculated priority of layer %s as %d", collection, max_depprio)
1235 collection_priorities[collection] = max_depprio 1218 collection_priorities[collection] = max_depprio
1236 1219
1237 # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities 1220 # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
@@ -1243,7 +1226,7 @@ class BBCooker:
1243 errors = True 1226 errors = True
1244 continue 1227 continue
1245 elif regex == "": 1228 elif regex == "":
1246 parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c) 1229 parselog.debug("BBFILE_PATTERN_%s is empty" % c)
1247 cre = re.compile('^NULL$') 1230 cre = re.compile('^NULL$')
1248 errors = False 1231 errors = False
1249 else: 1232 else:
@@ -1290,8 +1273,8 @@ class BBCooker:
1290 if bf.startswith("/") or bf.startswith("../"): 1273 if bf.startswith("/") or bf.startswith("../"):
1291 bf = os.path.abspath(bf) 1274 bf = os.path.abspath(bf)
1292 1275
1293 self.collections = {mc: CookerCollectFiles(self.bbfile_config_priorities, mc)} 1276 collections = {mc: CookerCollectFiles(self.bbfile_config_priorities, mc)}
1294 filelist, masked, searchdirs = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc]) 1277 filelist, masked, searchdirs = collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
1295 try: 1278 try:
1296 os.stat(bf) 1279 os.stat(bf)
1297 bf = os.path.abspath(bf) 1280 bf = os.path.abspath(bf)
@@ -1357,7 +1340,8 @@ class BBCooker:
1357 1340
1358 bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array) 1341 bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array)
1359 1342
1360 infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn)) 1343 layername = self.collections[mc].calc_bbfile_priority(fn)[2]
1344 infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn), layername)
1361 infos = dict(infos) 1345 infos = dict(infos)
1362 1346
1363 fn = bb.cache.realfn2virtual(fn, cls, mc) 1347 fn = bb.cache.realfn2virtual(fn, cls, mc)
@@ -1383,14 +1367,16 @@ class BBCooker:
1383 self.recipecaches[mc].rundeps[fn] = defaultdict(list) 1367 self.recipecaches[mc].rundeps[fn] = defaultdict(list)
1384 self.recipecaches[mc].runrecs[fn] = defaultdict(list) 1368 self.recipecaches[mc].runrecs[fn] = defaultdict(list)
1385 1369
1370 bb.parse.siggen.setup_datacache(self.recipecaches)
1371
1386 # Invalidate task for target if force mode active 1372 # Invalidate task for target if force mode active
1387 if self.configuration.force: 1373 if self.configuration.force:
1388 logger.verbose("Invalidate task %s, %s", task, fn) 1374 logger.verbose("Invalidate task %s, %s", task, fn)
1389 bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn) 1375 bb.parse.siggen.invalidate_task(task, fn)
1390 1376
1391 # Setup taskdata structure 1377 # Setup taskdata structure
1392 taskdata = {} 1378 taskdata = {}
1393 taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort) 1379 taskdata[mc] = bb.taskdata.TaskData(self.configuration.halt)
1394 taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item) 1380 taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item)
1395 1381
1396 if quietlog: 1382 if quietlog:
@@ -1400,17 +1386,20 @@ class BBCooker:
1400 buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME") 1386 buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
1401 if fireevents: 1387 if fireevents:
1402 bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc]) 1388 bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])
1389 if self.eventlog:
1390 self.eventlog[2].write_variables()
1391 bb.event.enable_heartbeat()
1403 1392
1404 # Execute the runqueue 1393 # Execute the runqueue
1405 runlist = [[mc, item, task, fn]] 1394 runlist = [[mc, item, task, fn]]
1406 1395
1407 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist) 1396 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
1408 1397
1409 def buildFileIdle(server, rq, abort): 1398 def buildFileIdle(server, rq, halt):
1410 1399
1411 msg = None 1400 msg = None
1412 interrupted = 0 1401 interrupted = 0
1413 if abort or self.state == state.forceshutdown: 1402 if halt or self.state == state.forceshutdown:
1414 rq.finish_runqueue(True) 1403 rq.finish_runqueue(True)
1415 msg = "Forced shutdown" 1404 msg = "Forced shutdown"
1416 interrupted = 2 1405 interrupted = 2
@@ -1425,37 +1414,68 @@ class BBCooker:
1425 failures += len(exc.args) 1414 failures += len(exc.args)
1426 retval = False 1415 retval = False
1427 except SystemExit as exc: 1416 except SystemExit as exc:
1428 self.command.finishAsyncCommand(str(exc))
1429 if quietlog: 1417 if quietlog:
1430 bb.runqueue.logger.setLevel(rqloglevel) 1418 bb.runqueue.logger.setLevel(rqloglevel)
1431 return False 1419 return bb.server.process.idleFinish(str(exc))
1432 1420
1433 if not retval: 1421 if not retval:
1434 if fireevents: 1422 if fireevents:
1435 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc]) 1423 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
1436 self.command.finishAsyncCommand(msg) 1424 bb.event.disable_heartbeat()
1437 # We trashed self.recipecaches above 1425 # We trashed self.recipecaches above
1438 self.parsecache_valid = False 1426 self._parsecache_set(False)
1439 self.configuration.limited_deps = False 1427 self.configuration.limited_deps = False
1440 bb.parse.siggen.reset(self.data) 1428 bb.parse.siggen.reset(self.data)
1441 if quietlog: 1429 if quietlog:
1442 bb.runqueue.logger.setLevel(rqloglevel) 1430 bb.runqueue.logger.setLevel(rqloglevel)
1443 return False 1431 return bb.server.process.idleFinish(msg)
1444 if retval is True: 1432 if retval is True:
1445 return True 1433 return True
1446 return retval 1434 return retval
1447 1435
1448 self.idleCallBackRegister(buildFileIdle, rq) 1436 self.idleCallBackRegister(buildFileIdle, rq)
1449 1437
1438 def getTaskSignatures(self, target, tasks):
1439 sig = []
1440 getAllTaskSignatures = False
1441
1442 if not tasks:
1443 tasks = ["do_build"]
1444 getAllTaskSignatures = True
1445
1446 for task in tasks:
1447 taskdata, runlist = self.buildTaskData(target, task, self.configuration.halt)
1448 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
1449 rq.rqdata.prepare()
1450
1451 for l in runlist:
1452 mc, pn, taskname, fn = l
1453
1454 taskdep = rq.rqdata.dataCaches[mc].task_deps[fn]
1455 for t in taskdep['tasks']:
1456 if t in taskdep['nostamp'] or "setscene" in t:
1457 continue
1458 tid = bb.runqueue.build_tid(mc, fn, t)
1459
1460 if t in task or getAllTaskSignatures:
1461 try:
1462 rq.rqdata.prepare_task_hash(tid)
1463 sig.append([pn, t, rq.rqdata.get_task_unihash(tid)])
1464 except KeyError:
1465 sig.append(self.getTaskSignatures(target, [t])[0])
1466
1467 return sig
1468
1450 def buildTargets(self, targets, task): 1469 def buildTargets(self, targets, task):
1451 """ 1470 """
1452 Attempt to build the targets specified 1471 Attempt to build the targets specified
1453 """ 1472 """
1454 1473
1455 def buildTargetsIdle(server, rq, abort): 1474 def buildTargetsIdle(server, rq, halt):
1456 msg = None 1475 msg = None
1457 interrupted = 0 1476 interrupted = 0
1458 if abort or self.state == state.forceshutdown: 1477 if halt or self.state == state.forceshutdown:
1478 bb.event._should_exit.set()
1459 rq.finish_runqueue(True) 1479 rq.finish_runqueue(True)
1460 msg = "Forced shutdown" 1480 msg = "Forced shutdown"
1461 interrupted = 2 1481 interrupted = 2
@@ -1470,16 +1490,16 @@ class BBCooker:
1470 failures += len(exc.args) 1490 failures += len(exc.args)
1471 retval = False 1491 retval = False
1472 except SystemExit as exc: 1492 except SystemExit as exc:
1473 self.command.finishAsyncCommand(str(exc)) 1493 return bb.server.process.idleFinish(str(exc))
1474 return False
1475 1494
1476 if not retval: 1495 if not retval:
1477 try: 1496 try:
1478 for mc in self.multiconfigs: 1497 for mc in self.multiconfigs:
1479 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc]) 1498 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
1480 finally: 1499 finally:
1481 self.command.finishAsyncCommand(msg) 1500 bb.event.disable_heartbeat()
1482 return False 1501 return bb.server.process.idleFinish(msg)
1502
1483 if retval is True: 1503 if retval is True:
1484 return True 1504 return True
1485 return retval 1505 return retval
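The new getTaskSignatures() builds a runqueue per requested task and returns [pn, taskname, unihash] triples; when no tasks are given it walks everything reachable from do_build, skipping nostamp and setscene tasks. A hedged usage sketch, given a BBCooker instance `cooker` with configuration already parsed (target and task names are illustrative):

    for pn, taskname, unihash in cooker.getTaskSignatures(["busybox"], ["do_compile"]):
        print("%s:%s -> %s" % (pn, taskname, unihash))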
@@ -1498,7 +1518,7 @@ class BBCooker:
1498 1518
1499 bb.event.fire(bb.event.BuildInit(packages), self.data) 1519 bb.event.fire(bb.event.BuildInit(packages), self.data)
1500 1520
1501 taskdata, runlist = self.buildTaskData(targets, task, self.configuration.abort) 1521 taskdata, runlist = self.buildTaskData(targets, task, self.configuration.halt)
1502 1522
1503 buildname = self.data.getVar("BUILDNAME", False) 1523 buildname = self.data.getVar("BUILDNAME", False)
1504 1524
@@ -1511,6 +1531,9 @@ class BBCooker:
1511 1531
1512 for mc in self.multiconfigs: 1532 for mc in self.multiconfigs:
1513 bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc]) 1533 bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])
1534 if self.eventlog:
1535 self.eventlog[2].write_variables()
1536 bb.event.enable_heartbeat()
1514 1537
1515 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist) 1538 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
1516 if 'universe' in targets: 1539 if 'universe' in targets:
@@ -1520,7 +1543,13 @@ class BBCooker:
1520 1543
1521 1544
1522 def getAllKeysWithFlags(self, flaglist): 1545 def getAllKeysWithFlags(self, flaglist):
1546 def dummy_autorev(d):
1547 return
1548
1523 dump = {} 1549 dump = {}
1550 # Horrible but for now we need to avoid any sideeffects of autorev being called
1551 saved = bb.fetch2.get_autorev
1552 bb.fetch2.get_autorev = dummy_autorev
1524 for k in self.data.keys(): 1553 for k in self.data.keys():
1525 try: 1554 try:
1526 expand = True 1555 expand = True
@@ -1540,6 +1569,7 @@ class BBCooker:
1540 dump[k][d] = None 1569 dump[k][d] = None
1541 except Exception as e: 1570 except Exception as e:
1542 print(e) 1571 print(e)
1572 bb.fetch2.get_autorev = saved
1543 return dump 1573 return dump
1544 1574
1545 1575
@@ -1547,13 +1577,6 @@ class BBCooker:
1547 if self.state == state.running: 1577 if self.state == state.running:
1548 return 1578 return
1549 1579
1550 # reload files for which we got notifications
1551 for p in self.inotify_modified_files:
1552 bb.parse.update_cache(p)
1553 if p in bb.parse.BBHandler.cached_statements:
1554 del bb.parse.BBHandler.cached_statements[p]
1555 self.inotify_modified_files = []
1556
1557 if not self.baseconfig_valid: 1580 if not self.baseconfig_valid:
1558 logger.debug("Reloading base configuration data") 1581 logger.debug("Reloading base configuration data")
1559 self.initConfigurationData() 1582 self.initConfigurationData()
@@ -1566,7 +1589,7 @@ class BBCooker:
1566 1589
1567 if self.state in (state.shutdown, state.forceshutdown, state.error): 1590 if self.state in (state.shutdown, state.forceshutdown, state.error):
1568 if hasattr(self.parser, 'shutdown'): 1591 if hasattr(self.parser, 'shutdown'):
1569 self.parser.shutdown(clean=False, force = True) 1592 self.parser.shutdown(clean=False)
1570 self.parser.final_cleanup() 1593 self.parser.final_cleanup()
1571 raise bb.BBHandledException() 1594 raise bb.BBHandledException()
1572 1595
@@ -1574,6 +1597,9 @@ class BBCooker:
1574 self.updateCacheSync() 1597 self.updateCacheSync()
1575 1598
1576 if self.state != state.parsing and not self.parsecache_valid: 1599 if self.state != state.parsing and not self.parsecache_valid:
1600 bb.server.process.serverlog("Parsing started")
1601 self.parsewatched = {}
1602
1577 bb.parse.siggen.reset(self.data) 1603 bb.parse.siggen.reset(self.data)
1578 self.parseConfiguration () 1604 self.parseConfiguration ()
1579 if CookerFeatures.SEND_SANITYEVENTS in self.featureset: 1605 if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
@@ -1587,30 +1613,27 @@ class BBCooker:
1587 for dep in self.configuration.extra_assume_provided: 1613 for dep in self.configuration.extra_assume_provided:
1588 self.recipecaches[mc].ignored_dependencies.add(dep) 1614 self.recipecaches[mc].ignored_dependencies.add(dep)
1589 1615
1590 self.collections = {}
1591
1592 mcfilelist = {} 1616 mcfilelist = {}
1593 total_masked = 0 1617 total_masked = 0
1594 searchdirs = set() 1618 searchdirs = set()
1595 for mc in self.multiconfigs: 1619 for mc in self.multiconfigs:
1596 self.collections[mc] = CookerCollectFiles(self.bbfile_config_priorities, mc)
1597 (filelist, masked, search) = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc]) 1620 (filelist, masked, search) = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
1598 1621
1599 mcfilelist[mc] = filelist 1622 mcfilelist[mc] = filelist
1600 total_masked += masked 1623 total_masked += masked
1601 searchdirs |= set(search) 1624 searchdirs |= set(search)
1602 1625
1603 # Add inotify watches for directories searched for bb/bbappend files 1626 # Add mtimes for directories searched for bb/bbappend files
1604 for dirent in searchdirs: 1627 for dirent in searchdirs:
1605 self.add_filewatch([[dirent]], dirs=True) 1628 self.add_filewatch([(dirent, bb.parse.cached_mtime_noerror(dirent))])
1606 1629
1607 self.parser = CookerParser(self, mcfilelist, total_masked) 1630 self.parser = CookerParser(self, mcfilelist, total_masked)
1608 self.parsecache_valid = True 1631 self._parsecache_set(True)
1609 1632
1610 self.state = state.parsing 1633 self.state = state.parsing
1611 1634
1612 if not self.parser.parse_next(): 1635 if not self.parser.parse_next():
1613 collectlog.debug(1, "parsing complete") 1636 collectlog.debug("parsing complete")
1614 if self.parser.error: 1637 if self.parser.error:
1615 raise bb.BBHandledException() 1638 raise bb.BBHandledException()
1616 self.show_appends_with_no_recipes() 1639 self.show_appends_with_no_recipes()
@@ -1633,7 +1656,7 @@ class BBCooker:
1633 # Return a copy, don't modify the original 1656 # Return a copy, don't modify the original
1634 pkgs_to_build = pkgs_to_build[:] 1657 pkgs_to_build = pkgs_to_build[:]
1635 1658
1636 if len(pkgs_to_build) == 0: 1659 if not pkgs_to_build:
1637 raise NothingToBuild 1660 raise NothingToBuild
1638 1661
1639 ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split() 1662 ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
@@ -1655,7 +1678,7 @@ class BBCooker:
1655 1678
1656 if 'universe' in pkgs_to_build: 1679 if 'universe' in pkgs_to_build:
1657 parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.") 1680 parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
1658 parselog.debug(1, "collating packages for \"universe\"") 1681 parselog.debug("collating packages for \"universe\"")
1659 pkgs_to_build.remove('universe') 1682 pkgs_to_build.remove('universe')
1660 for mc in self.multiconfigs: 1683 for mc in self.multiconfigs:
1661 for t in self.recipecaches[mc].universe_target: 1684 for t in self.recipecaches[mc].universe_target:
@@ -1680,26 +1703,36 @@ class BBCooker:
1680 def post_serve(self): 1703 def post_serve(self):
1681 self.shutdown(force=True) 1704 self.shutdown(force=True)
1682 prserv.serv.auto_shutdown() 1705 prserv.serv.auto_shutdown()
1706 if hasattr(bb.parse, "siggen"):
1707 bb.parse.siggen.exit()
1683 if self.hashserv: 1708 if self.hashserv:
1684 self.hashserv.process.terminate() 1709 self.hashserv.process.terminate()
1685 self.hashserv.process.join() 1710 self.hashserv.process.join()
1686 if hasattr(self, "data"): 1711 if hasattr(self, "data"):
1687 bb.event.fire(CookerExit(), self.data) 1712 bb.event.fire(CookerExit(), self.data)
1688 1713
1689 def shutdown(self, force = False): 1714 def shutdown(self, force=False):
1690 if force: 1715 if force:
1691 self.state = state.forceshutdown 1716 self.state = state.forceshutdown
1717 bb.event._should_exit.set()
1692 else: 1718 else:
1693 self.state = state.shutdown 1719 self.state = state.shutdown
1694 1720
1695 if self.parser: 1721 if self.parser:
1696 self.parser.shutdown(clean=not force, force=force) 1722 self.parser.shutdown(clean=False)
1697 self.parser.final_cleanup() 1723 self.parser.final_cleanup()
1698 1724
1699 def finishcommand(self): 1725 def finishcommand(self):
1726 if hasattr(self.parser, 'shutdown'):
1727 self.parser.shutdown(clean=False)
1728 self.parser.final_cleanup()
1700 self.state = state.initial 1729 self.state = state.initial
1730 bb.event._should_exit.clear()
1701 1731
1702 def reset(self): 1732 def reset(self):
1733 if hasattr(bb.parse, "siggen"):
1734 bb.parse.siggen.exit()
1735 self.finishcommand()
1703 self.initConfigurationData() 1736 self.initConfigurationData()
1704 self.handlePRServ() 1737 self.handlePRServ()
1705 1738
@@ -1711,9 +1744,9 @@ class BBCooker:
1711 if hasattr(self, "data"): 1744 if hasattr(self, "data"):
1712 self.databuilder.reset() 1745 self.databuilder.reset()
1713 self.data = self.databuilder.data 1746 self.data = self.databuilder.data
1714 self.parsecache_valid = False 1747 # In theory tinfoil could have modified the base data before parsing,
1715 self.baseconfig_valid = False 1748 # ideally need to track if anything did modify the datastore
1716 1749 self._parsecache_set(False)
1717 1750
1718class CookerExit(bb.event.Event): 1751class CookerExit(bb.event.Event):
1719 """ 1752 """
@@ -1728,16 +1761,16 @@ class CookerCollectFiles(object):
1728 def __init__(self, priorities, mc=''): 1761 def __init__(self, priorities, mc=''):
1729 self.mc = mc 1762 self.mc = mc
1730 self.bbappends = [] 1763 self.bbappends = []
1731 # Priorities is a list of tupples, with the second element as the pattern. 1764 # Priorities is a list of tuples, with the second element as the pattern.
1732 # We need to sort the list with the longest pattern first, and so on to 1765 # We need to sort the list with the longest pattern first, and so on to
1733 # the shortest. This allows nested layers to be properly evaluated. 1766 # the shortest. This allows nested layers to be properly evaluated.
1734 self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True) 1767 self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)
1735 1768
1736 def calc_bbfile_priority(self, filename): 1769 def calc_bbfile_priority(self, filename):
1737 for _, _, regex, pri in self.bbfile_config_priorities: 1770 for layername, _, regex, pri in self.bbfile_config_priorities:
1738 if regex.match(filename): 1771 if regex.match(filename):
1739 return pri, regex 1772 return pri, regex, layername
1740 return 0, None 1773 return 0, None, None
1741 1774
1742 def get_bbfiles(self): 1775 def get_bbfiles(self):
1743 """Get list of default .bb files by reading out the current directory""" 1776 """Get list of default .bb files by reading out the current directory"""
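calc_bbfile_priority() now returns a triple, with the owning layer name as the third element; that layername is what the parseRecipe() and cache parse() call sites earlier in this diff consume. Callers must unpack accordingly:

    # Third element is the collection (layer) whose regex matched
    pri, regex, layername = self.collections[mc].calc_bbfile_priority(fn)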
@@ -1756,7 +1789,7 @@ class CookerCollectFiles(object):
1756 for ignored in ('SCCS', 'CVS', '.svn'): 1789 for ignored in ('SCCS', 'CVS', '.svn'):
1757 if ignored in dirs: 1790 if ignored in dirs:
1758 dirs.remove(ignored) 1791 dirs.remove(ignored)
1759 found += [os.path.join(dir, f) for f in files if (f.endswith(['.bb', '.bbappend']))] 1792 found += [os.path.join(dir, f) for f in files if (f.endswith(('.bb', '.bbappend')))]
1760 1793
1761 return found 1794 return found
1762 1795
@@ -1764,7 +1797,7 @@ class CookerCollectFiles(object):
1764 """Collect all available .bb build files""" 1797 """Collect all available .bb build files"""
1765 masked = 0 1798 masked = 0
1766 1799
1767 collectlog.debug(1, "collecting .bb files") 1800 collectlog.debug("collecting .bb files")
1768 1801
1769 files = (config.getVar( "BBFILES") or "").split() 1802 files = (config.getVar( "BBFILES") or "").split()
1770 1803
@@ -1772,14 +1805,14 @@ class CookerCollectFiles(object):
1772 files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem)[0] ) 1805 files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem)[0] )
1773 config.setVar("BBFILES_PRIORITIZED", " ".join(files)) 1806 config.setVar("BBFILES_PRIORITIZED", " ".join(files))
1774 1807
1775 if not len(files): 1808 if not files:
1776 files = self.get_bbfiles() 1809 files = self.get_bbfiles()
1777 1810
1778 if not len(files): 1811 if not files:
1779 collectlog.error("no recipe files to build, check your BBPATH and BBFILES?") 1812 collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
1780 bb.event.fire(CookerExit(), eventdata) 1813 bb.event.fire(CookerExit(), eventdata)
1781 1814
1782 # We need to track where we look so that we can add inotify watches. There 1815 # We need to track where we look so that we can know when the cache is invalid. There
1783 # is no nice way to do this, this is horrid. We intercept the os.listdir() 1816 # is no nice way to do this, this is horrid. We intercept the os.listdir()
1784 # (or os.scandir() for python 3.6+) calls while we run glob(). 1817 # (or os.scandir() for python 3.6+) calls while we run glob().
1785 origlistdir = os.listdir 1818 origlistdir = os.listdir
@@ -1835,7 +1868,7 @@ class CookerCollectFiles(object):
1835 try: 1868 try:
1836 re.compile(mask) 1869 re.compile(mask)
1837 bbmasks.append(mask) 1870 bbmasks.append(mask)
1838 except sre_constants.error: 1871 except re.error:
1839 collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask) 1872 collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)
1840 1873
1841 # Then validate the combined regular expressions. This should never 1874 # Then validate the combined regular expressions. This should never
@@ -1843,7 +1876,7 @@ class CookerCollectFiles(object):
         bbmask = "|".join(bbmasks)
         try:
             bbmask_compiled = re.compile(bbmask)
-        except sre_constants.error:
+        except re.error:
             collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
             bbmask = None
 
@@ -1851,7 +1884,7 @@ class CookerCollectFiles(object):
         bbappend = []
         for f in newfiles:
             if bbmask and bbmask_compiled.search(f):
-                collectlog.debug(1, "skipping masked file %s", f)
+                collectlog.debug("skipping masked file %s", f)
                 masked += 1
                 continue
             if f.endswith('.bb'):
@@ -1859,7 +1892,7 @@ class CookerCollectFiles(object):
             elif f.endswith('.bbappend'):
                 bbappend.append(f)
             else:
-                collectlog.debug(1, "skipping %s: unknown file extension", f)
+                collectlog.debug("skipping %s: unknown file extension", f)
 
         # Build a list of .bbappend files for each .bb file
         for f in bbappend:
@@ -1910,7 +1943,7 @@ class CookerCollectFiles(object):
         # Calculate priorities for each file
         for p in pkgfns:
             realfn, cls, mc = bb.cache.virtualfn2realfn(p)
-            priorities[p], regex = self.calc_bbfile_priority(realfn)
+            priorities[p], regex, _ = self.calc_bbfile_priority(realfn)
             if regex in unmatched_regex:
                 matched_regex.add(regex)
                 unmatched_regex.remove(regex)
@@ -1961,15 +1994,30 @@ class ParsingFailure(Exception):
         Exception.__init__(self, realexception, recipe)
 
 class Parser(multiprocessing.Process):
-    def __init__(self, jobs, results, quit, init, profile):
+    def __init__(self, jobs, results, quit, profile):
         self.jobs = jobs
         self.results = results
         self.quit = quit
-        self.init = init
         multiprocessing.Process.__init__(self)
         self.context = bb.utils.get_context().copy()
         self.handlers = bb.event.get_class_handlers().copy()
         self.profile = profile
+        self.queue_signals = False
+        self.signal_received = []
+        self.signal_threadlock = threading.Lock()
+
+    def catch_sig(self, signum, frame):
+        if self.queue_signals:
+            self.signal_received.append(signum)
+        else:
+            self.handle_sig(signum, frame)
+
+    def handle_sig(self, signum, frame):
+        if signum == signal.SIGTERM:
+            signal.signal(signal.SIGTERM, signal.SIG_DFL)
+            os.kill(os.getpid(), signal.SIGTERM)
+        elif signum == signal.SIGINT:
+            signal.default_int_handler(signum, frame)
 
     def run(self):
 
@@ -1989,38 +2037,50 @@ class Parser(multiprocessing.Process):
             prof.dump_stats(logfile)
 
     def realrun(self):
-        if self.init:
-            self.init()
+        # Signal handling here is hard. We must not terminate any process or thread holding the write
+        # lock for the event stream as it will not be released, ever, and things will hang.
+        # Python handles signals in the main thread/process but they can be raised from any thread and
+        # we want to defer processing of any SIGTERM/SIGINT signal until we're outside the critical section
+        # and don't hold the lock (see server/process.py). We therefore always catch the signals (so any
+        # new thread should also do so) and we defer handling but we handle with the local thread lock
+        # held (a threading lock, not a multiprocessing one) so that no other thread in the process
+        # can be in the critical section.
+        signal.signal(signal.SIGTERM, self.catch_sig)
+        signal.signal(signal.SIGHUP, signal.SIG_DFL)
+        signal.signal(signal.SIGINT, self.catch_sig)
+        bb.utils.set_process_name(multiprocessing.current_process().name)
+        multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
+        multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
 
         pending = []
-        while True:
-            try:
-                self.quit.get_nowait()
-            except queue.Empty:
-                pass
-            else:
-                self.results.close()
-                self.results.join_thread()
-                break
-
-            if pending:
-                result = pending.pop()
-            else:
-                try:
-                    job = self.jobs.pop()
-                except IndexError:
-                    self.results.close()
-                    self.results.join_thread()
-                    break
-                result = self.parse(*job)
-                # Clear the siggen cache after parsing to control memory usage, its huge
-                bb.parse.siggen.postparsing_clean_cache()
-            try:
-                self.results.put(result, timeout=0.25)
-            except queue.Full:
-                pending.append(result)
+        havejobs = True
+        try:
+            while havejobs or pending:
+                if self.quit.is_set():
+                    break
+
+                job = None
+                try:
+                    job = self.jobs.pop()
+                except IndexError:
+                    havejobs = False
+                if job:
+                    result = self.parse(*job)
+                    # Clear the siggen cache after parsing to control memory usage, its huge
+                    bb.parse.siggen.postparsing_clean_cache()
+                    pending.append(result)
+
+                if pending:
+                    try:
+                        result = pending.pop()
+                        self.results.put(result, timeout=0.05)
+                    except queue.Full:
+                        pending.append(result)
+        finally:
+            self.results.close()
+            self.results.join_thread()
 
-    def parse(self, mc, cache, filename, appends):
+    def parse(self, mc, cache, filename, appends, layername):
         try:
             origfilter = bb.event.LogHandler.filter
             # Record the filename we're parsing into any events generated
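The deferral scheme described in the comment above can be shown in isolation. The following is a minimal standalone sketch of the same idea, not BitBake code (the critical_section helper is hypothetical): signals are queued while a lock-protected section runs and replayed once it is released.

    import os
    import signal
    import threading

    class DeferredSignals:
        """Queue SIGTERM/SIGINT during a critical section, replay them after."""

        def __init__(self):
            self.queue_signals = False
            self.signal_received = []
            self.lock = threading.Lock()
            signal.signal(signal.SIGTERM, self.catch_sig)
            signal.signal(signal.SIGINT, self.catch_sig)

        def catch_sig(self, signum, frame):
            if self.queue_signals:
                # Inside the critical section: remember the signal for later
                self.signal_received.append(signum)
            else:
                self.handle_sig(signum, frame)

        def handle_sig(self, signum, frame):
            if signum == signal.SIGTERM:
                # Restore the default handler and re-deliver so the process exits
                signal.signal(signal.SIGTERM, signal.SIG_DFL)
                os.kill(os.getpid(), signal.SIGTERM)
            elif signum == signal.SIGINT:
                signal.default_int_handler(signum, frame)

        def critical_section(self, func):
            with self.lock:
                self.queue_signals = True
                try:
                    func()
                finally:
                    self.queue_signals = False
                    # Replay anything that arrived while the lock was held
                    while self.signal_received:
                        self.handle_sig(self.signal_received.pop(0), None)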
@@ -2034,17 +2094,17 @@ class Parser(multiprocessing.Process):
             bb.event.set_class_handlers(self.handlers.copy())
             bb.event.LogHandler.filter = parse_filter
 
-            return True, mc, cache.parse(filename, appends)
+            return True, mc, cache.parse(filename, appends, layername)
         except Exception as exc:
             tb = sys.exc_info()[2]
             exc.recipe = filename
             exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3))
-            return True, exc
+            return True, None, exc
         # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown
         # and for example a worker thread doesn't just exit on its own in response to
         # a SystemExit event for example.
         except BaseException as exc:
-            return True, ParsingFailure(exc, filename)
+            return True, None, ParsingFailure(exc, filename)
         finally:
             bb.event.LogHandler.filter = origfilter
 
@@ -2074,10 +2134,11 @@ class CookerParser(object):
         for mc in self.cooker.multiconfigs:
             for filename in self.mcfilelist[mc]:
                 appends = self.cooker.collections[mc].get_file_appends(filename)
+                layername = self.cooker.collections[mc].calc_bbfile_priority(filename)[2]
                 if not self.bb_caches[mc].cacheValid(filename, appends):
-                    self.willparse.add((mc, self.bb_caches[mc], filename, appends))
+                    self.willparse.add((mc, self.bb_caches[mc], filename, appends, layername))
                 else:
-                    self.fromcache.add((mc, self.bb_caches[mc], filename, appends))
+                    self.fromcache.add((mc, self.bb_caches[mc], filename, appends, layername))
 
         self.total = len(self.fromcache) + len(self.willparse)
         self.toparse = len(self.willparse)
@@ -2086,6 +2147,7 @@ class CookerParser(object):
         self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
                                  multiprocessing.cpu_count()), self.toparse)
 
+        bb.cache.SiggenRecipeInfo.reset()
         self.start()
         self.haveshutdown = False
         self.syncthread = None
@@ -2095,15 +2157,8 @@ class CookerParser(object):
         self.processes = []
         if self.toparse:
             bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
-            def init():
-                signal.signal(signal.SIGTERM, signal.SIG_DFL)
-                signal.signal(signal.SIGHUP, signal.SIG_DFL)
-                signal.signal(signal.SIGINT, signal.SIG_IGN)
-                bb.utils.set_process_name(multiprocessing.current_process().name)
-                multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
-                multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
-
-        self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
+
+        self.parser_quit = multiprocessing.Event()
         self.result_queue = multiprocessing.Queue()
 
         def chunkify(lst,n):
@@ -2111,14 +2166,14 @@ class CookerParser(object):
         self.jobs = chunkify(list(self.willparse), self.num_processes)
 
         for i in range(0, self.num_processes):
-            parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
+            parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, self.cooker.configuration.profile)
             parser.start()
             self.process_names.append(parser.name)
             self.processes.append(parser)
 
         self.results = itertools.chain(self.results, self.parse_generator())
 
-    def shutdown(self, clean=True, force=False):
+    def shutdown(self, clean=True, eventmsg="Parsing halted due to errors"):
         if not self.toparse:
             return
         if self.haveshutdown:
@@ -2132,9 +2187,9 @@ class CookerParser(object):
                                     self.total)
 
             bb.event.fire(event, self.cfgdata)
-
-        for process in self.processes:
-            self.parser_quit.put(None)
+        else:
+            bb.event.fire(bb.event.ParseError(eventmsg), self.cfgdata)
+            bb.error("Parsing halted due to errors, see error messages above")
 
         # Cleanup the queue before call process.join(), otherwise there might be
         # deadlocks.
@@ -2144,25 +2199,39 @@ class CookerParser(object):
             except queue.Empty:
                 break
 
-        for process in self.processes:
-            if force:
-                process.join(.1)
-                process.terminate()
-            else:
-                process.join()
-
-        self.parser_quit.close()
-        # Allow data left in the cancel queue to be discarded
-        self.parser_quit.cancel_join_thread()
-
         def sync_caches():
             for c in self.bb_caches.values():
+                bb.cache.SiggenRecipeInfo.reset()
                 c.sync()
 
-        sync = threading.Thread(target=sync_caches, name="SyncThread")
-        self.syncthread = sync
-        sync.start()
+        self.syncthread = threading.Thread(target=sync_caches, name="SyncThread")
+        self.syncthread.start()
+
+        self.parser_quit.set()
+
+        for process in self.processes:
+            process.join(0.5)
+
+        for process in self.processes:
+            if process.exitcode is None:
+                os.kill(process.pid, signal.SIGINT)
+
+        for process in self.processes:
+            process.join(0.5)
+
+        for process in self.processes:
+            if process.exitcode is None:
+                process.terminate()
+
+        for process in self.processes:
+            process.join()
+            # Added in 3.7, cleans up zombies
+            if hasattr(process, "close"):
+                process.close()
+
+        bb.codeparser.parser_cache_save()
         bb.codeparser.parser_cache_savemerge()
+        bb.cache.SiggenRecipeInfo.reset()
         bb.fetch.fetcher_parse_done()
         if self.cooker.configuration.profile:
             profiles = []
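The shutdown sequence above escalates in stages rather than killing workers outright: set the quit Event, give each process a short join, send SIGINT to stragglers, join again, terminate() as a last resort, and finally join()/close() to reap everything. A condensed standalone sketch of the same escalation, with a hypothetical worker function:

    import multiprocessing
    import os
    import signal
    import time

    def worker(quit_event):
        # Hypothetical worker: poll the quit event and exit cleanly when set
        while not quit_event.is_set():
            time.sleep(0.1)

    if __name__ == "__main__":
        quit_event = multiprocessing.Event()
        procs = [multiprocessing.Process(target=worker, args=(quit_event,)) for _ in range(4)]
        for p in procs:
            p.start()

        quit_event.set()                        # stage 1: cooperative stop
        for p in procs:
            p.join(0.5)
        for p in procs:
            if p.exitcode is None:              # stage 2: interrupt stragglers
                os.kill(p.pid, signal.SIGINT)
        for p in procs:
            p.join(0.5)
        for p in procs:
            if p.exitcode is None:              # stage 3: last resort
                p.terminate()
        for p in procs:
            p.join()
            if hasattr(p, "close"):             # Python 3.7+, reaps the zombie
                p.close()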
@@ -2180,49 +2249,64 @@ class CookerParser(object):
             self.syncthread.join()
 
     def load_cached(self):
-        for mc, cache, filename, appends in self.fromcache:
-            cached, infos = cache.load(filename, appends)
-            yield not cached, mc, infos
+        for mc, cache, filename, appends, layername in self.fromcache:
+            infos = cache.loadCached(filename, appends)
+            yield False, mc, infos
 
     def parse_generator(self):
-        while True:
+        empty = False
+        while self.processes or not empty:
+            for process in self.processes.copy():
+                if not process.is_alive():
+                    process.join()
+                    self.processes.remove(process)
+
             if self.parsed >= self.toparse:
                 break
 
             try:
                 result = self.result_queue.get(timeout=0.25)
             except queue.Empty:
-                pass
+                empty = True
+                yield None, None, None
             else:
-                value = result[1]
-                if isinstance(value, BaseException):
-                    raise value
-                else:
-                    yield result
+                empty = False
+                yield result
+
+        if not (self.parsed >= self.toparse):
+            raise bb.parse.ParseError("Not all recipes parsed, parser thread killed/died? Exiting.", None)
+
 
     def parse_next(self):
         result = []
         parsed = None
         try:
             parsed, mc, result = next(self.results)
+            if isinstance(result, BaseException):
+                # Turn exceptions back into exceptions
+                raise result
+            if parsed is None:
+                # Timeout, loop back through the main loop
+                return True
+
         except StopIteration:
             self.shutdown()
             return False
         except bb.BBHandledException as exc:
             self.error += 1
-            logger.error('Failed to parse recipe: %s' % exc.recipe)
-            self.shutdown(clean=False, force=True)
+            logger.debug('Failed to parse recipe: %s' % exc.recipe)
+            self.shutdown(clean=False)
             return False
         except ParsingFailure as exc:
             self.error += 1
             logger.error('Unable to parse %s: %s' %
                      (exc.recipe, bb.exceptions.to_string(exc.realexception)))
-            self.shutdown(clean=False, force=True)
+            self.shutdown(clean=False)
             return False
         except bb.parse.ParseError as exc:
             self.error += 1
             logger.error(str(exc))
-            self.shutdown(clean=False, force=True)
+            self.shutdown(clean=False, eventmsg=str(exc))
             return False
         except bb.data_smart.ExpansionError as exc:
             self.error += 1
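parse_generator() now yields a (None, None, None) sentinel on queue timeout instead of silently looping, which lets parse_next() return control to the caller's main loop while workers are still busy. The pattern in isolation, with a hypothetical worker list and result queue:

    import queue

    def results_generator(result_queue, workers):
        """Yield results as they arrive; yield None on timeout so the caller stays responsive."""
        empty = False
        while workers or not empty:
            # Reap workers that have exited so the loop can terminate
            workers = [w for w in workers if w.is_alive()]
            try:
                result = result_queue.get(timeout=0.25)
            except queue.Empty:
                empty = True
                yield None          # sentinel: nothing yet, caller may do other work
            else:
                empty = False
                yield result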
@@ -2231,7 +2315,7 @@ class CookerParser(object):
             tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback))
             logger.error('ExpansionError during parsing %s', value.recipe,
                          exc_info=(etype, value, tb))
-            self.shutdown(clean=False, force=True)
+            self.shutdown(clean=False)
             return False
         except Exception as exc:
             self.error += 1
@@ -2243,7 +2327,7 @@ class CookerParser(object):
             # Most likely, an exception occurred during raising an exception
             import traceback
             logger.error('Exception during parse: %s' % traceback.format_exc())
-            self.shutdown(clean=False, force=True)
+            self.shutdown(clean=False)
             return False
 
         self.current += 1
@@ -2265,11 +2349,13 @@ class CookerParser(object):
         return True
 
     def reparse(self, filename):
+        bb.cache.SiggenRecipeInfo.reset()
         to_reparse = set()
         for mc in self.cooker.multiconfigs:
-            to_reparse.add((mc, filename, self.cooker.collections[mc].get_file_appends(filename)))
+            layername = self.cooker.collections[mc].calc_bbfile_priority(filename)[2]
+            to_reparse.add((mc, filename, self.cooker.collections[mc].get_file_appends(filename), layername))
 
-        for mc, filename, appends in to_reparse:
-            infos = self.bb_caches[mc].parse(filename, appends)
+        for mc, filename, appends, layername in to_reparse:
+            infos = self.bb_caches[mc].parse(filename, appends, layername)
             for vfn, info_array in infos:
                 self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)
diff --git a/bitbake/lib/bb/cookerdata.py b/bitbake/lib/bb/cookerdata.py
index 1c1e008c6b..0649e40995 100644
--- a/bitbake/lib/bb/cookerdata.py
+++ b/bitbake/lib/bb/cookerdata.py
@@ -57,7 +57,7 @@ class ConfigParameters(object):
 
     def updateToServer(self, server, environment):
         options = {}
-        for o in ["abort", "force", "invalidate_stamp",
+        for o in ["halt", "force", "invalidate_stamp",
                   "dry_run", "dump_signatures",
                   "extra_assume_provided", "profile",
                   "prefile", "postfile", "server_timeout",
@@ -86,7 +86,7 @@ class ConfigParameters(object):
                 action['msg'] = "Only one target can be used with the --environment option."
             elif self.options.buildfile and len(self.options.pkgs_to_build) > 0:
                 action['msg'] = "No target should be used with the --environment and --buildfile options."
-            elif len(self.options.pkgs_to_build) > 0:
+            elif self.options.pkgs_to_build:
                 action['action'] = ["showEnvironmentTarget", self.options.pkgs_to_build]
             else:
                 action['action'] = ["showEnvironment", self.options.buildfile]
@@ -124,7 +124,7 @@ class CookerConfiguration(object):
         self.prefile = []
         self.postfile = []
         self.cmd = None
-        self.abort = True
+        self.halt = True
         self.force = False
         self.profile = False
         self.nosetscene = False
@@ -160,12 +160,7 @@ def catch_parse_error(func):
     def wrapped(fn, *args):
         try:
             return func(fn, *args)
-        except IOError as exc:
-            import traceback
-            parselog.critical(traceback.format_exc())
-            parselog.critical("Unable to parse %s: %s" % (fn, exc))
-            raise bb.BBHandledException()
-        except bb.data_smart.ExpansionError as exc:
+        except Exception as exc:
             import traceback
 
             bbdir = os.path.dirname(__file__) + os.sep
@@ -177,14 +172,11 @@ def catch_parse_error(func):
                     break
             parselog.critical("Unable to parse %s" % fn, exc_info=(exc_class, exc, tb))
             raise bb.BBHandledException()
-        except bb.parse.ParseError as exc:
-            parselog.critical(str(exc))
-            raise bb.BBHandledException()
     return wrapped
 
 @catch_parse_error
 def parse_config_file(fn, data, include=True):
-    return bb.parse.handle(fn, data, include)
+    return bb.parse.handle(fn, data, include, baseconfig=True)
 
 @catch_parse_error
 def _inherit(bbclass, data):
@@ -210,7 +202,7 @@ def findConfigFile(configfile, data):
 
 #
 # We search for a conf/bblayers.conf under an entry in BBPATH or in cwd working
-# up to /. If that fails, we search for a conf/bitbake.conf in BBPATH.
+# up to /. If that fails, bitbake would fall back to cwd.
 #
 
 def findTopdir():
@@ -223,11 +215,8 @@ def findTopdir():
     layerconf = findConfigFile("bblayers.conf", d)
     if layerconf:
         return os.path.dirname(os.path.dirname(layerconf))
-    if bbpath:
-        bitbakeconf = bb.utils.which(bbpath, "conf/bitbake.conf")
-        if bitbakeconf:
-            return os.path.dirname(os.path.dirname(bitbakeconf))
-    return None
+
+    return os.path.abspath(os.getcwd())
 
 class CookerDataBuilder(object):
 
@@ -250,10 +239,14 @@ class CookerDataBuilder(object):
         self.savedenv = bb.data.init()
         for k in cookercfg.env:
             self.savedenv.setVar(k, cookercfg.env[k])
+            if k in bb.data_smart.bitbake_renamed_vars:
+                bb.error('Shell environment variable %s has been renamed to %s' % (k, bb.data_smart.bitbake_renamed_vars[k]))
+                bb.fatal("Exiting to allow enviroment variables to be corrected")
 
         filtered_keys = bb.utils.approved_variables()
         bb.data.inheritFromOS(self.basedata, self.savedenv, filtered_keys)
         self.basedata.setVar("BB_ORIGENV", self.savedenv)
+        self.basedata.setVar("__bbclasstype", "global")
 
         if worker:
             self.basedata.setVar("BB_WORKERCONTEXT", "1")
@@ -261,15 +254,15 @@ class CookerDataBuilder(object):
         self.data = self.basedata
         self.mcdata = {}
 
-    def parseBaseConfiguration(self):
+    def parseBaseConfiguration(self, worker=False):
+        mcdata = {}
         data_hash = hashlib.sha256()
         try:
             self.data = self.parseConfigurationFiles(self.prefiles, self.postfiles)
 
-            if self.data.getVar("BB_WORKERCONTEXT", False) is None:
+            if self.data.getVar("BB_WORKERCONTEXT", False) is None and not worker:
                 bb.fetch.fetcher_init(self.data)
             bb.parse.init_parser(self.data)
-            bb.codeparser.parser_cache_init(self.data)
 
             bb.event.fire(bb.event.ConfigParsed(), self.data)
 
@@ -287,38 +280,62 @@ class CookerDataBuilder(object):
 
             bb.parse.init_parser(self.data)
             data_hash.update(self.data.get_hash().encode('utf-8'))
-            self.mcdata[''] = self.data
+            mcdata[''] = self.data
 
             multiconfig = (self.data.getVar("BBMULTICONFIG") or "").split()
             for config in multiconfig:
-                mcdata = self.parseConfigurationFiles(self.prefiles, self.postfiles, config)
-                bb.event.fire(bb.event.ConfigParsed(), mcdata)
-                self.mcdata[config] = mcdata
-                data_hash.update(mcdata.get_hash().encode('utf-8'))
+                if config[0].isdigit():
+                    bb.fatal("Multiconfig name '%s' is invalid as multiconfigs cannot start with a digit" % config)
+                parsed_mcdata = self.parseConfigurationFiles(self.prefiles, self.postfiles, config)
+                bb.event.fire(bb.event.ConfigParsed(), parsed_mcdata)
+                mcdata[config] = parsed_mcdata
+                data_hash.update(parsed_mcdata.get_hash().encode('utf-8'))
             if multiconfig:
-                bb.event.fire(bb.event.MultiConfigParsed(self.mcdata), self.data)
+                bb.event.fire(bb.event.MultiConfigParsed(mcdata), self.data)
 
             self.data_hash = data_hash.hexdigest()
-        except (SyntaxError, bb.BBHandledException):
-            raise bb.BBHandledException()
         except bb.data_smart.ExpansionError as e:
             logger.error(str(e))
             raise bb.BBHandledException()
-        except Exception:
-            logger.exception("Error parsing configuration files")
+
+        bb.codeparser.update_module_dependencies(self.data)
+
+        # Handle obsolete variable names
+        d = self.data
+        renamedvars = d.getVarFlags('BB_RENAMED_VARIABLES') or {}
+        renamedvars.update(bb.data_smart.bitbake_renamed_vars)
+        issues = False
+        for v in renamedvars:
+            if d.getVar(v) != None or d.hasOverrides(v):
+                issues = True
+                loginfo = {}
+                history = d.varhistory.get_variable_refs(v)
+                for h in history:
+                    for line in history[h]:
+                        loginfo = {'file' : h, 'line' : line}
+                        bb.data.data_smart._print_rename_error(v, loginfo, renamedvars)
+                if not history:
+                    bb.data.data_smart._print_rename_error(v, loginfo, renamedvars)
+        if issues:
             raise bb.BBHandledException()
 
+        for mc in mcdata:
+            mcdata[mc].renameVar("__depends", "__base_depends")
+            mcdata[mc].setVar("__bbclasstype", "recipe")
+
         # Create a copy so we can reset at a later date when UIs disconnect
-        self.origdata = self.data
-        self.data = bb.data.createCopy(self.origdata)
-        self.mcdata[''] = self.data
+        self.mcorigdata = mcdata
+        for mc in mcdata:
+            self.mcdata[mc] = bb.data.createCopy(mcdata[mc])
+        self.data = self.mcdata['']
 
     def reset(self):
         # We may not have run parseBaseConfiguration() yet
-        if not hasattr(self, 'origdata'):
+        if not hasattr(self, 'mcorigdata'):
             return
-        self.data = bb.data.createCopy(self.origdata)
-        self.mcdata[''] = self.data
+        for mc in self.mcorigdata:
+            self.mcdata[mc] = bb.data.createCopy(self.mcorigdata[mc])
+        self.data = self.mcdata['']
 
     def _findLayerConf(self, data):
         return findConfigFile("bblayers.conf", data)
@@ -333,15 +350,23 @@ class CookerDataBuilder(object):
 
         layerconf = self._findLayerConf(data)
         if layerconf:
-            parselog.debug(2, "Found bblayers.conf (%s)", layerconf)
+            parselog.debug2("Found bblayers.conf (%s)", layerconf)
             # By definition bblayers.conf is in conf/ of TOPDIR.
             # We may have been called with cwd somewhere else so reset TOPDIR
             data.setVar("TOPDIR", os.path.dirname(os.path.dirname(layerconf)))
             data = parse_config_file(layerconf, data)
 
+            if not data.getVar("BB_CACHEDIR"):
+                data.setVar("BB_CACHEDIR", "${TOPDIR}/cache")
+
+            bb.codeparser.parser_cache_init(data.getVar("BB_CACHEDIR"))
+
             layers = (data.getVar('BBLAYERS') or "").split()
             broken_layers = []
 
+            if not layers:
+                bb.fatal("The bblayers.conf file doesn't contain any BBLAYERS definition")
+
             data = bb.data.createCopy(data)
             approved = bb.utils.approved_variables()
 
@@ -357,8 +382,10 @@ class CookerDataBuilder(object):
                 parselog.critical("Please check BBLAYERS in %s" % (layerconf))
                 raise bb.BBHandledException()
 
+            layerseries = None
+            compat_entries = {}
             for layer in layers:
-                parselog.debug(2, "Adding layer %s", layer)
+                parselog.debug2("Adding layer %s", layer)
                 if 'HOME' in approved and '~' in layer:
                     layer = os.path.expanduser(layer)
                 if layer.endswith('/'):
@@ -369,8 +396,27 @@ class CookerDataBuilder(object):
                 data.expandVarref('LAYERDIR')
                 data.expandVarref('LAYERDIR_RE')
 
+                # Sadly we can't have nice things.
+                # Some layers think they're going to be 'clever' and copy the values from
+                # another layer, e.g. using ${LAYERSERIES_COMPAT_core}. The whole point of
+                # this mechanism is to make it clear which releases a layer supports and
+                # show when a layer master branch is bitrotting and is unmaintained.
+                # We therefore avoid people doing this here.
+                collections = (data.getVar('BBFILE_COLLECTIONS') or "").split()
+                for c in collections:
+                    compat_entry = data.getVar("LAYERSERIES_COMPAT_%s" % c)
+                    if compat_entry:
+                        compat_entries[c] = set(compat_entry.split())
+                        data.delVar("LAYERSERIES_COMPAT_%s" % c)
+                if not layerseries:
+                    layerseries = set((data.getVar("LAYERSERIES_CORENAMES") or "").split())
+                    if layerseries:
+                        data.delVar("LAYERSERIES_CORENAMES")
+
             data.delVar('LAYERDIR_RE')
             data.delVar('LAYERDIR')
+            for c in compat_entries:
+                data.setVar("LAYERSERIES_COMPAT_%s" % c, " ".join(sorted(compat_entries[c])))
 
             bbfiles_dynamic = (data.getVar('BBFILES_DYNAMIC') or "").split()
             collections = (data.getVar('BBFILE_COLLECTIONS') or "").split()
@@ -389,26 +435,38 @@ class CookerDataBuilder(object):
             if invalid:
                 bb.fatal("BBFILES_DYNAMIC entries must be of the form {!}<collection name>:<filename pattern>, not:\n    %s" % "\n    ".join(invalid))
 
-            layerseries = set((data.getVar("LAYERSERIES_CORENAMES") or "").split())
             collections_tmp = collections[:]
             for c in collections:
                 collections_tmp.remove(c)
                 if c in collections_tmp:
                     bb.fatal("Found duplicated BBFILE_COLLECTIONS '%s', check bblayers.conf or layer.conf to fix it." % c)
-                compat = set((data.getVar("LAYERSERIES_COMPAT_%s" % c) or "").split())
+
+                compat = set()
+                if c in compat_entries:
+                    compat = compat_entries[c]
+                if compat and not layerseries:
+                    bb.fatal("No core layer found to work with layer '%s'. Missing entry in bblayers.conf?" % c)
                 if compat and not (compat & layerseries):
                     bb.fatal("Layer %s is not compatible with the core layer which only supports these series: %s (layer is compatible with %s)"
                              % (c, " ".join(layerseries), " ".join(compat)))
                 elif not compat and not data.getVar("BB_WORKERCONTEXT"):
                     bb.warn("Layer %s should set LAYERSERIES_COMPAT_%s in its conf/layer.conf file to list the core layer names it is compatible with." % (c, c))
 
+            data.setVar("LAYERSERIES_CORENAMES", " ".join(sorted(layerseries)))
+
         if not data.getVar("BBPATH"):
             msg = "The BBPATH variable is not set"
             if not layerconf:
                 msg += (" and bitbake did not find a conf/bblayers.conf file in"
                         " the expected location.\nMaybe you accidentally"
                         " invoked bitbake from the wrong directory?")
-            raise SystemExit(msg)
+            bb.fatal(msg)
+
+        if not data.getVar("TOPDIR"):
+            data.setVar("TOPDIR", os.path.abspath(os.getcwd()))
+        if not data.getVar("BB_CACHEDIR"):
+            data.setVar("BB_CACHEDIR", "${TOPDIR}/cache")
+        bb.codeparser.parser_cache_init(data.getVar("BB_CACHEDIR"))
 
         data = parse_config_file(os.path.join("conf", "bitbake.conf"), data)
 
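The compatibility check above reduces to a set intersection between the series names the core layer declares (LAYERSERIES_CORENAMES) and the series a layer claims support for (LAYERSERIES_COMPAT_<collection>). Roughly, with hypothetical values:

    # Hypothetical values for illustration only
    layerseries = {"kirkstone"}                 # from LAYERSERIES_CORENAMES
    compat = {"honister", "kirkstone"}          # from LAYERSERIES_COMPAT_<collection>

    if compat and not layerseries:
        print("No core layer found to work with this layer")
    elif compat and not (compat & layerseries):
        print("Layer is not compatible with the core layer")
    elif not compat:
        print("Layer should set LAYERSERIES_COMPAT in its layer.conf")
    else:
        print("Layer is compatible")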
@@ -421,7 +479,7 @@ class CookerDataBuilder(object):
         for bbclass in bbclasses:
             data = _inherit(bbclass, data)
 
-        # Nomally we only register event handlers at the end of parsing .bb files
+        # Normally we only register event handlers at the end of parsing .bb files
         # We register any handlers we've found so far here...
         for var in data.getVar('__BBHANDLERS', False) or []:
             handlerfn = data.getVarFlag(var, "filename", False)
@@ -435,3 +493,54 @@ class CookerDataBuilder(object):
 
         return data
 
+    @staticmethod
+    def _parse_recipe(bb_data, bbfile, appends, mc, layername):
+        bb_data.setVar("__BBMULTICONFIG", mc)
+        bb_data.setVar("FILE_LAYERNAME", layername)
+
+        bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
+        bb.parse.cached_mtime_noerror(bbfile_loc)
+
+        if appends:
+            bb_data.setVar('__BBAPPEND', " ".join(appends))
+
+        return bb.parse.handle(bbfile, bb_data)
+
+    def parseRecipeVariants(self, bbfile, appends, virtonly=False, mc=None, layername=None):
+        """
+        Load and parse one .bb build file
+        Return the data and whether parsing resulted in the file being skipped
+        """
+
+        if virtonly:
+            (bbfile, virtual, mc) = bb.cache.virtualfn2realfn(bbfile)
+            bb_data = self.mcdata[mc].createCopy()
+            bb_data.setVar("__ONLYFINALISE", virtual or "default")
+            return self._parse_recipe(bb_data, bbfile, appends, mc, layername)
+
+        if mc is not None:
+            bb_data = self.mcdata[mc].createCopy()
+            return self._parse_recipe(bb_data, bbfile, appends, mc, layername)
+
+        bb_data = self.data.createCopy()
+        datastores = self._parse_recipe(bb_data, bbfile, appends, '', layername)
+
+        for mc in self.mcdata:
+            if not mc:
+                continue
+            bb_data = self.mcdata[mc].createCopy()
+            newstores = self._parse_recipe(bb_data, bbfile, appends, mc, layername)
+            for ns in newstores:
+                datastores["mc:%s:%s" % (mc, ns)] = newstores[ns]
+
+        return datastores
+
+    def parseRecipe(self, virtualfn, appends, layername):
+        """
+        Return a complete set of data for fn.
+        To do this, we need to parse the file.
+        """
+        logger.debug("Parsing %s (full)" % virtualfn)
+        (fn, virtual, mc) = bb.cache.virtualfn2realfn(virtualfn)
+        datastores = self.parseRecipeVariants(virtualfn, appends, virtonly=True, layername=layername)
+        return datastores[virtual]
diff --git a/bitbake/lib/bb/daemonize.py b/bitbake/lib/bb/daemonize.py
index c187fcfc6c..7689404436 100644
--- a/bitbake/lib/bb/daemonize.py
+++ b/bitbake/lib/bb/daemonize.py
@@ -1,4 +1,6 @@
 #
+# Copyright BitBake Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
@@ -74,26 +76,26 @@ def createDaemon(function, logfile):
     with open('/dev/null', 'r') as si:
         os.dup2(si.fileno(), sys.stdin.fileno())
 
-    try:
-        so = open(logfile, 'a+')
-        os.dup2(so.fileno(), sys.stdout.fileno())
-        os.dup2(so.fileno(), sys.stderr.fileno())
-    except io.UnsupportedOperation:
-        sys.stdout = open(logfile, 'a+')
+    with open(logfile, 'a+') as so:
+        try:
+            os.dup2(so.fileno(), sys.stdout.fileno())
+            os.dup2(so.fileno(), sys.stderr.fileno())
+        except io.UnsupportedOperation:
+            sys.stdout = so
 
-    # Have stdout and stderr be the same so log output matches chronologically
-    # and there aren't two seperate buffers
-    sys.stderr = sys.stdout
+        # Have stdout and stderr be the same so log output matches chronologically
+        # and there aren't two separate buffers
+        sys.stderr = sys.stdout
 
-    try:
-        function()
-    except Exception as e:
-        traceback.print_exc()
-    finally:
-        bb.event.print_ui_queue()
-        # os._exit() doesn't flush open files like os.exit() does. Manually flush
-        # stdout and stderr so that any logging output will be seen, particularly
-        # exception tracebacks.
-        sys.stdout.flush()
-        sys.stderr.flush()
-        os._exit(0)
+        try:
+            function()
+        except Exception as e:
+            traceback.print_exc()
+        finally:
+            bb.event.print_ui_queue()
+            # os._exit() doesn't flush open files like os.exit() does. Manually flush
+            # stdout and stderr so that any logging output will be seen, particularly
+            # exception tracebacks.
+            sys.stdout.flush()
+            sys.stderr.flush()
+            os._exit(0)
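The with-block keeps the log file object open for the whole redirected lifetime (the previous code leaked an unclosed file object), while os.dup2() points file descriptors 1 and 2 at it. A minimal sketch of that redirection, assuming a writable hypothetical log path:

    import os
    import sys

    logfile = "/tmp/daemon-example.log"     # hypothetical path
    with open(logfile, 'a+') as so:
        # Point the process-level stdout/stderr descriptors at the log file
        os.dup2(so.fileno(), sys.stdout.fileno())
        os.dup2(so.fileno(), sys.stderr.fileno())
        # Share one stream so output stays chronological
        sys.stderr = sys.stdout
        print("this line lands in the log file")
        sys.stdout.flush()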
diff --git a/bitbake/lib/bb/data.py b/bitbake/lib/bb/data.py
index 97022853ca..505f42950f 100644
--- a/bitbake/lib/bb/data.py
+++ b/bitbake/lib/bb/data.py
@@ -4,14 +4,16 @@ BitBake 'Data' implementations
 Functions for interacting with the data structure used by the
 BitBake build tools.
 
-The expandKeys and update_data are the most expensive
-operations. At night the cookie monster came by and
+expandKeys and datastore iteration are the most expensive
+operations. Updating overrides is now "on the fly" but still based
+on the idea of the cookie monster introduced by zecke:
+"At night the cookie monster came by and
 suggested 'give me cookies on setting the variables and
 things will work out'. Taking this suggestion into account
 applying the skills from the not yet passed 'Entwurf und
 Analyse von Algorithmen' lecture and the cookie
 monster seems to be right. We will track setVar more carefully
-to have faster update_data and expandKeys operations.
+to have faster datastore operations."
 
 This is a trade-off between speed and memory again but
 the speed is more critical here.
@@ -26,11 +28,6 @@ the speed is more critical here.
 
 import sys, os, re
 import hashlib
-if sys.argv[0][-5:] == "pydoc":
-    path = os.path.dirname(os.path.dirname(sys.argv[1]))
-else:
-    path = os.path.dirname(os.path.dirname(sys.argv[0]))
-sys.path.insert(0, path)
 from itertools import groupby
 
 from bb import data_smart
@@ -70,10 +67,6 @@ def keys(d):
70 """Return a list of keys in d""" 67 """Return a list of keys in d"""
71 return d.keys() 68 return d.keys()
72 69
73
74__expand_var_regexp__ = re.compile(r"\${[^{}]+}")
75__expand_python_regexp__ = re.compile(r"\${@.+?}")
76
77def expand(s, d, varname = None): 70def expand(s, d, varname = None):
78 """Variable expansion using the data store""" 71 """Variable expansion using the data store"""
79 return d.expand(s, varname) 72 return d.expand(s, varname)
@@ -121,8 +114,8 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):
     if d.getVarFlag(var, 'python', False) and func:
         return False
 
-    export = d.getVarFlag(var, "export", False)
-    unexport = d.getVarFlag(var, "unexport", False)
+    export = bb.utils.to_boolean(d.getVarFlag(var, "export"))
+    unexport = bb.utils.to_boolean(d.getVarFlag(var, "unexport"))
     if not all and not export and not unexport and not func:
         return False
 
@@ -195,8 +188,8 @@ def emit_env(o=sys.__stdout__, d = init(), all=False):
 
 def exported_keys(d):
     return (key for key in d.keys() if not key.startswith('__') and
-                d.getVarFlag(key, 'export', False) and
-                not d.getVarFlag(key, 'unexport', False))
+                bb.utils.to_boolean(d.getVarFlag(key, 'export')) and
+                not bb.utils.to_boolean(d.getVarFlag(key, 'unexport')))
 
 def exported_vars(d):
     k = list(exported_keys(d))
@@ -226,7 +219,7 @@ def emit_func(func, o=sys.__stdout__, d = init()):
         deps = newdeps
         seen |= deps
         newdeps = set()
-        for dep in deps:
+        for dep in sorted(deps):
             if d.getVarFlag(dep, "func", False) and not d.getVarFlag(dep, "python", False):
                 emit_var(dep, o, d, False) and o.write('\n')
                 newdeps |= bb.codeparser.ShellParser(dep, logger).parse_shell(d.getVar(dep))
@@ -268,65 +261,72 @@ def emit_func_python(func, o=sys.__stdout__, d = init()):
         newdeps |= set((d.getVarFlag(dep, "vardeps") or "").split())
         newdeps -= seen
 
-def update_data(d):
-    """Performs final steps upon the datastore, including application of overrides"""
-    d.finalize(parent = True)
+def build_dependencies(key, keys, mod_funcs, shelldeps, varflagsexcl, ignored_vars, d, codeparsedata):
+    def handle_contains(value, contains, exclusions, d):
+        newvalue = []
+        if value:
+            newvalue.append(str(value))
+        for k in sorted(contains):
+            if k in exclusions or k in ignored_vars:
+                continue
+            l = (d.getVar(k) or "").split()
+            for item in sorted(contains[k]):
+                for word in item.split():
+                    if not word in l:
+                        newvalue.append("\n%s{%s} = Unset" % (k, item))
+                        break
+                else:
+                    newvalue.append("\n%s{%s} = Set" % (k, item))
+        return "".join(newvalue)
+
+    def handle_remove(value, deps, removes, d):
+        for r in sorted(removes):
+            r2 = d.expandWithRefs(r, None)
+            value += "\n_remove of %s" % r
+            deps |= r2.references
+            deps = deps | (keys & r2.execs)
+            value = handle_contains(value, r2.contains, exclusions, d)
+        return value
 
-def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
     deps = set()
     try:
+        if key in mod_funcs:
+            exclusions = set()
+            moddep = bb.codeparser.modulecode_deps[key]
+            value = handle_contains("", moddep[3], exclusions, d)
+            return frozenset((moddep[0] | keys & moddep[1]) - ignored_vars), value
+
         if key[-1] == ']':
             vf = key[:-1].split('[')
+            if vf[1] == "vardepvalueexclude":
+                return deps, ""
             value, parser = d.getVarFlag(vf[0], vf[1], False, retparser=True)
             deps |= parser.references
             deps = deps | (keys & parser.execs)
-            return deps, value
+            deps -= ignored_vars
+            return frozenset(deps), value
         varflags = d.getVarFlags(key, ["vardeps", "vardepvalue", "vardepsexclude", "exports", "postfuncs", "prefuncs", "lineno", "filename"]) or {}
         vardeps = varflags.get("vardeps")
-
-        def handle_contains(value, contains, d):
-            newvalue = ""
-            for k in sorted(contains):
-                l = (d.getVar(k) or "").split()
-                for item in sorted(contains[k]):
-                    for word in item.split():
-                        if not word in l:
-                            newvalue += "\n%s{%s} = Unset" % (k, item)
-                            break
-                    else:
-                        newvalue += "\n%s{%s} = Set" % (k, item)
-            if not newvalue:
-                return value
-            if not value:
-                return newvalue
-            return value + newvalue
-
-        def handle_remove(value, deps, removes, d):
-            for r in sorted(removes):
-                r2 = d.expandWithRefs(r, None)
-                value += "\n_remove of %s" % r
-                deps |= r2.references
-                deps = deps | (keys & r2.execs)
-            return value
+        exclusions = varflags.get("vardepsexclude", "").split()
 
         if "vardepvalue" in varflags:
             value = varflags.get("vardepvalue")
         elif varflags.get("func"):
             if varflags.get("python"):
-                value = d.getVarFlag(key, "_content", False)
+                value = codeparsedata.getVarFlag(key, "_content", False)
                 parser = bb.codeparser.PythonParser(key, logger)
                 parser.parse_python(value, filename=varflags.get("filename"), lineno=varflags.get("lineno"))
                 deps = deps | parser.references
                 deps = deps | (keys & parser.execs)
-                value = handle_contains(value, parser.contains, d)
+                value = handle_contains(value, parser.contains, exclusions, d)
             else:
-                value, parsedvar = d.getVarFlag(key, "_content", False, retparser=True)
+                value, parsedvar = codeparsedata.getVarFlag(key, "_content", False, retparser=True)
                 parser = bb.codeparser.ShellParser(key, logger)
                 parser.parse_shell(parsedvar.value)
                 deps = deps | shelldeps
                 deps = deps | parsedvar.references
                 deps = deps | (keys & parser.execs) | (keys & parsedvar.execs)
-                value = handle_contains(value, parsedvar.contains, d)
+                value = handle_contains(value, parsedvar.contains, exclusions, d)
                 if hasattr(parsedvar, "removes"):
                     value = handle_remove(value, deps, parsedvar.removes, d)
             if vardeps is None:
@@ -341,7 +341,7 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
             value, parser = d.getVarFlag(key, "_content", False, retparser=True)
             deps |= parser.references
             deps = deps | (keys & parser.execs)
-            value = handle_contains(value, parser.contains, d)
+            value = handle_contains(value, parser.contains, exclusions, d)
             if hasattr(parser, "removes"):
                 value = handle_remove(value, deps, parser.removes, d)
 
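handle_contains() records one '<VAR>{<item>} = Set/Unset' line per tracked contains-check, so a task signature changes whenever a bb.utils.contains() style test flips. A simplified standalone sketch of the same encoding, with a plain dict standing in for the datastore:

    def handle_contains(value, contains, data):
        # contains maps a variable name to the set of items whose membership was tested
        newvalue = [str(value)] if value else []
        for var in sorted(contains):
            current = (data.get(var) or "").split()
            for item in sorted(contains[var]):
                state = "Set" if all(w in current for w in item.split()) else "Unset"
                newvalue.append("\n%s{%s} = %s" % (var, item, state))
        return "".join(newvalue)

    # DISTRO_FEATURES was tested for "systemd" and "x11" during parsing
    print(handle_contains("base", {"DISTRO_FEATURES": {"systemd", "x11"}},
                          {"DISTRO_FEATURES": "systemd wayland"}))
    # base
    # DISTRO_FEATURES{systemd} = Set
    # DISTRO_FEATURES{x11} = Unset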
@@ -361,43 +361,50 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
             deps |= set(varfdeps)
 
         deps |= set((vardeps or "").split())
-        deps -= set(varflags.get("vardepsexclude", "").split())
+        deps -= set(exclusions)
+        deps -= ignored_vars
     except bb.parse.SkipRecipe:
         raise
     except Exception as e:
         bb.warn("Exception during build_dependencies for %s" % key)
         raise
-    return deps, value
+    return frozenset(deps), value
     #bb.note("Variable %s references %s and calls %s" % (key, str(deps), str(execs)))
     #d.setVarFlag(key, "vardeps", deps)
 
-def generate_dependencies(d, whitelist):
+def generate_dependencies(d, ignored_vars):
 
-    keys = set(key for key in d if not key.startswith("__"))
-    shelldeps = set(key for key in d.getVar("__exportlist", False) if d.getVarFlag(key, "export", False) and not d.getVarFlag(key, "unexport", False))
+    mod_funcs = set(bb.codeparser.modulecode_deps.keys())
+    keys = set(key for key in d if not key.startswith("__")) | mod_funcs
+    shelldeps = set(key for key in d.getVar("__exportlist", False) if bb.utils.to_boolean(d.getVarFlag(key, "export")) and not bb.utils.to_boolean(d.getVarFlag(key, "unexport")))
     varflagsexcl = d.getVar('BB_SIGNATURE_EXCLUDE_FLAGS')
 
+    codeparserd = d.createCopy()
+    for forced in (d.getVar('BB_HASH_CODEPARSER_VALS') or "").split():
+        key, value = forced.split("=", 1)
+        codeparserd.setVar(key, value)
+
     deps = {}
     values = {}
 
     tasklist = d.getVar('__BBTASKS', False) or []
     for task in tasklist:
-        deps[task], values[task] = build_dependencies(task, keys, shelldeps, varflagsexcl, d)
+        deps[task], values[task] = build_dependencies(task, keys, mod_funcs, shelldeps, varflagsexcl, ignored_vars, d, codeparserd)
         newdeps = deps[task]
         seen = set()
         while newdeps:
-            nextdeps = newdeps - whitelist
+            nextdeps = newdeps
            seen |= nextdeps
            newdeps = set()
            for dep in nextdeps:
                if dep not in deps:
-                    deps[dep], values[dep] = build_dependencies(dep, keys, shelldeps, varflagsexcl, d)
+                    deps[dep], values[dep] = build_dependencies(dep, keys, mod_funcs, shelldeps, varflagsexcl, ignored_vars, d, codeparserd)
                newdeps |= deps[dep]
            newdeps -= seen
         #print "For %s: %s" % (task, str(deps[task]))
     return tasklist, deps, values
 
-def generate_dependency_hash(tasklist, gendeps, lookupcache, whitelist, fn):
+def generate_dependency_hash(tasklist, gendeps, lookupcache, ignored_vars, fn):
     taskdeps = {}
     basehash = {}
 
@@ -406,9 +413,10 @@ def generate_dependency_hash(tasklist, gendeps, lookupcache, whitelist, fn):
 
         if data is None:
             bb.error("Task %s from %s seems to be empty?!" % (task, fn))
-            data = ''
+            data = []
+        else:
+            data = [data]
 
-        gendeps[task] -= whitelist
         newdeps = gendeps[task]
         seen = set()
         while newdeps:
@@ -416,27 +424,24 @@ def generate_dependency_hash(tasklist, gendeps, lookupcache, whitelist, fn):
             seen |= nextdeps
             newdeps = set()
             for dep in nextdeps:
-                if dep in whitelist:
-                    continue
-                gendeps[dep] -= whitelist
                 newdeps |= gendeps[dep]
             newdeps -= seen
 
         alldeps = sorted(seen)
         for dep in alldeps:
-            data = data + dep
+            data.append(dep)
             var = lookupcache[dep]
             if var is not None:
-                data = data + str(var)
+                data.append(str(var))
         k = fn + ":" + task
-        basehash[k] = hashlib.sha256(data.encode("utf-8")).hexdigest()
-        taskdeps[task] = alldeps
+        basehash[k] = hashlib.sha256("".join(data).encode("utf-8")).hexdigest()
+        taskdeps[task] = frozenset(seen)
 
     return taskdeps, basehash
 
 def inherits_class(klass, d):
     val = d.getVar('__inherit_cache', False) or []
-    needle = os.path.join('classes', '%s.bbclass' % klass)
+    needle = '/%s.bbclass' % klass
     for v in val:
         if v.endswith(needle):
             return True
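Switching the hash input from repeated string concatenation to list appends with a single join avoids quadratic copying on recipes with many dependencies; the resulting digest is unchanged. The idea in isolation, with made-up data:

    import hashlib

    # Made-up dependency data for illustration
    alldeps = sorted(["CC", "CFLAGS"])
    lookupcache = {"CC": "gcc", "CFLAGS": "-O2"}

    data = ["task body"]
    for dep in alldeps:
        data.append(dep)
        var = lookupcache[dep]
        if var is not None:
            data.append(str(var))

    # One join and one hash instead of many intermediate strings
    print(hashlib.sha256("".join(data).encode("utf-8")).hexdigest())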
diff --git a/bitbake/lib/bb/data_smart.py b/bitbake/lib/bb/data_smart.py
index 2328c334ac..0128a5bb17 100644
--- a/bitbake/lib/bb/data_smart.py
+++ b/bitbake/lib/bb/data_smart.py
@@ -16,8 +16,11 @@ BitBake build tools.
 #
 # Based on functions from the base bb module, Copyright 2003 Holger Schurig
 
-import copy, re, sys, traceback
-from collections import MutableMapping
+import builtins
+import copy
+import re
+import sys
+from collections.abc import MutableMapping
 import logging
 import hashlib
 import bb, bb.codeparser
@@ -26,13 +29,25 @@ from bb.COW import COWDictBase
26 29
27logger = logging.getLogger("BitBake.Data") 30logger = logging.getLogger("BitBake.Data")
28 31
29__setvar_keyword__ = ["_append", "_prepend", "_remove"] 32__setvar_keyword__ = [":append", ":prepend", ":remove"]
30__setvar_regexp__ = re.compile(r'(?P<base>.*?)(?P<keyword>_append|_prepend|_remove)(_(?P<add>[^A-Z]*))?$') 33__setvar_regexp__ = re.compile(r'(?P<base>.*?)(?P<keyword>:append|:prepend|:remove)(:(?P<add>[^A-Z]*))?$')
31__expand_var_regexp__ = re.compile(r"\${[a-zA-Z0-9\-_+./~]+?}") 34__expand_var_regexp__ = re.compile(r"\${[a-zA-Z0-9\-_+./~:]+?}")
32__expand_python_regexp__ = re.compile(r"\${@.+?}") 35__expand_python_regexp__ = re.compile(r"\${@(?:{.*?}|.)+?}")
33__whitespace_split__ = re.compile(r'(\s)') 36__whitespace_split__ = re.compile(r'(\s)')
34__override_regexp__ = re.compile(r'[a-z0-9]+') 37__override_regexp__ = re.compile(r'[a-z0-9]+')
35 38
39bitbake_renamed_vars = {
40 "BB_ENV_WHITELIST": "BB_ENV_PASSTHROUGH",
41 "BB_ENV_EXTRAWHITE": "BB_ENV_PASSTHROUGH_ADDITIONS",
42 "BB_HASHBASE_WHITELIST": "BB_BASEHASH_IGNORE_VARS",
43 "BB_HASHCONFIG_WHITELIST": "BB_HASHCONFIG_IGNORE_VARS",
44 "BB_HASHTASK_WHITELIST": "BB_TASKHASH_IGNORE_TASKS",
45 "BB_SETSCENE_ENFORCE_WHITELIST": "BB_SETSCENE_ENFORCE_IGNORE_TASKS",
46 "MULTI_PROVIDER_WHITELIST": "BB_MULTI_PROVIDER_ALLOWED",
47 "BB_STAMP_WHITELIST": "is a deprecated variable and support has been removed",
48 "BB_STAMP_POLICY": "is a deprecated variable and support has been removed",
49}
50
36def infer_caller_details(loginfo, parent = False, varval = True): 51def infer_caller_details(loginfo, parent = False, varval = True):
37 """Save the caller the trouble of specifying everything.""" 52 """Save the caller the trouble of specifying everything."""
38 # Save effort. 53 # Save effort.
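The bitbake_renamed_vars table above feeds the deprecation reporting added further down (see _print_rename_error): a value containing a space is shown verbatim as a message, anything else is treated as the replacement name. A rough sketch of that convention, with a trimmed-down table:

    renames = {
        "BB_ENV_WHITELIST": "BB_ENV_PASSTHROUGH",
        "BB_STAMP_POLICY": "is a deprecated variable and support has been removed",
    }

    def describe(var):
        info = renames[var]
        # A space in the value signals a free-form message, not a new name.
        if " " in info:
            return "Variable %s %s" % (var, info)
        return "Variable %s has been renamed to %s" % (var, info)

    print(describe("BB_ENV_WHITELIST"))
    print(describe("BB_STAMP_POLICY"))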
@@ -80,10 +95,11 @@ def infer_caller_details(loginfo, parent = False, varval = True):
80 loginfo['func'] = func 95 loginfo['func'] = func
81 96
82class VariableParse: 97class VariableParse:
83 def __init__(self, varname, d, val = None): 98 def __init__(self, varname, d, unexpanded_value = None, val = None):
84 self.varname = varname 99 self.varname = varname
85 self.d = d 100 self.d = d
86 self.value = val 101 self.value = val
102 self.unexpanded_value = unexpanded_value
87 103
88 self.references = set() 104 self.references = set()
89 self.execs = set() 105 self.execs = set()
@@ -107,6 +123,11 @@ class VariableParse:
107 else: 123 else:
108 code = match.group()[3:-1] 124 code = match.group()[3:-1]
109 125
126 # Do not run code that contains one or more unexpanded variables
127 # instead return the code with the characters we removed put back
128 if __expand_var_regexp__.findall(code):
129 return "${@" + code + "}"
130
110 if self.varname: 131 if self.varname:
111 varname = 'Var <%s>' % self.varname 132 varname = 'Var <%s>' % self.varname
112 else: 133 else:
@@ -132,16 +153,21 @@ class VariableParse:
132 value = utils.better_eval(codeobj, DataContext(self.d), {'d' : self.d}) 153 value = utils.better_eval(codeobj, DataContext(self.d), {'d' : self.d})
133 return str(value) 154 return str(value)
134 155
135
136class DataContext(dict): 156class DataContext(dict):
157 excluded = set([i for i in dir(builtins) if not i.startswith('_')] + ['oe'])
158
137 def __init__(self, metadata, **kwargs): 159 def __init__(self, metadata, **kwargs):
138 self.metadata = metadata 160 self.metadata = metadata
139 dict.__init__(self, **kwargs) 161 dict.__init__(self, **kwargs)
140 self['d'] = metadata 162 self['d'] = metadata
163 self.context = set(bb.utils.get_context())
141 164
142 def __missing__(self, key): 165 def __missing__(self, key):
166 if key in self.excluded or key in self.context:
167 raise KeyError(key)
168
143 value = self.metadata.getVar(key) 169 value = self.metadata.getVar(key)
144 if value is None or self.metadata.getVarFlag(key, 'func', False): 170 if value is None:
145 raise KeyError(key) 171 raise KeyError(key)
146 else: 172 else:
147 return value 173 return value
@@ -151,6 +177,7 @@ class ExpansionError(Exception):
151 self.expression = expression 177 self.expression = expression
152 self.variablename = varname 178 self.variablename = varname
153 self.exception = exception 179 self.exception = exception
180 self.varlist = [varname or expression or ""]
154 if varname: 181 if varname:
155 if expression: 182 if expression:
156 self.msg = "Failure expanding variable %s, expression was %s which triggered exception %s: %s" % (varname, expression, type(exception).__name__, exception) 183 self.msg = "Failure expanding variable %s, expression was %s which triggered exception %s: %s" % (varname, expression, type(exception).__name__, exception)
@@ -160,8 +187,14 @@ class ExpansionError(Exception):
160 self.msg = "Failure expanding expression %s which triggered exception %s: %s" % (expression, type(exception).__name__, exception) 187 self.msg = "Failure expanding expression %s which triggered exception %s: %s" % (expression, type(exception).__name__, exception)
161 Exception.__init__(self, self.msg) 188 Exception.__init__(self, self.msg)
162 self.args = (varname, expression, exception) 189 self.args = (varname, expression, exception)
190
191 def addVar(self, varname):
192 if varname:
193 self.varlist.append(varname)
194
163 def __str__(self): 195 def __str__(self):
164 return self.msg 196 chain = "\nThe variable dependency chain for the failure is: " + " -> ".join(self.varlist)
197 return self.msg + chain
165 198
166class IncludeHistory(object): 199class IncludeHistory(object):
167 def __init__(self, parent = None, filename = '[TOP LEVEL]'): 200 def __init__(self, parent = None, filename = '[TOP LEVEL]'):
@@ -277,7 +310,7 @@ class VariableHistory(object):
277 for (r, override) in d.overridedata[var]: 310 for (r, override) in d.overridedata[var]:
278 for event in self.variable(r): 311 for event in self.variable(r):
279 loginfo = event.copy() 312 loginfo = event.copy()
280 if 'flag' in loginfo and not loginfo['flag'].startswith("_"): 313 if 'flag' in loginfo and not loginfo['flag'].startswith(("_", ":")):
281 continue 314 continue
282 loginfo['variable'] = var 315 loginfo['variable'] = var
283 loginfo['op'] = 'override[%s]:%s' % (override, loginfo['op']) 316 loginfo['op'] = 'override[%s]:%s' % (override, loginfo['op'])
@@ -329,6 +362,16 @@ class VariableHistory(object):
329 lines.append(line) 362 lines.append(line)
330 return lines 363 return lines
331 364
365 def get_variable_refs(self, var):
366 """Return a dict of file/line references"""
367 var_history = self.variable(var)
368 refs = {}
369 for event in var_history:
370 if event['file'] not in refs:
371 refs[event['file']] = []
372 refs[event['file']].append(event['line'])
373 return refs
374
332 def get_variable_items_files(self, var): 375 def get_variable_items_files(self, var):
333 """ 376 """
334 Use variable history to map items added to a list variable and 377 Use variable history to map items added to a list variable and
@@ -342,7 +385,7 @@ class VariableHistory(object):
342 for event in history: 385 for event in history:
343 if 'flag' in event: 386 if 'flag' in event:
344 continue 387 continue
345 if event['op'] == '_remove': 388 if event['op'] == ':remove':
346 continue 389 continue
347 if isset and event['op'] == 'set?': 390 if isset and event['op'] == 'set?':
348 continue 391 continue
@@ -363,6 +406,23 @@ class VariableHistory(object):
363 else: 406 else:
364 self.variables[var] = [] 407 self.variables[var] = []
365 408
409def _print_rename_error(var, loginfo, renamedvars, fullvar=None):
410 info = ""
411 if "file" in loginfo:
412 info = " file: %s" % loginfo["file"]
413 if "line" in loginfo:
414 info += " line: %s" % loginfo["line"]
415 if fullvar and fullvar != var:
416 info += " referenced as: %s" % fullvar
417 if info:
418 info = " (%s)" % info.strip()
419 renameinfo = renamedvars[var]
420 if " " in renameinfo:
421 # A space signals a string to display instead of a rename
422 bb.erroronce('Variable %s %s%s' % (var, renameinfo, info))
423 else:
424 bb.erroronce('Variable %s has been renamed to %s%s' % (var, renameinfo, info))
425
366class DataSmart(MutableMapping): 426class DataSmart(MutableMapping):
367 def __init__(self): 427 def __init__(self):
368 self.dict = {} 428 self.dict = {}
@@ -370,6 +430,8 @@ class DataSmart(MutableMapping):
370 self.inchistory = IncludeHistory() 430 self.inchistory = IncludeHistory()
371 self.varhistory = VariableHistory(self) 431 self.varhistory = VariableHistory(self)
372 self._tracking = False 432 self._tracking = False
433 self._var_renames = {}
434 self._var_renames.update(bitbake_renamed_vars)
373 435
374 self.expand_cache = {} 436 self.expand_cache = {}
375 437
@@ -391,9 +453,9 @@ class DataSmart(MutableMapping):
391 def expandWithRefs(self, s, varname): 453 def expandWithRefs(self, s, varname):
392 454
393 if not isinstance(s, str): # sanity check 455 if not isinstance(s, str): # sanity check
394 return VariableParse(varname, self, s) 456 return VariableParse(varname, self, s, s)
395 457
396 varparse = VariableParse(varname, self) 458 varparse = VariableParse(varname, self, s)
397 459
398 while s.find('${') != -1: 460 while s.find('${') != -1:
399 olds = s 461 olds = s
@@ -403,14 +465,17 @@ class DataSmart(MutableMapping):
403 s = __expand_python_regexp__.sub(varparse.python_sub, s) 465 s = __expand_python_regexp__.sub(varparse.python_sub, s)
404 except SyntaxError as e: 466 except SyntaxError as e:
405 # Likely unmatched brackets, just don't expand the expression 467 # Likely unmatched brackets, just don't expand the expression
406 if e.msg != "EOL while scanning string literal": 468 if e.msg != "EOL while scanning string literal" and not e.msg.startswith("unterminated string literal"):
407 raise 469 raise
408 if s == olds: 470 if s == olds:
409 break 471 break
410 except ExpansionError: 472 except ExpansionError as e:
473 e.addVar(varname)
411 raise 474 raise
412 except bb.parse.SkipRecipe: 475 except bb.parse.SkipRecipe:
413 raise 476 raise
477 except bb.BBHandledException:
478 raise
414 except Exception as exc: 479 except Exception as exc:
415 tb = sys.exc_info()[2] 480 tb = sys.exc_info()[2]
416 raise ExpansionError(varname, s, exc).with_traceback(tb) from exc 481 raise ExpansionError(varname, s, exc).with_traceback(tb) from exc
@@ -422,24 +487,19 @@ class DataSmart(MutableMapping):
422 def expand(self, s, varname = None): 487 def expand(self, s, varname = None):
423 return self.expandWithRefs(s, varname).value 488 return self.expandWithRefs(s, varname).value
424 489
425 def finalize(self, parent = False):
426 return
427
428 def internal_finalize(self, parent = False):
429 """Performs final steps upon the datastore, including application of overrides"""
430 self.overrides = None
431
432 def need_overrides(self): 490 def need_overrides(self):
433 if self.overrides is not None: 491 if self.overrides is not None:
434 return 492 return
435 if self.inoverride: 493 if self.inoverride:
436 return 494 return
495 override_stack = []
437 for count in range(5): 496 for count in range(5):
438 self.inoverride = True 497 self.inoverride = True
439 # Can end up here recursively so setup dummy values 498 # Can end up here recursively so setup dummy values
440 self.overrides = [] 499 self.overrides = []
441 self.overridesset = set() 500 self.overridesset = set()
442 self.overrides = (self.getVar("OVERRIDES") or "").split(":") or [] 501 self.overrides = (self.getVar("OVERRIDES") or "").split(":") or []
502 override_stack.append(self.overrides)
443 self.overridesset = set(self.overrides) 503 self.overridesset = set(self.overrides)
444 self.inoverride = False 504 self.inoverride = False
445 self.expand_cache = {} 505 self.expand_cache = {}
@@ -449,7 +509,7 @@ class DataSmart(MutableMapping):
449 self.overrides = newoverrides 509 self.overrides = newoverrides
450 self.overridesset = set(self.overrides) 510 self.overridesset = set(self.overrides)
451 else: 511 else:
452 bb.fatal("Overrides could not be expanded into a stable state after 5 iterations, overrides must be being referenced by other overridden variables in some recursive fashion. Please provide your configuration to bitbake-devel so we can laugh, er, I mean try and understand how to make it work.") 512 bb.fatal("Overrides could not be expanded into a stable state after 5 iterations, overrides must be being referenced by other overridden variables in some recursive fashion. Please provide your configuration to bitbake-devel so we can laugh, er, I mean try and understand how to make it work. The list of failing override expansions: %s" % "\n".join(str(s) for s in override_stack))
453 513
454 def initVar(self, var): 514 def initVar(self, var):
455 self.expand_cache = {} 515 self.expand_cache = {}
@@ -460,27 +520,44 @@ class DataSmart(MutableMapping):
460 dest = self.dict 520 dest = self.dict
461 while dest: 521 while dest:
462 if var in dest: 522 if var in dest:
463 return dest[var], self.overridedata.get(var, None) 523 return dest[var]
464 524
465 if "_data" not in dest: 525 if "_data" not in dest:
466 break 526 break
467 dest = dest["_data"] 527 dest = dest["_data"]
468 return None, self.overridedata.get(var, None) 528 return None
469 529
470 def _makeShadowCopy(self, var): 530 def _makeShadowCopy(self, var):
471 if var in self.dict: 531 if var in self.dict:
472 return 532 return
473 533
474 local_var, _ = self._findVar(var) 534 local_var = self._findVar(var)
475 535
476 if local_var: 536 if local_var:
477 self.dict[var] = copy.copy(local_var) 537 self.dict[var] = copy.copy(local_var)
478 else: 538 else:
479 self.initVar(var) 539 self.initVar(var)
480 540
541 def hasOverrides(self, var):
542 return var in self.overridedata
481 543
482 def setVar(self, var, value, **loginfo): 544 def setVar(self, var, value, **loginfo):
483 #print("var=" + str(var) + " val=" + str(value)) 545 #print("var=" + str(var) + " val=" + str(value))
546
547 if not var.startswith("__anon_") and ("_append" in var or "_prepend" in var or "_remove" in var):
548 info = "%s" % var
549 if "file" in loginfo:
550 info += " file: %s" % loginfo["file"]
551 if "line" in loginfo:
552 info += " line: %s" % loginfo["line"]
553 bb.fatal("Variable %s contains an operation using the old override syntax. Please convert this layer/metadata before attempting to use with a newer bitbake." % info)
554
555 shortvar = var.split(":", 1)[0]
556 if shortvar in self._var_renames:
557 _print_rename_error(shortvar, loginfo, self._var_renames, fullvar=var)
558 # Mark that we have seen a renamed variable
559 self.setVar("_FAILPARSINGERRORHANDLED", True)
560
484 self.expand_cache = {} 561 self.expand_cache = {}
485 parsing=False 562 parsing=False
486 if 'parsing' in loginfo: 563 if 'parsing' in loginfo:
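A condensed sketch of the two guards added to setVar above: operations still written in the old _append/_prepend/_remove override syntax are rejected outright, and renamed variables are flagged by their base name (the part before the first colon). Inputs here are hypothetical:

    renamed = {"BB_ENV_WHITELIST": "BB_ENV_PASSTHROUGH"}

    def check_setvar(var):
        # Anonymous functions are exempt from the old-syntax check.
        if not var.startswith("__anon_") and \
                ("_append" in var or "_prepend" in var or "_remove" in var):
            return "fatal: old override syntax in %s" % var
        shortvar = var.split(":", 1)[0]
        if shortvar in renamed:
            return "error: %s renamed to %s" % (shortvar, renamed[shortvar])
        return "ok"

    for name in ("FOO_append", "FOO:append",
                 "BB_ENV_WHITELIST:append", "__anon_func_append"):
        print(name, "->", check_setvar(name))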
@@ -509,7 +586,7 @@ class DataSmart(MutableMapping):
509 # pay the cookie monster 586 # pay the cookie monster
510 587
511 # more cookies for the cookie monster 588 # more cookies for the cookie monster
512 if '_' in var: 589 if ':' in var:
513 self._setvar_update_overrides(base, **loginfo) 590 self._setvar_update_overrides(base, **loginfo)
514 591
515 if base in self.overridevars: 592 if base in self.overridevars:
@@ -520,27 +597,27 @@ class DataSmart(MutableMapping):
520 self._makeShadowCopy(var) 597 self._makeShadowCopy(var)
521 598
522 if not parsing: 599 if not parsing:
523 if "_append" in self.dict[var]: 600 if ":append" in self.dict[var]:
524 del self.dict[var]["_append"] 601 del self.dict[var][":append"]
525 if "_prepend" in self.dict[var]: 602 if ":prepend" in self.dict[var]:
526 del self.dict[var]["_prepend"] 603 del self.dict[var][":prepend"]
527 if "_remove" in self.dict[var]: 604 if ":remove" in self.dict[var]:
528 del self.dict[var]["_remove"] 605 del self.dict[var][":remove"]
529 if var in self.overridedata: 606 if var in self.overridedata:
530 active = [] 607 active = []
531 self.need_overrides() 608 self.need_overrides()
532 for (r, o) in self.overridedata[var]: 609 for (r, o) in self.overridedata[var]:
533 if o in self.overridesset: 610 if o in self.overridesset:
534 active.append(r) 611 active.append(r)
535 elif "_" in o: 612 elif ":" in o:
536 if set(o.split("_")).issubset(self.overridesset): 613 if set(o.split(":")).issubset(self.overridesset):
537 active.append(r) 614 active.append(r)
538 for a in active: 615 for a in active:
539 self.delVar(a) 616 self.delVar(a)
540 del self.overridedata[var] 617 del self.overridedata[var]
541 618
542 # more cookies for the cookie monster 619 # more cookies for the cookie monster
543 if '_' in var: 620 if ':' in var:
544 self._setvar_update_overrides(var, **loginfo) 621 self._setvar_update_overrides(var, **loginfo)
545 622
546 # setting var 623 # setting var
@@ -562,12 +639,12 @@ class DataSmart(MutableMapping):
562 nextnew.update(vardata.references) 639 nextnew.update(vardata.references)
563 nextnew.update(vardata.contains.keys()) 640 nextnew.update(vardata.contains.keys())
564 new = nextnew 641 new = nextnew
565 self.internal_finalize(True) 642 self.overrides = None
566 643
567 def _setvar_update_overrides(self, var, **loginfo): 644 def _setvar_update_overrides(self, var, **loginfo):
568 # aka pay the cookie monster 645 # aka pay the cookie monster
569 override = var[var.rfind('_')+1:] 646 override = var[var.rfind(':')+1:]
570 shortvar = var[:var.rfind('_')] 647 shortvar = var[:var.rfind(':')]
571 while override and __override_regexp__.match(override): 648 while override and __override_regexp__.match(override):
572 if shortvar not in self.overridedata: 649 if shortvar not in self.overridedata:
573 self.overridedata[shortvar] = [] 650 self.overridedata[shortvar] = []
@@ -576,9 +653,9 @@ class DataSmart(MutableMapping):
576 self.overridedata[shortvar] = list(self.overridedata[shortvar]) 653 self.overridedata[shortvar] = list(self.overridedata[shortvar])
577 self.overridedata[shortvar].append([var, override]) 654 self.overridedata[shortvar].append([var, override])
578 override = None 655 override = None
579 if "_" in shortvar: 656 if ":" in shortvar:
580 override = var[shortvar.rfind('_')+1:] 657 override = var[shortvar.rfind(':')+1:]
581 shortvar = var[:shortvar.rfind('_')] 658 shortvar = var[:shortvar.rfind(':')]
582 if len(shortvar) == 0: 659 if len(shortvar) == 0:
583 override = None 660 override = None
584 661
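The loop above walks override suffixes off the right-hand end of a colon-separated name, registering the full variable under each progressively shorter base. A simplified sketch that preserves the loop's indexing quirks (note the composite override produced at the second level):

    import re

    __override_regexp__ = re.compile(r'[a-z0-9]+')

    def register_overrides(var, overridedata):
        # "FOO:append:arm" registers under "FOO:append" (override "arm")
        # and under "FOO" (composite override "append:arm").
        override = var[var.rfind(':') + 1:]
        shortvar = var[:var.rfind(':')]
        while override and __override_regexp__.match(override):
            overridedata.setdefault(shortvar, []).append((var, override))
            override = None
            if ":" in shortvar:
                override = var[shortvar.rfind(':') + 1:]
                shortvar = var[:shortvar.rfind(':')]
            if len(shortvar) == 0:
                override = None

    data = {}
    register_overrides("FOO:append:arm", data)
    print(data)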
@@ -602,10 +679,11 @@ class DataSmart(MutableMapping):
602 self.varhistory.record(**loginfo) 679 self.varhistory.record(**loginfo)
603 self.setVar(newkey, val, ignore=True, parsing=True) 680 self.setVar(newkey, val, ignore=True, parsing=True)
604 681
605 for i in (__setvar_keyword__): 682 srcflags = self.getVarFlags(key, False, True) or {}
606 src = self.getVarFlag(key, i, False) 683 for i in srcflags:
607 if src is None: 684 if i not in (__setvar_keyword__):
608 continue 685 continue
686 src = srcflags[i]
609 687
610 dest = self.getVarFlag(newkey, i, False) or [] 688 dest = self.getVarFlag(newkey, i, False) or []
611 dest.extend(src) 689 dest.extend(src)
@@ -617,7 +695,7 @@ class DataSmart(MutableMapping):
617 self.overridedata[newkey].append([v.replace(key, newkey), o]) 695 self.overridedata[newkey].append([v.replace(key, newkey), o])
618 self.renameVar(v, v.replace(key, newkey)) 696 self.renameVar(v, v.replace(key, newkey))
619 697
620 if '_' in newkey and val is None: 698 if ':' in newkey and val is None:
621 self._setvar_update_overrides(newkey, **loginfo) 699 self._setvar_update_overrides(newkey, **loginfo)
622 700
623 loginfo['variable'] = key 701 loginfo['variable'] = key
@@ -629,12 +707,12 @@ class DataSmart(MutableMapping):
629 def appendVar(self, var, value, **loginfo): 707 def appendVar(self, var, value, **loginfo):
630 loginfo['op'] = 'append' 708 loginfo['op'] = 'append'
631 self.varhistory.record(**loginfo) 709 self.varhistory.record(**loginfo)
632 self.setVar(var + "_append", value, ignore=True, parsing=True) 710 self.setVar(var + ":append", value, ignore=True, parsing=True)
633 711
634 def prependVar(self, var, value, **loginfo): 712 def prependVar(self, var, value, **loginfo):
635 loginfo['op'] = 'prepend' 713 loginfo['op'] = 'prepend'
636 self.varhistory.record(**loginfo) 714 self.varhistory.record(**loginfo)
637 self.setVar(var + "_prepend", value, ignore=True, parsing=True) 715 self.setVar(var + ":prepend", value, ignore=True, parsing=True)
638 716
639 def delVar(self, var, **loginfo): 717 def delVar(self, var, **loginfo):
640 self.expand_cache = {} 718 self.expand_cache = {}
@@ -645,10 +723,10 @@ class DataSmart(MutableMapping):
645 self.dict[var] = {} 723 self.dict[var] = {}
646 if var in self.overridedata: 724 if var in self.overridedata:
647 del self.overridedata[var] 725 del self.overridedata[var]
648 if '_' in var: 726 if ':' in var:
649 override = var[var.rfind('_')+1:] 727 override = var[var.rfind(':')+1:]
650 shortvar = var[:var.rfind('_')] 728 shortvar = var[:var.rfind(':')]
651 while override and override.islower(): 729 while override and __override_regexp__.match(override):
652 try: 730 try:
653 if shortvar in self.overridedata: 731 if shortvar in self.overridedata:
654 # Force CoW by recreating the list first 732 # Force CoW by recreating the list first
@@ -657,15 +735,23 @@ class DataSmart(MutableMapping):
657 except ValueError as e: 735 except ValueError as e:
658 pass 736 pass
659 override = None 737 override = None
660 if "_" in shortvar: 738 if ":" in shortvar:
661 override = var[shortvar.rfind('_')+1:] 739 override = var[shortvar.rfind(':')+1:]
662 shortvar = var[:shortvar.rfind('_')] 740 shortvar = var[:shortvar.rfind(':')]
663 if len(shortvar) == 0: 741 if len(shortvar) == 0:
664 override = None 742 override = None
665 743
666 def setVarFlag(self, var, flag, value, **loginfo): 744 def setVarFlag(self, var, flag, value, **loginfo):
667 self.expand_cache = {} 745 self.expand_cache = {}
668 746
747 if var == "BB_RENAMED_VARIABLES":
748 self._var_renames[flag] = value
749
750 if var in self._var_renames:
751 _print_rename_error(var, loginfo, self._var_renames)
752 # Mark that we have seen a renamed variable
753 self.setVar("_FAILPARSINGERRORHANDLED", True)
754
669 if 'op' not in loginfo: 755 if 'op' not in loginfo:
670 loginfo['op'] = "set" 756 loginfo['op'] = "set"
671 loginfo['flag'] = flag 757 loginfo['flag'] = flag
@@ -674,7 +760,7 @@ class DataSmart(MutableMapping):
674 self._makeShadowCopy(var) 760 self._makeShadowCopy(var)
675 self.dict[var][flag] = value 761 self.dict[var][flag] = value
676 762
677 if flag == "_defaultval" and '_' in var: 763 if flag == "_defaultval" and ':' in var:
678 self._setvar_update_overrides(var, **loginfo) 764 self._setvar_update_overrides(var, **loginfo)
679 if flag == "_defaultval" and var in self.overridevars: 765 if flag == "_defaultval" and var in self.overridevars:
680 self._setvar_update_overridevars(var, value) 766 self._setvar_update_overridevars(var, value)
@@ -695,22 +781,27 @@ class DataSmart(MutableMapping):
695 return None 781 return None
696 cachename = var + "[" + flag + "]" 782 cachename = var + "[" + flag + "]"
697 783
784 if not expand and retparser and cachename in self.expand_cache:
785 return self.expand_cache[cachename].unexpanded_value, self.expand_cache[cachename]
786
698 if expand and cachename in self.expand_cache: 787 if expand and cachename in self.expand_cache:
699 return self.expand_cache[cachename].value 788 return self.expand_cache[cachename].value
700 789
701 local_var, overridedata = self._findVar(var) 790 local_var = self._findVar(var)
702 value = None 791 value = None
703 removes = set() 792 removes = set()
704 if flag == "_content" and overridedata is not None and not parsing: 793 if flag == "_content" and not parsing:
794 overridedata = self.overridedata.get(var, None)
795 if flag == "_content" and not parsing and overridedata is not None:
705 match = False 796 match = False
706 active = {} 797 active = {}
707 self.need_overrides() 798 self.need_overrides()
708 for (r, o) in overridedata: 799 for (r, o) in overridedata:
709 # What about double overrides both with "_" in the name? 800 # FIXME What about double overrides both with "_" in the name?
710 if o in self.overridesset: 801 if o in self.overridesset:
711 active[o] = r 802 active[o] = r
712 elif "_" in o: 803 elif ":" in o:
713 if set(o.split("_")).issubset(self.overridesset): 804 if set(o.split(":")).issubset(self.overridesset):
714 active[o] = r 805 active[o] = r
715 806
716 mod = True 807 mod = True
@@ -718,10 +809,10 @@ class DataSmart(MutableMapping):
718 mod = False 809 mod = False
719 for o in self.overrides: 810 for o in self.overrides:
720 for a in active.copy(): 811 for a in active.copy():
721 if a.endswith("_" + o): 812 if a.endswith(":" + o):
722 t = active[a] 813 t = active[a]
723 del active[a] 814 del active[a]
724 active[a.replace("_" + o, "")] = t 815 active[a.replace(":" + o, "")] = t
725 mod = True 816 mod = True
726 elif a == o: 817 elif a == o:
727 match = active[a] 818 match = active[a]
@@ -740,31 +831,31 @@ class DataSmart(MutableMapping):
740 value = copy.copy(local_var["_defaultval"]) 831 value = copy.copy(local_var["_defaultval"])
741 832
742 833
743 if flag == "_content" and local_var is not None and "_append" in local_var and not parsing: 834 if flag == "_content" and local_var is not None and ":append" in local_var and not parsing:
744 if not value:
745 value = ""
746 self.need_overrides() 835 self.need_overrides()
747 for (r, o) in local_var["_append"]: 836 for (r, o) in local_var[":append"]:
748 match = True 837 match = True
749 if o: 838 if o:
750 for o2 in o.split("_"): 839 for o2 in o.split(":"):
751 if not o2 in self.overrides: 840 if not o2 in self.overrides:
752 match = False 841 match = False
753 if match: 842 if match:
843 if value is None:
844 value = ""
754 value = value + r 845 value = value + r
755 846
756 if flag == "_content" and local_var is not None and "_prepend" in local_var and not parsing: 847 if flag == "_content" and local_var is not None and ":prepend" in local_var and not parsing:
757 if not value:
758 value = ""
759 self.need_overrides() 848 self.need_overrides()
760 for (r, o) in local_var["_prepend"]: 849 for (r, o) in local_var[":prepend"]:
761 850
762 match = True 851 match = True
763 if o: 852 if o:
764 for o2 in o.split("_"): 853 for o2 in o.split(":"):
765 if not o2 in self.overrides: 854 if not o2 in self.overrides:
766 match = False 855 match = False
767 if match: 856 if match:
857 if value is None:
858 value = ""
768 value = r + value 859 value = r + value
769 860
770 parser = None 861 parser = None
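The :append/:prepend handling above only contributes a fragment when every colon-separated component of its override qualifier is currently active, and the base value now defaults to "" only once a fragment actually matches (so an unmatched append no longer turns an unset variable into an empty string). A sketch of the matching rule with invented fragments:

    def apply_appends(value, appends, active):
        # appends: (text, qualifier-or-None) pairs as stored under ":append";
        # active: the set of currently active OVERRIDES.
        for r, o in appends:
            if o and not all(o2 in active for o2 in o.split(":")):
                continue
            if value is None:
                value = ""
            value = value + r
        return value

    appends = [(" -O2", None), (" -mthumb", "arm"), (" -DDEBUG", "arm:debug")]
    print(apply_appends("BASE", appends, {"arm", "linux"}))  # BASE -O2 -mthumb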
@@ -773,12 +864,12 @@ class DataSmart(MutableMapping):
773 if expand: 864 if expand:
774 value = parser.value 865 value = parser.value
775 866
776 if value and flag == "_content" and local_var is not None and "_remove" in local_var and not parsing: 867 if value and flag == "_content" and local_var is not None and ":remove" in local_var and not parsing:
777 self.need_overrides() 868 self.need_overrides()
778 for (r, o) in local_var["_remove"]: 869 for (r, o) in local_var[":remove"]:
779 match = True 870 match = True
780 if o: 871 if o:
781 for o2 in o.split("_"): 872 for o2 in o.split(":"):
782 if not o2 in self.overrides: 873 if not o2 in self.overrides:
783 match = False 874 match = False
784 if match: 875 if match:
@@ -791,7 +882,7 @@ class DataSmart(MutableMapping):
791 expanded_removes[r] = self.expand(r).split() 882 expanded_removes[r] = self.expand(r).split()
792 883
793 parser.removes = set() 884 parser.removes = set()
794 val = "" 885 val = []
795 for v in __whitespace_split__.split(parser.value): 886 for v in __whitespace_split__.split(parser.value):
796 skip = False 887 skip = False
797 for r in removes: 888 for r in removes:
@@ -800,8 +891,8 @@ class DataSmart(MutableMapping):
800 skip = True 891 skip = True
801 if skip: 892 if skip:
802 continue 893 continue
803 val = val + v 894 val.append(v)
804 parser.value = val 895 parser.value = "".join(val)
805 if expand: 896 if expand:
806 value = parser.value 897 value = parser.value
807 898
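The :remove path above now collects surviving tokens in a list and joins once at the end. Because the splitter keeps its whitespace separators, a removed token leaves its surrounding whitespace behind, which preserves the existing behaviour. A self-contained sketch:

    import re

    __whitespace_split__ = re.compile(r'(\s)')

    def apply_removes(value, removes):
        # Keep separators, drop matching tokens, join once at the end.
        val = []
        for v in __whitespace_split__.split(value):
            if v in removes:
                continue
            val.append(v)
        return "".join(val)

    print(repr(apply_removes("-O2 -g -Wall", {"-g"})))  # '-O2  -Wall'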
@@ -816,7 +907,7 @@ class DataSmart(MutableMapping):
816 def delVarFlag(self, var, flag, **loginfo): 907 def delVarFlag(self, var, flag, **loginfo):
817 self.expand_cache = {} 908 self.expand_cache = {}
818 909
819 local_var, _ = self._findVar(var) 910 local_var = self._findVar(var)
820 if not local_var: 911 if not local_var:
821 return 912 return
822 if not var in self.dict: 913 if not var in self.dict:
@@ -859,12 +950,12 @@ class DataSmart(MutableMapping):
859 self.dict[var][i] = flags[i] 950 self.dict[var][i] = flags[i]
860 951
861 def getVarFlags(self, var, expand = False, internalflags=False): 952 def getVarFlags(self, var, expand = False, internalflags=False):
862 local_var, _ = self._findVar(var) 953 local_var = self._findVar(var)
863 flags = {} 954 flags = {}
864 955
865 if local_var: 956 if local_var:
866 for i in local_var: 957 for i in local_var:
867 if i.startswith("_") and not internalflags: 958 if i.startswith(("_", ":")) and not internalflags:
868 continue 959 continue
869 flags[i] = local_var[i] 960 flags[i] = local_var[i]
870 if expand and i in expand: 961 if expand and i in expand:
@@ -905,6 +996,7 @@ class DataSmart(MutableMapping):
905 data.inchistory = self.inchistory.copy() 996 data.inchistory = self.inchistory.copy()
906 997
907 data._tracking = self._tracking 998 data._tracking = self._tracking
999 data._var_renames = self._var_renames
908 1000
909 data.overrides = None 1001 data.overrides = None
910 data.overridevars = copy.copy(self.overridevars) 1002 data.overridevars = copy.copy(self.overridevars)
@@ -927,7 +1019,7 @@ class DataSmart(MutableMapping):
927 value = self.getVar(variable, False) 1019 value = self.getVar(variable, False)
928 for key in keys: 1020 for key in keys:
929 referrervalue = self.getVar(key, False) 1021 referrervalue = self.getVar(key, False)
930 if referrervalue and ref in referrervalue: 1022 if referrervalue and isinstance(referrervalue, str) and ref in referrervalue:
931 self.setVar(key, referrervalue.replace(ref, value)) 1023 self.setVar(key, referrervalue.replace(ref, value))
932 1024
933 def localkeys(self): 1025 def localkeys(self):
@@ -962,8 +1054,8 @@ class DataSmart(MutableMapping):
962 for (r, o) in self.overridedata[var]: 1054 for (r, o) in self.overridedata[var]:
963 if o in self.overridesset: 1055 if o in self.overridesset:
964 overrides.add(var) 1056 overrides.add(var)
965 elif "_" in o: 1057 elif ":" in o:
966 if set(o.split("_")).issubset(self.overridesset): 1058 if set(o.split(":")).issubset(self.overridesset):
967 overrides.add(var) 1059 overrides.add(var)
968 1060
969 for k in keylist(self.dict): 1061 for k in keylist(self.dict):
@@ -993,10 +1085,10 @@ class DataSmart(MutableMapping):
993 d = self.createCopy() 1085 d = self.createCopy()
994 bb.data.expandKeys(d) 1086 bb.data.expandKeys(d)
995 1087
996 config_whitelist = set((d.getVar("BB_HASHCONFIG_WHITELIST") or "").split()) 1088 config_ignore_vars = set((d.getVar("BB_HASHCONFIG_IGNORE_VARS") or "").split())
997 keys = set(key for key in iter(d) if not key.startswith("__")) 1089 keys = set(key for key in iter(d) if not key.startswith("__"))
998 for key in keys: 1090 for key in keys:
999 if key in config_whitelist: 1091 if key in config_ignore_vars:
1000 continue 1092 continue
1001 1093
1002 value = d.getVar(key, False) or "" 1094 value = d.getVar(key, False) or ""
diff --git a/bitbake/lib/bb/event.py b/bitbake/lib/bb/event.py
index 23e1f3187b..4761c86880 100644
--- a/bitbake/lib/bb/event.py
+++ b/bitbake/lib/bb/event.py
@@ -40,7 +40,7 @@ class HeartbeatEvent(Event):
40 """Triggered at regular time intervals of 10 seconds. Other events can fire much more often 40 """Triggered at regular time intervals of 10 seconds. Other events can fire much more often
41 (runQueueTaskStarted when there are many short tasks) or not at all for long periods 41 (runQueueTaskStarted when there are many short tasks) or not at all for long periods
42 of time (again runQueueTaskStarted, when there is just one long-running task), so this 42 of time (again runQueueTaskStarted, when there is just one long-running task), so this
43 event is more suitable for doing some task-independent work occassionally.""" 43 event is more suitable for doing some task-independent work occasionally."""
44 def __init__(self, time): 44 def __init__(self, time):
45 Event.__init__(self) 45 Event.__init__(self)
46 self.time = time 46 self.time = time
@@ -68,29 +68,39 @@ _catchall_handlers = {}
68_eventfilter = None 68_eventfilter = None
69_uiready = False 69_uiready = False
70_thread_lock = threading.Lock() 70_thread_lock = threading.Lock()
71_thread_lock_enabled = False 71_heartbeat_enabled = False
72 72_should_exit = threading.Event()
73if hasattr(__builtins__, '__setitem__'):
74 builtins = __builtins__
75else:
76 builtins = __builtins__.__dict__
77 73
78def enable_threadlock(): 74def enable_threadlock():
79 global _thread_lock_enabled 75 # Always needed now
80 _thread_lock_enabled = True 76 return
81 77
82def disable_threadlock(): 78def disable_threadlock():
83 global _thread_lock_enabled 79 # Always needed now
84 _thread_lock_enabled = False 80 return
81
82def enable_heartbeat():
83 global _heartbeat_enabled
84 _heartbeat_enabled = True
85
86def disable_heartbeat():
87 global _heartbeat_enabled
88 _heartbeat_enabled = False
89
90#
91# In long running code, this function should be called periodically
92# to check if we should exit due to an interruption (e.g. Ctrl+C from the UI)
93#
94def check_for_interrupts(d):
95 global _should_exit
96 if _should_exit.is_set():
97 bb.warn("Exiting due to interrupt.")
98 raise bb.BBHandledException()
85 99
86def execute_handler(name, handler, event, d): 100def execute_handler(name, handler, event, d):
87 event.data = d 101 event.data = d
88 addedd = False
89 if 'd' not in builtins:
90 builtins['d'] = d
91 addedd = True
92 try: 102 try:
93 ret = handler(event) 103 ret = handler(event, d)
94 except (bb.parse.SkipRecipe, bb.BBHandledException): 104 except (bb.parse.SkipRecipe, bb.BBHandledException):
95 raise 105 raise
96 except Exception: 106 except Exception:
@@ -104,8 +114,7 @@ def execute_handler(name, handler, event, d):
104 raise 114 raise
105 finally: 115 finally:
106 del event.data 116 del event.data
107 if addedd: 117
108 del builtins['d']
109 118
110def fire_class_handlers(event, d): 119def fire_class_handlers(event, d):
111 if isinstance(event, logging.LogRecord): 120 if isinstance(event, logging.LogRecord):
@@ -118,7 +127,7 @@ def fire_class_handlers(event, d):
118 if _eventfilter: 127 if _eventfilter:
119 if not _eventfilter(name, handler, event, d): 128 if not _eventfilter(name, handler, event, d):
120 continue 129 continue
121 if d and not name in (d.getVar("__BBHANDLERS_MC") or []): 130 if d is not None and not name in (d.getVar("__BBHANDLERS_MC") or set()):
122 continue 131 continue
123 execute_handler(name, handler, event, d) 132 execute_handler(name, handler, event, d)
124 133
@@ -132,8 +141,14 @@ def print_ui_queue():
132 if not _uiready: 141 if not _uiready:
133 from bb.msg import BBLogFormatter 142 from bb.msg import BBLogFormatter
134 # Flush any existing buffered content 143 # Flush any existing buffered content
135 sys.stdout.flush() 144 try:
136 sys.stderr.flush() 145 sys.stdout.flush()
146 except:
147 pass
148 try:
149 sys.stderr.flush()
150 except:
151 pass
137 stdout = logging.StreamHandler(sys.stdout) 152 stdout = logging.StreamHandler(sys.stdout)
138 stderr = logging.StreamHandler(sys.stderr) 153 stderr = logging.StreamHandler(sys.stderr)
139 formatter = BBLogFormatter("%(levelname)s: %(message)s") 154 formatter = BBLogFormatter("%(levelname)s: %(message)s")
@@ -174,36 +189,30 @@ def print_ui_queue():
174 189
175def fire_ui_handlers(event, d): 190def fire_ui_handlers(event, d):
176 global _thread_lock 191 global _thread_lock
177 global _thread_lock_enabled
178 192
179 if not _uiready: 193 if not _uiready:
180 # No UI handlers registered yet, queue up the messages 194 # No UI handlers registered yet, queue up the messages
181 ui_queue.append(event) 195 ui_queue.append(event)
182 return 196 return
183 197
184 if _thread_lock_enabled: 198 with bb.utils.lock_timeout(_thread_lock):
185 _thread_lock.acquire() 199 errors = []
186 200 for h in _ui_handlers:
187 errors = [] 201 #print "Sending event %s" % event
188 for h in _ui_handlers: 202 try:
189 #print "Sending event %s" % event 203 if not _ui_logfilters[h].filter(event):
190 try: 204 continue
191 if not _ui_logfilters[h].filter(event): 205 # We use pickle here since it better handles object instances
192 continue 206 # which xmlrpc's marshaller does not. Events *must* be serializable
193 # We use pickle here since it better handles object instances 207 # by pickle.
194 # which xmlrpc's marshaller does not. Events *must* be serializable 208 if hasattr(_ui_handlers[h].event, "sendpickle"):
195 # by pickle. 209 _ui_handlers[h].event.sendpickle((pickle.dumps(event)))
196 if hasattr(_ui_handlers[h].event, "sendpickle"): 210 else:
197 _ui_handlers[h].event.sendpickle((pickle.dumps(event))) 211 _ui_handlers[h].event.send(event)
198 else: 212 except:
199 _ui_handlers[h].event.send(event) 213 errors.append(h)
200 except: 214 for h in errors:
201 errors.append(h) 215 del _ui_handlers[h]
202 for h in errors:
203 del _ui_handlers[h]
204
205 if _thread_lock_enabled:
206 _thread_lock.release()
207 216
208def fire(event, d): 217def fire(event, d):
209 """Fire off an Event""" 218 """Fire off an Event"""
@@ -232,26 +241,31 @@ noop = lambda _: None
232def register(name, handler, mask=None, filename=None, lineno=None, data=None): 241def register(name, handler, mask=None, filename=None, lineno=None, data=None):
233 """Register an Event handler""" 242 """Register an Event handler"""
234 243
235 if data and data.getVar("BB_CURRENT_MC"): 244 if data is not None and data.getVar("BB_CURRENT_MC"):
236 mc = data.getVar("BB_CURRENT_MC") 245 mc = data.getVar("BB_CURRENT_MC")
237 name = '%s%s' % (mc.replace('-', '_'), name) 246 name = '%s%s' % (mc.replace('-', '_'), name)
238 247
239 # already registered 248 # already registered
240 if name in _handlers: 249 if name in _handlers:
250 if data is not None:
251 bbhands_mc = (data.getVar("__BBHANDLERS_MC") or set())
252 bbhands_mc.add(name)
253 data.setVar("__BBHANDLERS_MC", bbhands_mc)
241 return AlreadyRegistered 254 return AlreadyRegistered
242 255
243 if handler is not None: 256 if handler is not None:
244 # handle string containing python code 257 # handle string containing python code
245 if isinstance(handler, str): 258 if isinstance(handler, str):
246 tmp = "def %s(e):\n%s" % (name, handler) 259 tmp = "def %s(e, d):\n%s" % (name, handler)
260 # Inject empty lines to make code match lineno in filename
261 if lineno is not None:
262 tmp = "\n" * (lineno-1) + tmp
247 try: 263 try:
248 code = bb.methodpool.compile_cache(tmp) 264 code = bb.methodpool.compile_cache(tmp)
249 if not code: 265 if not code:
250 if filename is None: 266 if filename is None:
251 filename = "%s(e)" % name 267 filename = "%s(e, d)" % name
252 code = compile(tmp, filename, "exec", ast.PyCF_ONLY_AST) 268 code = compile(tmp, filename, "exec", ast.PyCF_ONLY_AST)
253 if lineno is not None:
254 ast.increment_lineno(code, lineno-1)
255 code = compile(code, filename, "exec") 269 code = compile(code, filename, "exec")
256 bb.methodpool.compile_cache_add(tmp, code) 270 bb.methodpool.compile_cache_add(tmp, code)
257 except SyntaxError: 271 except SyntaxError:
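String handlers registered above are now compiled with an (e, d) signature, and the generated source is padded with blank lines up front so compiled code reports the original file's line numbers, replacing the previous ast.increment_lineno adjustment. A runnable sketch with hypothetical name and lineno values:

    name, lineno = "my_handler", 12          # hypothetical values
    body = "    print(e, d)\n"
    tmp = "def %s(e, d):\n%s" % (name, body)
    # Inject empty lines so the compiled code matches lineno in the file.
    tmp = "\n" * (lineno - 1) + tmp
    code = compile(tmp, "example.bb", "exec")
    scope = {}
    exec(code, scope)
    scope[name]("SomeEvent", "datastore")        # handlers take (e, d) now
    print(scope[name].__code__.co_firstlineno)   # -> 12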
@@ -274,16 +288,16 @@ def register(name, handler, mask=None, filename=None, lineno=None, data=None):
274 _event_handler_map[m] = {} 288 _event_handler_map[m] = {}
275 _event_handler_map[m][name] = True 289 _event_handler_map[m][name] = True
276 290
277 if data: 291 if data is not None:
278 bbhands_mc = (data.getVar("__BBHANDLERS_MC") or []) 292 bbhands_mc = (data.getVar("__BBHANDLERS_MC") or set())
279 bbhands_mc.append(name) 293 bbhands_mc.add(name)
280 data.setVar("__BBHANDLERS_MC", bbhands_mc) 294 data.setVar("__BBHANDLERS_MC", bbhands_mc)
281 295
282 return Registered 296 return Registered
283 297
284def remove(name, handler, data=None): 298def remove(name, handler, data=None):
285 """Remove an Event handler""" 299 """Remove an Event handler"""
286 if data: 300 if data is not None:
287 if data.getVar("BB_CURRENT_MC"): 301 if data.getVar("BB_CURRENT_MC"):
288 mc = data.getVar("BB_CURRENT_MC") 302 mc = data.getVar("BB_CURRENT_MC")
289 name = '%s%s' % (mc.replace('-', '_'), name) 303 name = '%s%s' % (mc.replace('-', '_'), name)
@@ -295,8 +309,8 @@ def remove(name, handler, data=None):
295 if name in _event_handler_map[event]: 309 if name in _event_handler_map[event]:
296 _event_handler_map[event].pop(name) 310 _event_handler_map[event].pop(name)
297 311
298 if data: 312 if data is not None:
299 bbhands_mc = (data.getVar("__BBHANDLERS_MC") or []) 313 bbhands_mc = (data.getVar("__BBHANDLERS_MC") or set())
300 if name in bbhands_mc: 314 if name in bbhands_mc:
301 bbhands_mc.remove(name) 315 bbhands_mc.remove(name)
302 data.setVar("__BBHANDLERS_MC", bbhands_mc) 316 data.setVar("__BBHANDLERS_MC", bbhands_mc)
@@ -313,21 +327,23 @@ def set_eventfilter(func):
313 _eventfilter = func 327 _eventfilter = func
314 328
315def register_UIHhandler(handler, mainui=False): 329def register_UIHhandler(handler, mainui=False):
316 bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1 330 with bb.utils.lock_timeout(_thread_lock):
317 _ui_handlers[_ui_handler_seq] = handler 331 bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1
318 level, debug_domains = bb.msg.constructLogOptions() 332 _ui_handlers[_ui_handler_seq] = handler
319 _ui_logfilters[_ui_handler_seq] = UIEventFilter(level, debug_domains) 333 level, debug_domains = bb.msg.constructLogOptions()
320 if mainui: 334 _ui_logfilters[_ui_handler_seq] = UIEventFilter(level, debug_domains)
321 global _uiready 335 if mainui:
322 _uiready = _ui_handler_seq 336 global _uiready
323 return _ui_handler_seq 337 _uiready = _ui_handler_seq
338 return _ui_handler_seq
324 339
325def unregister_UIHhandler(handlerNum, mainui=False): 340def unregister_UIHhandler(handlerNum, mainui=False):
326 if mainui: 341 if mainui:
327 global _uiready 342 global _uiready
328 _uiready = False 343 _uiready = False
329 if handlerNum in _ui_handlers: 344 with bb.utils.lock_timeout(_thread_lock):
330 del _ui_handlers[handlerNum] 345 if handlerNum in _ui_handlers:
346 del _ui_handlers[handlerNum]
331 return 347 return
332 348
333def get_uihandler(): 349def get_uihandler():
@@ -482,7 +498,7 @@ class BuildCompleted(BuildBase, OperationCompleted):
482 BuildBase.__init__(self, n, p, failures) 498 BuildBase.__init__(self, n, p, failures)
483 499
484class DiskFull(Event): 500class DiskFull(Event):
485 """Disk full case build aborted""" 501 """Disk full case build halted"""
486 def __init__(self, dev, type, freespace, mountpoint): 502 def __init__(self, dev, type, freespace, mountpoint):
487 Event.__init__(self) 503 Event.__init__(self)
488 self._dev = dev 504 self._dev = dev
@@ -666,6 +682,17 @@ class ReachableStamps(Event):
666 Event.__init__(self) 682 Event.__init__(self)
667 self.stamps = stamps 683 self.stamps = stamps
668 684
685class StaleSetSceneTasks(Event):
686 """
687 An event listing setscene tasks which are 'stale' and will
688 be rerun. The metadata may use this to clean up stale data.
689 tasks is a mapping of tasks to their matching stale stamps.
690 """
691
692 def __init__(self, tasks):
693 Event.__init__(self)
694 self.tasks = tasks
695
669class FilesMatchingFound(Event): 696class FilesMatchingFound(Event):
670 """ 697 """
671 Event when a list of files matching the supplied pattern has 698 Event when a list of files matching the supplied pattern has
@@ -749,7 +776,7 @@ class LogHandler(logging.Handler):
749class MetadataEvent(Event): 776class MetadataEvent(Event):
750 """ 777 """
751 Generic event that target for OE-Core classes 778 Generic event that target for OE-Core classes
752 to report information during asynchrous execution 779 to report information during asynchronous execution
753 """ 780 """
754 def __init__(self, eventtype, eventdata): 781 def __init__(self, eventtype, eventdata):
755 Event.__init__(self) 782 Event.__init__(self)
@@ -830,3 +857,19 @@ class FindSigInfoResult(Event):
830 def __init__(self, result): 857 def __init__(self, result):
831 Event.__init__(self) 858 Event.__init__(self)
832 self.result = result 859 self.result = result
860
861class GetTaskSignatureResult(Event):
862 """
863 Event to return results from GetTaskSignatures command
864 """
865 def __init__(self, sig):
866 Event.__init__(self)
867 self.sig = sig
868
869class ParseError(Event):
870 """
871 Event to indicate parse failed
872 """
873 def __init__(self, msg):
874 super().__init__()
875 self._msg = msg
diff --git a/bitbake/lib/bb/exceptions.py b/bitbake/lib/bb/exceptions.py
index ecbad59970..801db9c82f 100644
--- a/bitbake/lib/bb/exceptions.py
+++ b/bitbake/lib/bb/exceptions.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright BitBake Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
diff --git a/bitbake/lib/bb/fetch2/README b/bitbake/lib/bb/fetch2/README
new file mode 100644
index 0000000000..67b787ef47
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/README
@@ -0,0 +1,57 @@
1There are expectations of users of the fetcher code. This file attempts to document
2some of the constraints that are present. Some are obvious, some are less so. They are
3documented in the context of how OE uses the fetcher, but the API calls are generic.
4
5a) network access for sources is only expected to happen in the do_fetch task.
6 This is not enforced or tested but is required so that we can:
7
8 i) audit the sources used (i.e. for license/manifest reasons)
9 ii) support offline builds with a suitable cache
10 iii) allow work to continue even with downtime upstream
11 iv) allow for changes upstream in incompatible ways
12 v) allow rebuilding of the software in X years time
13
14b) network access is not expected in do_unpack task.
15
16c) you can take DL_DIR and use it as a mirror for offline builds.
17
18d) access to the network is only made when explicitly configured in recipes
19 (e.g. use of AUTOREV, or use of git tags which change revision).
20
21e) fetcher output is deterministic (i.e. if you fetch configuration XXX now it
22 will match exactly in the future in a clean build with a new DL_DIR).
23 One specific pain point is git tags. They can be replaced and changed
24 so the git fetcher has to resolve them with the network. We use git revisions
25 where possible to avoid this and ensure determinism.
26
27f) network access is expected to work with the standard linux proxy variables
28 so that access behind firewalls works (the fetcher sets these in the
29 environment but only in the do_fetch tasks).
30
31g) access during parsing has to be minimal; a "git ls-remote" for an AUTOREV
32 git recipe might be ok but you can't expect to check out a git tree.
33
34h) we need to provide revision information during parsing such that a version
35 for the recipe can be constructed.
36
37i) versions are expected to be able to increase in a way which sorts, allowing
38 package feeds to operate (see the PR server, required for git revisions to sort).
39
40j) An API to query for possible version upgrades of a url is highly desirable to
41 allow our automated upgrade code to function (it is implied this always
42 has network access).
43
44k) Where fixes or changes to behaviour in the fetcher are made, we ask that
45 test cases are added (run with "bitbake-selftest bb.tests.fetch"). We do
46 have fairly extensive test coverage of the fetcher as it is the only way
47 to track all of its corner cases, though sadly it still doesn't give
48 complete coverage.
49
50l) If using tools during parse time, they will have to be in ASSUME_PROVIDED
51 in OE's context as we can't build git-native, then parse a recipe and use
52 git ls-remote.
53
54Not all fetchers support all features; autorev is optional and doesn't make
55sense for some. Upgrade detection means different things in different contexts
56too.
57
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index 19169d780f..5bf2c4b8cf 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -113,7 +113,7 @@ class MissingParameterError(BBFetchException):
113 self.args = (missing, url) 113 self.args = (missing, url)
114 114
115class ParameterError(BBFetchException): 115class ParameterError(BBFetchException):
116 """Exception raised when a url cannot be proccessed due to invalid parameters.""" 116 """Exception raised when a url cannot be processed due to invalid parameters."""
117 def __init__(self, message, url): 117 def __init__(self, message, url):
118 msg = "URL: '%s' has invalid parameters. %s" % (url, message) 118 msg = "URL: '%s' has invalid parameters. %s" % (url, message)
119 self.url = url 119 self.url = url
@@ -182,7 +182,7 @@ class URI(object):
182 Some notes about relative URIs: while it's specified that 182 Some notes about relative URIs: while it's specified that
183 a URI beginning with <scheme>:// should either be directly 183 a URI beginning with <scheme>:// should either be directly
184 followed by a hostname or a /, the old URI handling of the 184 followed by a hostname or a /, the old URI handling of the
185 fetch2 library did not comform to this. Therefore, this URI 185 fetch2 library did not conform to this. Therefore, this URI
186 class has some kludges to make sure that URIs are parsed in 186 class has some kludges to make sure that URIs are parsed in
187 a way conforming to bitbake's current usage. This URI class 187
188 supports the following: 188 supports the following:
@@ -199,7 +199,7 @@ class URI(object):
199 file://hostname/absolute/path.diff (would be IETF compliant) 199 file://hostname/absolute/path.diff (would be IETF compliant)
200 200
201 Note that the last case only applies to a list of 201 Note that the last case only applies to a list of
202 "whitelisted" schemes (currently only file://), that requires 202 explicitly allowed schemes (currently only file://), that requires
203 its URIs to not have a network location. 203 its URIs to not have a network location.
204 """ 204 """
205 205
@@ -290,12 +290,12 @@ class URI(object):
290 290
291 def _param_str_split(self, string, elmdelim, kvdelim="="): 291 def _param_str_split(self, string, elmdelim, kvdelim="="):
292 ret = collections.OrderedDict() 292 ret = collections.OrderedDict()
293 for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim) if x]: 293 for k, v in [x.split(kvdelim, 1) if kvdelim in x else (x, None) for x in string.split(elmdelim) if x]:
294 ret[k] = v 294 ret[k] = v
295 return ret 295 return ret
296 296
297 def _param_str_join(self, dict_, elmdelim, kvdelim="="): 297 def _param_str_join(self, dict_, elmdelim, kvdelim="="):
298 return elmdelim.join([kvdelim.join([k, v]) for k, v in dict_.items()]) 298 return elmdelim.join([kvdelim.join([k, v]) if v else k for k, v in dict_.items()])
299 299
300 @property 300 @property
301 def hostport(self): 301 def hostport(self):
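The relaxed parameter handling above lets URI parameter strings carry valueless flags: "a=b;flag" now parses instead of raising, and the flag round-trips through the join. A sketch of the two helpers in isolation:

    import collections

    def param_str_split(string, elmdelim=";", kvdelim="="):
        ret = collections.OrderedDict()
        for k, v in [x.split(kvdelim, 1) if kvdelim in x else (x, None)
                     for x in string.split(elmdelim) if x]:
            ret[k] = v
        return ret

    def param_str_join(dict_, elmdelim=";", kvdelim="="):
        return elmdelim.join([kvdelim.join([k, v]) if v else k
                              for k, v in dict_.items()])

    params = param_str_split("protocol=https;downloadfilename=x.tar;unpack")
    print(params)                  # 'unpack' maps to None
    print(param_str_join(params))  # round-trips to the original string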
@@ -388,7 +388,7 @@ def decodeurl(url):
388 if s: 388 if s:
389 if not '=' in s: 389 if not '=' in s:
390 raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s)) 390 raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s))
391 s1, s2 = s.split('=') 391 s1, s2 = s.split('=', 1)
392 p[s1] = s2 392 p[s1] = s2
393 393
394 return type, host, urllib.parse.unquote(path), user, pswd, p 394 return type, host, urllib.parse.unquote(path), user, pswd, p
@@ -402,24 +402,24 @@ def encodeurl(decoded):
402 402
403 if not type: 403 if not type:
404 raise MissingParameterError('type', "encoded from the data %s" % str(decoded)) 404 raise MissingParameterError('type', "encoded from the data %s" % str(decoded))
405 url = '%s://' % type 405 url = ['%s://' % type]
406 if user and type != "file": 406 if user and type != "file":
407 url += "%s" % user 407 url.append("%s" % user)
408 if pswd: 408 if pswd:
409 url += ":%s" % pswd 409 url.append(":%s" % pswd)
410 url += "@" 410 url.append("@")
411 if host and type != "file": 411 if host and type != "file":
412 url += "%s" % host 412 url.append("%s" % host)
413 if path: 413 if path:
414 # Standardise path to ensure comparisons work 414 # Standardise path to ensure comparisons work
415 while '//' in path: 415 while '//' in path:
416 path = path.replace("//", "/") 416 path = path.replace("//", "/")
417 url += "%s" % urllib.parse.quote(path) 417 url.append("%s" % urllib.parse.quote(path))
418 if p: 418 if p:
419 for parm in p: 419 for parm in p:
420 url += ";%s=%s" % (parm, p[parm]) 420 url.append(";%s=%s" % (parm, p[parm]))
421 421
422 return url 422 return "".join(url)
423 423
424def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None): 424def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
425 if not ud.url or not uri_find or not uri_replace: 425 if not ud.url or not uri_find or not uri_replace:
@@ -430,6 +430,7 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
430 uri_replace_decoded = list(decodeurl(uri_replace)) 430 uri_replace_decoded = list(decodeurl(uri_replace))
431 logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded)) 431 logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
432 result_decoded = ['', '', '', '', '', {}] 432 result_decoded = ['', '', '', '', '', {}]
433 # 0 - type, 1 - host, 2 - path, 3 - user, 4- pswd, 5 - params
433 for loc, i in enumerate(uri_find_decoded): 434 for loc, i in enumerate(uri_find_decoded):
434 result_decoded[loc] = uri_decoded[loc] 435 result_decoded[loc] = uri_decoded[loc]
435 regexp = i 436 regexp = i
@@ -449,6 +450,9 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
449 for l in replacements: 450 for l in replacements:
450 uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l]) 451 uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l])
451 result_decoded[loc][k] = uri_replace_decoded[loc][k] 452 result_decoded[loc][k] = uri_replace_decoded[loc][k]
453 elif (loc == 3 or loc == 4) and uri_replace_decoded[loc]:
454 # User/password in the replacement is just a straight replacement
455 result_decoded[loc] = uri_replace_decoded[loc]
452 elif (re.match(regexp, uri_decoded[loc])): 456 elif (re.match(regexp, uri_decoded[loc])):
453 if not uri_replace_decoded[loc]: 457 if not uri_replace_decoded[loc]:
454 result_decoded[loc] = "" 458 result_decoded[loc] = ""
@@ -465,10 +469,18 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
465 basename = os.path.basename(mirrortarball) 469 basename = os.path.basename(mirrortarball)
466 # Kill parameters, they make no sense for mirror tarballs 470 # Kill parameters, they make no sense for mirror tarballs
467 uri_decoded[5] = {} 471 uri_decoded[5] = {}
472 uri_find_decoded[5] = {}
468 elif ud.localpath and ud.method.supports_checksum(ud): 473 elif ud.localpath and ud.method.supports_checksum(ud):
469 basename = os.path.basename(ud.localpath) 474 basename = os.path.basename(ud.localpath)
470 if basename and not result_decoded[loc].endswith(basename): 475 if basename:
471 result_decoded[loc] = os.path.join(result_decoded[loc], basename) 476 uri_basename = os.path.basename(uri_decoded[loc])
477 # Prefix with a slash as a sentinel in case
478 # result_decoded[loc] does not contain one.
479 path = "/" + result_decoded[loc]
480 if uri_basename and basename != uri_basename and path.endswith("/" + uri_basename):
481 result_decoded[loc] = path[1:-len(uri_basename)] + basename
482 elif not path.endswith("/" + basename):
483 result_decoded[loc] = os.path.join(path[1:], basename)
472 else: 484 else:
473 return None 485 return None
474 result = encodeurl(result_decoded) 486 result = encodeurl(result_decoded)
@@ -506,7 +518,7 @@ def fetcher_init(d):
506 else: 518 else:
507 raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy) 519 raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
508 520
509 _checksum_cache.init_cache(d) 521 _checksum_cache.init_cache(d.getVar("BB_CACHEDIR"))
510 522
511 for m in methods: 523 for m in methods:
512 if hasattr(m, "init"): 524 if hasattr(m, "init"):
@@ -534,7 +546,7 @@ def mirror_from_string(data):
534 bb.warn('Invalid mirror data %s, should have paired members.' % data) 546 bb.warn('Invalid mirror data %s, should have paired members.' % data)
535 return list(zip(*[iter(mirrors)]*2)) 547 return list(zip(*[iter(mirrors)]*2))
536 548
537def verify_checksum(ud, d, precomputed={}): 549def verify_checksum(ud, d, precomputed={}, localpath=None, fatal_nochecksum=True):
538 """ 550 """
539 verify the MD5 and SHA256 checksum for downloaded src 551 verify the MD5 and SHA256 checksum for downloaded src
540 552
@@ -548,20 +560,25 @@ def verify_checksum(ud, d, precomputed={}):
548 file against those in the recipe each time, rather than only after 560 file against those in the recipe each time, rather than only after
549 downloading. See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571. 561 downloading. See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571.
550 """ 562 """
551
552 if ud.ignore_checksums or not ud.method.supports_checksum(ud): 563 if ud.ignore_checksums or not ud.method.supports_checksum(ud):
553 return {} 564 return {}
554 565
566 if localpath is None:
567 localpath = ud.localpath
568
555 def compute_checksum_info(checksum_id): 569 def compute_checksum_info(checksum_id):
556 checksum_name = getattr(ud, "%s_name" % checksum_id) 570 checksum_name = getattr(ud, "%s_name" % checksum_id)
557 571
558 if checksum_id in precomputed: 572 if checksum_id in precomputed:
559 checksum_data = precomputed[checksum_id] 573 checksum_data = precomputed[checksum_id]
560 else: 574 else:
561 checksum_data = getattr(bb.utils, "%s_file" % checksum_id)(ud.localpath) 575 checksum_data = getattr(bb.utils, "%s_file" % checksum_id)(localpath)
562 576
563 checksum_expected = getattr(ud, "%s_expected" % checksum_id) 577 checksum_expected = getattr(ud, "%s_expected" % checksum_id)
564 578
579 if checksum_expected == '':
580 checksum_expected = None
581
565 return { 582 return {
566 "id": checksum_id, 583 "id": checksum_id,
567 "name": checksum_name, 584 "name": checksum_name,
@@ -581,17 +598,13 @@ def verify_checksum(ud, d, precomputed={}):
581 checksum_lines = ["SRC_URI[%s] = \"%s\"" % (ci["name"], ci["data"])] 598 checksum_lines = ["SRC_URI[%s] = \"%s\"" % (ci["name"], ci["data"])]
582 599
583 # If no checksum has been provided 600 # If no checksum has been provided
584 if ud.method.recommends_checksum(ud) and all(ci["expected"] is None for ci in checksum_infos): 601 if fatal_nochecksum and ud.method.recommends_checksum(ud) and all(ci["expected"] is None for ci in checksum_infos):
585 messages = [] 602 messages = []
586 strict = d.getVar("BB_STRICT_CHECKSUM") or "0" 603 strict = d.getVar("BB_STRICT_CHECKSUM") or "0"
587 604
588 # If strict checking enabled and neither sum defined, raise error 605 # If strict checking enabled and neither sum defined, raise error
589 if strict == "1": 606 if strict == "1":
590 messages.append("No checksum specified for '%s', please add at " \ 607 raise NoChecksumError("\n".join(checksum_lines))
591 "least one to the recipe:" % ud.localpath)
592 messages.extend(checksum_lines)
593 logger.error("\n".join(messages))
594 raise NoChecksumError("Missing SRC_URI checksum", ud.url)
595 608
596 bb.event.fire(MissingChecksumEvent(ud.url, **checksum_event), d) 609 bb.event.fire(MissingChecksumEvent(ud.url, **checksum_event), d)
597 610
@@ -612,8 +625,8 @@ def verify_checksum(ud, d, precomputed={}):
612 625
613 for ci in checksum_infos: 626 for ci in checksum_infos:
614 if ci["expected"] and ci["expected"] != ci["data"]: 627 if ci["expected"] and ci["expected"] != ci["data"]:
615 messages.append("File: '%s' has %s checksum %s when %s was " \ 628 messages.append("File: '%s' has %s checksum '%s' when '%s' was " \
616 "expected" % (ud.localpath, ci["id"], ci["data"], ci["expected"])) 629 "expected" % (localpath, ci["id"], ci["data"], ci["expected"]))
617 bad_checksum = ci["data"] 630 bad_checksum = ci["data"]
618 631
619 if bad_checksum: 632 if bad_checksum:
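
A hedged usage sketch of the extended verify_checksum() signature above; ud and d stand for an already-initialised FetchData and datastore, and the localpath value is a placeholder.

    import bb.fetch2

    # Verify a file at a path other than ud.localpath (e.g. a mirror copy),
    # without treating an absent recipe checksum as fatal.
    bb.fetch2.verify_checksum(ud, d,
                              precomputed={},
                              localpath="/downloads/foo-1.0.tar.gz",
                              fatal_nochecksum=False)
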
@@ -731,13 +744,16 @@ def subprocess_setup():
731 # SIGPIPE errors are known issues with gzip/bash 744 # SIGPIPE errors are known issues with gzip/bash
732 signal.signal(signal.SIGPIPE, signal.SIG_DFL) 745 signal.signal(signal.SIGPIPE, signal.SIG_DFL)
733 746
734def get_autorev(d): 747def mark_recipe_nocache(d):
735 # only not cache src rev in autorev case
736 if d.getVar('BB_SRCREV_POLICY') != "cache": 748 if d.getVar('BB_SRCREV_POLICY') != "cache":
737 d.setVar('BB_DONT_CACHE', '1') 749 d.setVar('BB_DONT_CACHE', '1')
750
751def get_autorev(d):
752 mark_recipe_nocache(d)
753 d.setVar("__BBAUTOREV_SEEN", True)
738 return "AUTOINC" 754 return "AUTOINC"
739 755
740def get_srcrev(d, method_name='sortable_revision'): 756def _get_srcrev(d, method_name='sortable_revision'):
741 """ 757 """
742 Return the revision string, usually for use in the version string (PV) of the current package 758 Return the revision string, usually for use in the version string (PV) of the current package
743 Most packages usually only have one SCM so we just pass on the call. 759 Most packages usually only have one SCM so we just pass on the call.
@@ -751,23 +767,34 @@ def get_srcrev(d, method_name='sortable_revision'):
751 that fetcher provides a method with the given name and the same signature as sortable_revision. 767 that fetcher provides a method with the given name and the same signature as sortable_revision.
752 """ 768 """
753 769
770 d.setVar("__BBSRCREV_SEEN", "1")
771 recursion = d.getVar("__BBINSRCREV")
772 if recursion:
773 raise FetchError("There are recursive references in fetcher variables, likely through SRC_URI")
774 d.setVar("__BBINSRCREV", True)
775
754 scms = [] 776 scms = []
777 revs = []
755 fetcher = Fetch(d.getVar('SRC_URI').split(), d) 778 fetcher = Fetch(d.getVar('SRC_URI').split(), d)
756 urldata = fetcher.ud 779 urldata = fetcher.ud
757 for u in urldata: 780 for u in urldata:
758 if urldata[u].method.supports_srcrev(): 781 if urldata[u].method.supports_srcrev():
759 scms.append(u) 782 scms.append(u)
760 783
761 if len(scms) == 0: 784 if not scms:
762 raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI") 785 d.delVar("__BBINSRCREV")
786 return "", revs
787
763 788
764 if len(scms) == 1 and len(urldata[scms[0]].names) == 1: 789 if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
765 autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].names[0]) 790 autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].names[0])
791 revs.append(rev)
766 if len(rev) > 10: 792 if len(rev) > 10:
767 rev = rev[:10] 793 rev = rev[:10]
794 d.delVar("__BBINSRCREV")
768 if autoinc: 795 if autoinc:
769 return "AUTOINC+" + rev 796 return "AUTOINC+" + rev, revs
770 return rev 797 return rev, revs
771 798
772 # 799 #
 773 # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT 800 # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
@@ -783,6 +810,7 @@ def get_srcrev(d, method_name='sortable_revision'):
783 ud = urldata[scm] 810 ud = urldata[scm]
784 for name in ud.names: 811 for name in ud.names:
785 autoinc, rev = getattr(ud.method, method_name)(ud, d, name) 812 autoinc, rev = getattr(ud.method, method_name)(ud, d, name)
813 revs.append(rev)
786 seenautoinc = seenautoinc or autoinc 814 seenautoinc = seenautoinc or autoinc
787 if len(rev) > 10: 815 if len(rev) > 10:
788 rev = rev[:10] 816 rev = rev[:10]
@@ -799,12 +827,70 @@ def get_srcrev(d, method_name='sortable_revision'):
799 if seenautoinc: 827 if seenautoinc:
800 format = "AUTOINC+" + format 828 format = "AUTOINC+" + format
801 829
802 return format 830 d.delVar("__BBINSRCREV")
831 return format, revs
832
833def get_hashvalue(d, method_name='sortable_revision'):
834 pkgv, revs = _get_srcrev(d, method_name=method_name)
835 return " ".join(revs)
836
837def get_pkgv_string(d, method_name='sortable_revision'):
838 pkgv, revs = _get_srcrev(d, method_name=method_name)
839 return pkgv
840
841def get_srcrev(d, method_name='sortable_revision'):
842 pkgv, revs = _get_srcrev(d, method_name=method_name)
843 if not pkgv:
844 raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")
845 return pkgv
803 846
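
A minimal sketch of the public wrappers built on _get_srcrev() above, assuming d is a datastore whose SRC_URI contains at least one SCM url; the return values shown in comments are illustrative.

    import bb.fetch2

    pv = bb.fetch2.get_srcrev(d)          # e.g. "AUTOINC+0123456789"; raises if no SCM found
    revs = bb.fetch2.get_hashvalue(d)     # space-separated full revisions, for hashing
    pkgv = bb.fetch2.get_pkgv_string(d)   # like get_srcrev(), but empty when no SCM is found
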
804def localpath(url, d): 847def localpath(url, d):
805 fetcher = bb.fetch2.Fetch([url], d) 848 fetcher = bb.fetch2.Fetch([url], d)
806 return fetcher.localpath(url) 849 return fetcher.localpath(url)
807 850
 851# Need to export PATH as the binary could be in metadata paths
 852# rather than host-provided ones.
853# Also include some other variables.
854FETCH_EXPORT_VARS = ['HOME', 'PATH',
855 'HTTP_PROXY', 'http_proxy',
856 'HTTPS_PROXY', 'https_proxy',
857 'FTP_PROXY', 'ftp_proxy',
858 'FTPS_PROXY', 'ftps_proxy',
859 'NO_PROXY', 'no_proxy',
860 'ALL_PROXY', 'all_proxy',
861 'GIT_PROXY_COMMAND',
862 'GIT_SSH',
863 'GIT_SSH_COMMAND',
864 'GIT_SSL_CAINFO',
865 'GIT_SMART_HTTP',
866 'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
867 'SOCKS5_USER', 'SOCKS5_PASSWD',
868 'DBUS_SESSION_BUS_ADDRESS',
869 'P4CONFIG',
870 'SSL_CERT_FILE',
871 'NODE_EXTRA_CA_CERTS',
872 'AWS_PROFILE',
873 'AWS_ACCESS_KEY_ID',
874 'AWS_SECRET_ACCESS_KEY',
875 'AWS_ROLE_ARN',
876 'AWS_WEB_IDENTITY_TOKEN_FILE',
877 'AWS_DEFAULT_REGION',
878 'AWS_SESSION_TOKEN',
879 'GIT_CACHE_PATH',
880 'REMOTE_CONTAINERS_IPC',
881 'SSL_CERT_DIR']
882
883def get_fetcher_environment(d):
884 newenv = {}
885 origenv = d.getVar("BB_ORIGENV")
886 for name in bb.fetch2.FETCH_EXPORT_VARS:
887 value = d.getVar(name)
888 if not value and origenv:
889 value = origenv.getVar(name)
890 if value:
891 newenv[name] = value
892 return newenv
893
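
A hedged sketch of consuming get_fetcher_environment(), for instance to run an external tool under the same proxy/credential environment the fetchers use; the git invocation is illustrative only.

    import subprocess
    import bb.fetch2

    env = bb.fetch2.get_fetcher_environment(d)
    # env holds only the FETCH_EXPORT_VARS that are actually set, read from the
    # datastore first with a fallback to the original BB_ORIGENV environment.
    subprocess.run(["git", "ls-remote", "https://example.com/repo.git"],
                   env=env, check=True)
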
808def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None): 894def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
809 """ 895 """
810 Run cmd returning the command output 896 Run cmd returning the command output
@@ -813,25 +899,7 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
813 Optionally remove the files/directories listed in cleanup upon failure 899 Optionally remove the files/directories listed in cleanup upon failure
814 """ 900 """
815 901
816 # Need to export PATH as binary could be in metadata paths 902 exportvars = FETCH_EXPORT_VARS
817 # rather than host provided
818 # Also include some other variables.
819 # FIXME: Should really include all export varaiables?
820 exportvars = ['HOME', 'PATH',
821 'HTTP_PROXY', 'http_proxy',
822 'HTTPS_PROXY', 'https_proxy',
823 'FTP_PROXY', 'ftp_proxy',
824 'FTPS_PROXY', 'ftps_proxy',
825 'NO_PROXY', 'no_proxy',
826 'ALL_PROXY', 'all_proxy',
827 'GIT_PROXY_COMMAND',
828 'GIT_SSH',
829 'GIT_SSL_CAINFO',
830 'GIT_SMART_HTTP',
831 'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
832 'SOCKS5_USER', 'SOCKS5_PASSWD',
833 'DBUS_SESSION_BUS_ADDRESS',
834 'P4CONFIG']
835 903
836 if not cleanup: 904 if not cleanup:
837 cleanup = [] 905 cleanup = []
@@ -868,14 +936,17 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
868 (output, errors) = bb.process.run(cmd, log=log, shell=True, stderr=subprocess.PIPE, cwd=workdir) 936 (output, errors) = bb.process.run(cmd, log=log, shell=True, stderr=subprocess.PIPE, cwd=workdir)
869 success = True 937 success = True
870 except bb.process.NotFoundError as e: 938 except bb.process.NotFoundError as e:
871 error_message = "Fetch command %s" % (e.command) 939 error_message = "Fetch command %s not found" % (e.command)
872 except bb.process.ExecutionError as e: 940 except bb.process.ExecutionError as e:
873 if e.stdout: 941 if e.stdout:
874 output = "output:\n%s\n%s" % (e.stdout, e.stderr) 942 output = "output:\n%s\n%s" % (e.stdout, e.stderr)
875 elif e.stderr: 943 elif e.stderr:
876 output = "output:\n%s" % e.stderr 944 output = "output:\n%s" % e.stderr
877 else: 945 else:
878 output = "no output" 946 if log:
947 output = "see logfile for output"
948 else:
949 output = "no output"
879 error_message = "Fetch command %s failed with exit code %s, %s" % (e.command, e.exitcode, output) 950 error_message = "Fetch command %s failed with exit code %s, %s" % (e.command, e.exitcode, output)
880 except bb.process.CmdError as e: 951 except bb.process.CmdError as e:
881 error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg) 952 error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg)
@@ -937,6 +1008,7 @@ def build_mirroruris(origud, mirrors, ld):
937 1008
938 try: 1009 try:
939 newud = FetchData(newuri, ld) 1010 newud = FetchData(newuri, ld)
1011 newud.ignore_checksums = True
940 newud.setup_localpath(ld) 1012 newud.setup_localpath(ld)
941 except bb.fetch2.BBFetchException as e: 1013 except bb.fetch2.BBFetchException as e:
942 logger.debug("Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url)) 1014 logger.debug("Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
@@ -1046,7 +1118,8 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
1046 logger.debug("Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url)) 1118 logger.debug("Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
1047 logger.debug(str(e)) 1119 logger.debug(str(e))
1048 try: 1120 try:
1049 ud.method.clean(ud, ld) 1121 if ud.method.cleanup_upon_failure():
1122 ud.method.clean(ud, ld)
1050 except UnboundLocalError: 1123 except UnboundLocalError:
1051 pass 1124 pass
1052 return False 1125 return False
@@ -1057,6 +1130,8 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
1057 1130
1058def ensure_symlink(target, link_name): 1131def ensure_symlink(target, link_name):
1059 if not os.path.exists(link_name): 1132 if not os.path.exists(link_name):
1133 dirname = os.path.dirname(link_name)
1134 bb.utils.mkdirhier(dirname)
1060 if os.path.islink(link_name): 1135 if os.path.islink(link_name):
1061 # Broken symbolic link 1136 # Broken symbolic link
1062 os.unlink(link_name) 1137 os.unlink(link_name)
@@ -1140,11 +1215,11 @@ def srcrev_internal_helper(ud, d, name):
1140 pn = d.getVar("PN") 1215 pn = d.getVar("PN")
1141 attempts = [] 1216 attempts = []
1142 if name != '' and pn: 1217 if name != '' and pn:
1143 attempts.append("SRCREV_%s_pn-%s" % (name, pn)) 1218 attempts.append("SRCREV_%s:pn-%s" % (name, pn))
1144 if name != '': 1219 if name != '':
1145 attempts.append("SRCREV_%s" % name) 1220 attempts.append("SRCREV_%s" % name)
1146 if pn: 1221 if pn:
1147 attempts.append("SRCREV_pn-%s" % pn) 1222 attempts.append("SRCREV:pn-%s" % pn)
1148 attempts.append("SRCREV") 1223 attempts.append("SRCREV")
1149 1224
1150 for a in attempts: 1225 for a in attempts:
@@ -1169,6 +1244,7 @@ def srcrev_internal_helper(ud, d, name):
1169 if srcrev == "INVALID" or not srcrev: 1244 if srcrev == "INVALID" or not srcrev:
1170 raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url) 1245 raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url)
1171 if srcrev == "AUTOINC": 1246 if srcrev == "AUTOINC":
1247 d.setVar("__BBAUTOREV_ACTED_UPON", True)
1172 srcrev = ud.method.latest_revision(ud, d, name) 1248 srcrev = ud.method.latest_revision(ud, d, name)
1173 1249
1174 return srcrev 1250 return srcrev
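
The lookup order used by srcrev_internal_helper() above, written out as a sketch with the new ":pn-" override syntax; the recipe name foo and the name=meta entry are hypothetical, and the keys are checked most-specific first.

    d.setVar("SRCREV_meta:pn-foo", "0123456789abcdef0123456789abcdef01234567")
    d.setVar("SRCREV_meta", "0123456789abcdef0123456789abcdef01234567")
    d.setVar("SRCREV:pn-foo", "0123456789abcdef0123456789abcdef01234567")
    d.setVar("SRCREV", "0123456789abcdef0123456789abcdef01234567")
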
@@ -1180,23 +1256,21 @@ def get_checksum_file_list(d):
1180 SRC_URI as a space-separated string 1256 SRC_URI as a space-separated string
1181 """ 1257 """
1182 fetch = Fetch([], d, cache = False, localonly = True) 1258 fetch = Fetch([], d, cache = False, localonly = True)
1183
1184 dl_dir = d.getVar('DL_DIR')
1185 filelist = [] 1259 filelist = []
1186 for u in fetch.urls: 1260 for u in fetch.urls:
1187 ud = fetch.ud[u] 1261 ud = fetch.ud[u]
1188
1189 if ud and isinstance(ud.method, local.Local): 1262 if ud and isinstance(ud.method, local.Local):
1190 paths = ud.method.localpaths(ud, d) 1263 found = False
1264 paths = ud.method.localfile_searchpaths(ud, d)
1191 for f in paths: 1265 for f in paths:
1192 pth = ud.decodedurl 1266 pth = ud.decodedurl
1193 if f.startswith(dl_dir): 1267 if os.path.exists(f):
1194 # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else 1268 found = True
1195 if os.path.exists(f):
1196 bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN'), os.path.basename(f)))
1197 else:
1198 bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN'), os.path.basename(f)))
1199 filelist.append(f + ":" + str(os.path.exists(f))) 1269 filelist.append(f + ":" + str(os.path.exists(f)))
1270 if not found:
1271 bb.fatal(("Unable to get checksum for %s SRC_URI entry %s: file could not be found"
1272 "\nThe following paths were searched:"
1273 "\n%s") % (d.getVar('PN'), os.path.basename(f), '\n'.join(paths)))
1200 1274
1201 return " ".join(filelist) 1275 return " ".join(filelist)
1202 1276
@@ -1243,18 +1317,13 @@ class FetchData(object):
1243 1317
1244 if checksum_name in self.parm: 1318 if checksum_name in self.parm:
1245 checksum_expected = self.parm[checksum_name] 1319 checksum_expected = self.parm[checksum_name]
1246 elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3"]: 1320 elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3", "az", "crate", "gs"]:
1247 checksum_expected = None 1321 checksum_expected = None
1248 else: 1322 else:
1249 checksum_expected = d.getVarFlag("SRC_URI", checksum_name) 1323 checksum_expected = d.getVarFlag("SRC_URI", checksum_name)
1250 1324
1251 setattr(self, "%s_expected" % checksum_id, checksum_expected) 1325 setattr(self, "%s_expected" % checksum_id, checksum_expected)
1252 1326
1253 for checksum_id in CHECKSUM_LIST:
1254 configure_checksum(checksum_id)
1255
1256 self.ignore_checksums = False
1257
1258 self.names = self.parm.get("name",'default').split(',') 1327 self.names = self.parm.get("name",'default').split(',')
1259 1328
1260 self.method = None 1329 self.method = None
@@ -1276,6 +1345,11 @@ class FetchData(object):
1276 if hasattr(self.method, "urldata_init"): 1345 if hasattr(self.method, "urldata_init"):
1277 self.method.urldata_init(self, d) 1346 self.method.urldata_init(self, d)
1278 1347
1348 for checksum_id in CHECKSUM_LIST:
1349 configure_checksum(checksum_id)
1350
1351 self.ignore_checksums = False
1352
1279 if "localpath" in self.parm: 1353 if "localpath" in self.parm:
1280 # if user sets localpath for file, use it instead. 1354 # if user sets localpath for file, use it instead.
1281 self.localpath = self.parm["localpath"] 1355 self.localpath = self.parm["localpath"]
@@ -1355,6 +1429,9 @@ class FetchMethod(object):
1355 Is localpath something that can be represented by a checksum? 1429 Is localpath something that can be represented by a checksum?
1356 """ 1430 """
1357 1431
1432 # We cannot compute checksums for None
1433 if urldata.localpath is None:
1434 return False
1358 # We cannot compute checksums for directories 1435 # We cannot compute checksums for directories
1359 if os.path.isdir(urldata.localpath): 1436 if os.path.isdir(urldata.localpath):
1360 return False 1437 return False
@@ -1367,6 +1444,12 @@ class FetchMethod(object):
1367 """ 1444 """
1368 return False 1445 return False
1369 1446
1447 def cleanup_upon_failure(self):
1448 """
1449 When a fetch fails, should clean() be called?
1450 """
1451 return True
1452
1370 def verify_donestamp(self, ud, d): 1453 def verify_donestamp(self, ud, d):
1371 """ 1454 """
1372 Verify the donestamp file 1455 Verify the donestamp file
@@ -1434,30 +1517,33 @@ class FetchMethod(object):
1434 cmd = None 1517 cmd = None
1435 1518
1436 if unpack: 1519 if unpack:
1520 tar_cmd = 'tar --extract --no-same-owner'
1521 if 'striplevel' in urldata.parm:
1522 tar_cmd += ' --strip-components=%s' % urldata.parm['striplevel']
1437 if file.endswith('.tar'): 1523 if file.endswith('.tar'):
1438 cmd = 'tar x --no-same-owner -f %s' % file 1524 cmd = '%s -f %s' % (tar_cmd, file)
1439 elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'): 1525 elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
1440 cmd = 'tar xz --no-same-owner -f %s' % file 1526 cmd = '%s -z -f %s' % (tar_cmd, file)
1441 elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'): 1527 elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
1442 cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file 1528 cmd = 'bzip2 -dc %s | %s -f -' % (file, tar_cmd)
1443 elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'): 1529 elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
1444 cmd = 'gzip -dc %s > %s' % (file, efile) 1530 cmd = 'gzip -dc %s > %s' % (file, efile)
1445 elif file.endswith('.bz2'): 1531 elif file.endswith('.bz2'):
1446 cmd = 'bzip2 -dc %s > %s' % (file, efile) 1532 cmd = 'bzip2 -dc %s > %s' % (file, efile)
1447 elif file.endswith('.txz') or file.endswith('.tar.xz'): 1533 elif file.endswith('.txz') or file.endswith('.tar.xz'):
1448 cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file 1534 cmd = 'xz -dc %s | %s -f -' % (file, tar_cmd)
1449 elif file.endswith('.xz'): 1535 elif file.endswith('.xz'):
1450 cmd = 'xz -dc %s > %s' % (file, efile) 1536 cmd = 'xz -dc %s > %s' % (file, efile)
1451 elif file.endswith('.tar.lz'): 1537 elif file.endswith('.tar.lz'):
1452 cmd = 'lzip -dc %s | tar x --no-same-owner -f -' % file 1538 cmd = 'lzip -dc %s | %s -f -' % (file, tar_cmd)
1453 elif file.endswith('.lz'): 1539 elif file.endswith('.lz'):
1454 cmd = 'lzip -dc %s > %s' % (file, efile) 1540 cmd = 'lzip -dc %s > %s' % (file, efile)
1455 elif file.endswith('.tar.7z'): 1541 elif file.endswith('.tar.7z'):
1456 cmd = '7z x -so %s | tar x --no-same-owner -f -' % file 1542 cmd = '7z x -so %s | %s -f -' % (file, tar_cmd)
1457 elif file.endswith('.7z'): 1543 elif file.endswith('.7z'):
1458 cmd = '7za x -y %s 1>/dev/null' % file 1544 cmd = '7za x -y %s 1>/dev/null' % file
1459 elif file.endswith('.tzst') or file.endswith('.tar.zst'): 1545 elif file.endswith('.tzst') or file.endswith('.tar.zst'):
1460 cmd = 'zstd --decompress --stdout %s | tar x --no-same-owner -f -' % file 1546 cmd = 'zstd --decompress --stdout %s | %s -f -' % (file, tar_cmd)
1461 elif file.endswith('.zst'): 1547 elif file.endswith('.zst'):
1462 cmd = 'zstd --decompress --stdout %s > %s' % (file, efile) 1548 cmd = 'zstd --decompress --stdout %s > %s' % (file, efile)
1463 elif file.endswith('.zip') or file.endswith('.jar'): 1549 elif file.endswith('.zip') or file.endswith('.jar'):
@@ -1490,7 +1576,7 @@ class FetchMethod(object):
1490 raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar.* file", urldata.url) 1576 raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar.* file", urldata.url)
1491 else: 1577 else:
1492 raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url) 1578 raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url)
1493 cmd = 'ar x %s %s && tar --no-same-owner -xpf %s && rm %s' % (file, datafile, datafile, datafile) 1579 cmd = 'ar x %s %s && %s -p -f %s && rm %s' % (file, datafile, tar_cmd, datafile, datafile)
1494 1580
1495 # If 'subdir' param exists, create a dir and use it as destination for unpack cmd 1581 # If 'subdir' param exists, create a dir and use it as destination for unpack cmd
1496 if 'subdir' in urldata.parm: 1582 if 'subdir' in urldata.parm:
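
A hedged example of the new striplevel parameter wired into tar_cmd above; the URL is a placeholder and the recipe fragment is expressed via the datastore for illustration.

    d.setVar("SRC_URI", "https://example.com/foo-1.0.tar.gz;striplevel=1")
    # unpack() then builds, roughly:
    #   tar --extract --no-same-owner --strip-components=1 -z -f .../foo-1.0.tar.gz
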
@@ -1506,6 +1592,7 @@ class FetchMethod(object):
1506 unpackdir = rootdir 1592 unpackdir = rootdir
1507 1593
1508 if not unpack or not cmd: 1594 if not unpack or not cmd:
1595 urldata.unpack_tracer.unpack("file-copy", unpackdir)
1509 # If file == dest, then avoid any copies, as we already put the file into dest! 1596 # If file == dest, then avoid any copies, as we already put the file into dest!
1510 dest = os.path.join(unpackdir, os.path.basename(file)) 1597 dest = os.path.join(unpackdir, os.path.basename(file))
1511 if file != dest and not (os.path.exists(dest) and os.path.samefile(file, dest)): 1598 if file != dest and not (os.path.exists(dest) and os.path.samefile(file, dest)):
@@ -1520,6 +1607,8 @@ class FetchMethod(object):
1520 destdir = urlpath.rsplit("/", 1)[0] + '/' 1607 destdir = urlpath.rsplit("/", 1)[0] + '/'
1521 bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir)) 1608 bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir))
1522 cmd = 'cp -fpPRH "%s" "%s"' % (file, destdir) 1609 cmd = 'cp -fpPRH "%s" "%s"' % (file, destdir)
1610 else:
1611 urldata.unpack_tracer.unpack("archive-extract", unpackdir)
1523 1612
1524 if not cmd: 1613 if not cmd:
1525 return 1614 return
@@ -1611,12 +1700,61 @@ class FetchMethod(object):
1611 """ 1700 """
1612 return [] 1701 return []
1613 1702
1703
1704class DummyUnpackTracer(object):
1705 """
1706 Abstract API definition for a class that traces unpacked source files back
1707 to their respective upstream SRC_URI entries, for software composition
1708 analysis, license compliance and detailed SBOM generation purposes.
 1709 Users may load their own unpack tracer class (instead of the dummy
1710 one) by setting the BB_UNPACK_TRACER_CLASS config parameter.
1711 """
1712 def start(self, unpackdir, urldata_dict, d):
1713 """
1714 Start tracing the core Fetch.unpack process, using an index to map
1715 unpacked files to each SRC_URI entry.
 1716 This method is called by Fetch.unpack and it may receive nested calls from
 1717 the gitsm and npmsw fetchers, which expand SRC_URI entries by adding implicit
1718 URLs and by recursively calling Fetch.unpack from new (nested) Fetch
1719 instances.
1720 """
1721 return
1722 def start_url(self, url):
1723 """Start tracing url unpack process.
1724 This method is called by Fetch.unpack before the fetcher-specific unpack
 1725 method starts, and it may receive nested calls from the gitsm and npmsw
1726 fetchers.
1727 """
1728 return
1729 def unpack(self, unpack_type, destdir):
1730 """
1731 Set unpack_type and destdir for current url.
1732 This method is called by the fetcher-specific unpack method after url
1733 tracing started.
1734 """
1735 return
1736 def finish_url(self, url):
1737 """Finish tracing url unpack process and update the file index.
1738 This method is called by Fetch.unpack after the fetcher-specific unpack
 1739 method finished its job, and it may receive nested calls from the gitsm
1740 and npmsw fetchers.
1741 """
1742 return
1743 def complete(self):
1744 """
1745 Finish tracing the Fetch.unpack process, and check if all nested
 1746 Fetch.unpack calls (if any) have been completed; if so, save collected
1747 metadata.
1748 """
1749 return
1750
1751
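
A minimal sketch of a user-supplied tracer implementing the API above; the module path and class name are hypothetical, and only the hook bodies are invented.

    # mytracing/tracer.py
    class FileIndexTracer:
        """Map each SRC_URI entry to the directories it unpacked into."""
        def start(self, unpackdir, urldata_dict, d):
            self.index, self.current = {}, None
        def start_url(self, url):
            self.current = url
        def unpack(self, unpack_type, destdir):
            self.index.setdefault(self.current, []).append((unpack_type, destdir))
        def finish_url(self, url):
            self.current = None
        def complete(self):
            pass  # e.g. serialise self.index for SBOM generation

    # Enabled with: BB_UNPACK_TRACER_CLASS = "mytracing.tracer.FileIndexTracer"
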
1614class Fetch(object): 1752class Fetch(object):
1615 def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None): 1753 def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None):
1616 if localonly and cache: 1754 if localonly and cache:
1617 raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time") 1755 raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")
1618 1756
1619 if len(urls) == 0: 1757 if not urls:
1620 urls = d.getVar("SRC_URI").split() 1758 urls = d.getVar("SRC_URI").split()
1621 self.urls = urls 1759 self.urls = urls
1622 self.d = d 1760 self.d = d
@@ -1631,10 +1769,30 @@ class Fetch(object):
1631 if key in urldata_cache: 1769 if key in urldata_cache:
1632 self.ud = urldata_cache[key] 1770 self.ud = urldata_cache[key]
1633 1771
1772 # the unpack_tracer object needs to be made available to possible nested
1773 # Fetch instances (when those are created by gitsm and npmsw fetchers)
1774 # so we set it as a global variable
1775 global unpack_tracer
1776 try:
1777 unpack_tracer
1778 except NameError:
1779 class_path = d.getVar("BB_UNPACK_TRACER_CLASS")
1780 if class_path:
1781 # use user-defined unpack tracer class
1782 import importlib
1783 module_name, _, class_name = class_path.rpartition(".")
1784 module = importlib.import_module(module_name)
1785 class_ = getattr(module, class_name)
1786 unpack_tracer = class_()
1787 else:
1788 # fall back to the dummy/abstract class
1789 unpack_tracer = DummyUnpackTracer()
1790
1634 for url in urls: 1791 for url in urls:
1635 if url not in self.ud: 1792 if url not in self.ud:
1636 try: 1793 try:
1637 self.ud[url] = FetchData(url, d, localonly) 1794 self.ud[url] = FetchData(url, d, localonly)
1795 self.ud[url].unpack_tracer = unpack_tracer
1638 except NonLocalMethod: 1796 except NonLocalMethod:
1639 if localonly: 1797 if localonly:
1640 self.ud[url] = None 1798 self.ud[url] = None
@@ -1673,6 +1831,7 @@ class Fetch(object):
1673 network = self.d.getVar("BB_NO_NETWORK") 1831 network = self.d.getVar("BB_NO_NETWORK")
1674 premirroronly = bb.utils.to_boolean(self.d.getVar("BB_FETCH_PREMIRRORONLY")) 1832 premirroronly = bb.utils.to_boolean(self.d.getVar("BB_FETCH_PREMIRRORONLY"))
1675 1833
1834 checksum_missing_messages = []
1676 for u in urls: 1835 for u in urls:
1677 ud = self.ud[u] 1836 ud = self.ud[u]
1678 ud.setup_localpath(self.d) 1837 ud.setup_localpath(self.d)
@@ -1684,7 +1843,6 @@ class Fetch(object):
1684 1843
1685 try: 1844 try:
1686 self.d.setVar("BB_NO_NETWORK", network) 1845 self.d.setVar("BB_NO_NETWORK", network)
1687
1688 if m.verify_donestamp(ud, self.d) and not m.need_update(ud, self.d): 1846 if m.verify_donestamp(ud, self.d) and not m.need_update(ud, self.d):
1689 done = True 1847 done = True
1690 elif m.try_premirror(ud, self.d): 1848 elif m.try_premirror(ud, self.d):
@@ -1705,7 +1863,9 @@ class Fetch(object):
1705 self.d.setVar("BB_NO_NETWORK", "1") 1863 self.d.setVar("BB_NO_NETWORK", "1")
1706 1864
1707 firsterr = None 1865 firsterr = None
1708 verified_stamp = m.verify_donestamp(ud, self.d) 1866 verified_stamp = False
1867 if done:
1868 verified_stamp = m.verify_donestamp(ud, self.d)
1709 if not done and (not verified_stamp or m.need_update(ud, self.d)): 1869 if not done and (not verified_stamp or m.need_update(ud, self.d)):
1710 try: 1870 try:
1711 if not trusted_network(self.d, ud.url): 1871 if not trusted_network(self.d, ud.url):
@@ -1735,7 +1895,7 @@ class Fetch(object):
1735 logger.debug(str(e)) 1895 logger.debug(str(e))
1736 firsterr = e 1896 firsterr = e
1737 # Remove any incomplete fetch 1897 # Remove any incomplete fetch
1738 if not verified_stamp: 1898 if not verified_stamp and m.cleanup_upon_failure():
1739 m.clean(ud, self.d) 1899 m.clean(ud, self.d)
1740 logger.debug("Trying MIRRORS") 1900 logger.debug("Trying MIRRORS")
1741 mirrors = mirror_from_string(self.d.getVar('MIRRORS')) 1901 mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
@@ -1754,17 +1914,28 @@ class Fetch(object):
1754 raise ChecksumError("Stale Error Detected") 1914 raise ChecksumError("Stale Error Detected")
1755 1915
1756 except BBFetchException as e: 1916 except BBFetchException as e:
1757 if isinstance(e, ChecksumError): 1917 if isinstance(e, NoChecksumError):
1918 (message, _) = e.args
1919 checksum_missing_messages.append(message)
1920 continue
1921 elif isinstance(e, ChecksumError):
1758 logger.error("Checksum failure fetching %s" % u) 1922 logger.error("Checksum failure fetching %s" % u)
1759 raise 1923 raise
1760 1924
1761 finally: 1925 finally:
1762 if ud.lockfile: 1926 if ud.lockfile:
1763 bb.utils.unlockfile(lf) 1927 bb.utils.unlockfile(lf)
1928 if checksum_missing_messages:
 1929 logger.error("Missing SRC_URI checksum(s), please add them to the recipe:\n%s", "\n".join(checksum_missing_messages))
 1930 raise BBFetchException("There were missing checksums in the recipe")
1764 1931
1765 def checkstatus(self, urls=None): 1932 def checkstatus(self, urls=None):
1766 """ 1933 """
1767 Check all urls exist upstream 1934 Check all URLs exist upstream.
1935
1936 Returns None if the URLs exist, raises FetchError if the check wasn't
1937 successful but there wasn't an error (such as file not found), and
1938 raises other exceptions in error cases.
1768 """ 1939 """
1769 1940
1770 if not urls: 1941 if not urls:
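
A hedged usage sketch matching the checkstatus() contract documented above; the URL is a placeholder.

    from bb.fetch2 import Fetch, FetchError

    fetcher = Fetch(["https://example.com/foo-1.0.tar.gz"], d)
    try:
        fetcher.checkstatus()      # returns None when every URL exists upstream
    except FetchError:
        pass                       # URL missing upstream, but no hard error occurred
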
@@ -1787,7 +1958,7 @@ class Fetch(object):
1787 ret = m.try_mirrors(self, ud, self.d, mirrors, True) 1958 ret = m.try_mirrors(self, ud, self.d, mirrors, True)
1788 1959
1789 if not ret: 1960 if not ret:
1790 raise FetchError("URL %s doesn't work" % u, u) 1961 raise FetchError("URL doesn't work", u)
1791 1962
1792 def unpack(self, root, urls=None): 1963 def unpack(self, root, urls=None):
1793 """ 1964 """
@@ -1797,6 +1968,8 @@ class Fetch(object):
1797 if not urls: 1968 if not urls:
1798 urls = self.urls 1969 urls = self.urls
1799 1970
1971 unpack_tracer.start(root, self.ud, self.d)
1972
1800 for u in urls: 1973 for u in urls:
1801 ud = self.ud[u] 1974 ud = self.ud[u]
1802 ud.setup_localpath(self.d) 1975 ud.setup_localpath(self.d)
@@ -1804,11 +1977,15 @@ class Fetch(object):
1804 if ud.lockfile: 1977 if ud.lockfile:
1805 lf = bb.utils.lockfile(ud.lockfile) 1978 lf = bb.utils.lockfile(ud.lockfile)
1806 1979
1980 unpack_tracer.start_url(u)
1807 ud.method.unpack(ud, root, self.d) 1981 ud.method.unpack(ud, root, self.d)
1982 unpack_tracer.finish_url(u)
1808 1983
1809 if ud.lockfile: 1984 if ud.lockfile:
1810 bb.utils.unlockfile(lf) 1985 bb.utils.unlockfile(lf)
1811 1986
1987 unpack_tracer.complete()
1988
1812 def clean(self, urls=None): 1989 def clean(self, urls=None):
1813 """ 1990 """
1814 Clean files that the fetcher gets or places 1991 Clean files that the fetcher gets or places
@@ -1908,6 +2085,9 @@ from . import repo
1908from . import clearcase 2085from . import clearcase
1909from . import npm 2086from . import npm
1910from . import npmsw 2087from . import npmsw
2088from . import az
2089from . import crate
2090from . import gcp
1911 2091
1912methods.append(local.Local()) 2092methods.append(local.Local())
1913methods.append(wget.Wget()) 2093methods.append(wget.Wget())
@@ -1927,3 +2107,6 @@ methods.append(repo.Repo())
1927methods.append(clearcase.ClearCase()) 2107methods.append(clearcase.ClearCase())
1928methods.append(npm.Npm()) 2108methods.append(npm.Npm())
1929methods.append(npmsw.NpmShrinkWrap()) 2109methods.append(npmsw.NpmShrinkWrap())
2110methods.append(az.Az())
2111methods.append(crate.Crate())
2112methods.append(gcp.GCP())
diff --git a/bitbake/lib/bb/fetch2/az.py b/bitbake/lib/bb/fetch2/az.py
new file mode 100644
index 0000000000..3ccc594c22
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/az.py
@@ -0,0 +1,93 @@
1"""
2BitBake 'Fetch' Azure Storage implementation
3
4"""
5
6# Copyright (C) 2021 Alejandro Hernandez Samaniego
7#
8# Based on bb.fetch2.wget:
9# Copyright (C) 2003, 2004 Chris Larson
10#
11# SPDX-License-Identifier: GPL-2.0-only
12#
13# Based on functions from the base bb module, Copyright 2003 Holger Schurig
14
15import shlex
16import os
17import bb
18from bb.fetch2 import FetchError
19from bb.fetch2 import logger
20from bb.fetch2.wget import Wget
21
22
23class Az(Wget):
24
25 def supports(self, ud, d):
26 """
27 Check to see if a given url can be fetched from Azure Storage
28 """
29 return ud.type in ['az']
30
31
32 def checkstatus(self, fetch, ud, d, try_again=True):
33
 34 # checkstatus discards parameters either way; we need to do this before adding the SAS
35 ud.url = ud.url.replace('az://','https://').split(';')[0]
36
37 az_sas = d.getVar('AZ_SAS')
38 if az_sas and az_sas not in ud.url:
39 ud.url += az_sas
40
41 return Wget.checkstatus(self, fetch, ud, d, try_again)
42
43 # Override download method, include retries
44 def download(self, ud, d, retries=3):
45 """Fetch urls"""
46
 47 # If we're reaching the account transaction limit we might be refused a connection,
48 # retrying allows us to avoid false negatives since the limit changes over time
49 fetchcmd = self.basecmd + ' --retry-connrefused --waitretry=5'
50
51 # We need to provide a localpath to avoid wget using the SAS
52 # ud.localfile either has the downloadfilename or ud.path
53 localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile)
54 bb.utils.mkdirhier(os.path.dirname(localpath))
55 fetchcmd += " -O %s" % shlex.quote(localpath)
56
57
58 if ud.user and ud.pswd:
59 fetchcmd += " --user=%s --password=%s --auth-no-challenge" % (ud.user, ud.pswd)
60
61 # Check if a Shared Access Signature was given and use it
62 az_sas = d.getVar('AZ_SAS')
63
64 if az_sas:
65 azuri = '%s%s%s%s' % ('https://', ud.host, ud.path, az_sas)
66 else:
67 azuri = '%s%s%s' % ('https://', ud.host, ud.path)
68
69 if os.path.exists(ud.localpath):
 70 # file exists, but we didn't complete it... trying again.
71 fetchcmd += d.expand(" -c -P ${DL_DIR} '%s'" % azuri)
72 else:
73 fetchcmd += d.expand(" -P ${DL_DIR} '%s'" % azuri)
74
75 try:
76 self._runwget(ud, d, fetchcmd, False)
77 except FetchError as e:
 78 # Azure sometimes fails the handshake when using wget after some stress, producing a
 79 # FetchError from the fetcher; if the artifact exists, retrying should succeed
80 if 'Unable to establish SSL connection' in str(e):
81 logger.debug2('Unable to establish SSL connection: Retries remaining: %s, Retrying...' % retries)
 82 self.download(ud, d, retries - 1)
83
 84 # Sanity check since wget can pretend it succeeded when it didn't
85 # Also, this used to happen if sourceforge sent us to the mirror page
86 if not os.path.exists(ud.localpath):
87 raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (azuri, ud.localpath), azuri)
88
89 if os.path.getsize(ud.localpath) == 0:
90 os.remove(ud.localpath)
91 raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (azuri), azuri)
92
93 return True
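
A hedged sketch of using the new az fetcher; the storage account, container and SAS token are placeholders.

    import bb.fetch2

    # Optional Shared Access Signature, appended to the rewritten https:// url
    d.setVar("AZ_SAS", "?sv=2021-06-08&ss=b&sig=REDACTED")
    d.setVar("SRC_URI", "az://example.blob.core.windows.net/container/foo-1.0.tar.gz")

    fetcher = bb.fetch2.Fetch(d.getVar("SRC_URI").split(), d)
    fetcher.download()
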
diff --git a/bitbake/lib/bb/fetch2/crate.py b/bitbake/lib/bb/fetch2/crate.py
new file mode 100644
index 0000000000..01d49435c3
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/crate.py
@@ -0,0 +1,141 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementation for crates.io
5"""
6
7# Copyright (C) 2016 Doug Goldstein
8#
9# SPDX-License-Identifier: GPL-2.0-only
10#
11# Based on functions from the base bb module, Copyright 2003 Holger Schurig
12
13import hashlib
14import json
15import os
16import subprocess
17import bb
18from bb.fetch2 import logger, subprocess_setup, UnpackError
19from bb.fetch2.wget import Wget
20
21
22class Crate(Wget):
23
24 """Class to fetch crates via wget"""
25
26 def _cargo_bitbake_path(self, rootdir):
27 return os.path.join(rootdir, "cargo_home", "bitbake")
28
29 def supports(self, ud, d):
30 """
31 Check to see if a given url is for this fetcher
32 """
33 return ud.type in ['crate']
34
35 def recommends_checksum(self, urldata):
36 return True
37
38 def urldata_init(self, ud, d):
39 """
40 Sets up to download the respective crate from crates.io
41 """
42
43 if ud.type == 'crate':
44 self._crate_urldata_init(ud, d)
45
46 super(Crate, self).urldata_init(ud, d)
47
48 def _crate_urldata_init(self, ud, d):
49 """
50 Sets up the download for a crate
51 """
52
53 # URL syntax is: crate://NAME/VERSION
54 # break the URL apart by /
55 parts = ud.url.split('/')
56 if len(parts) < 5:
57 raise bb.fetch2.ParameterError("Invalid URL: Must be crate://HOST/NAME/VERSION", ud.url)
58
59 # version is expected to be the last token
60 # but ignore possible url parameters which will be used
61 # by the top fetcher class
62 version = parts[-1].split(";")[0]
63 # second to last field is name
64 name = parts[-2]
 65 # host (this is to allow custom crate registries to be specified)
66 host = '/'.join(parts[2:-2])
67
68 # if using upstream just fix it up nicely
69 if host == 'crates.io':
70 host = 'crates.io/api/v1/crates'
71
72 ud.url = "https://%s/%s/%s/download" % (host, name, version)
73 ud.parm['downloadfilename'] = "%s-%s.crate" % (name, version)
74 if 'name' not in ud.parm:
75 ud.parm['name'] = '%s-%s' % (name, version)
76
77 logger.debug2("Fetching %s to %s" % (ud.url, ud.parm['downloadfilename']))
78
79 def unpack(self, ud, rootdir, d):
80 """
81 Uses the crate to build the necessary paths for cargo to utilize it
82 """
83 if ud.type == 'crate':
84 return self._crate_unpack(ud, rootdir, d)
85 else:
86 super(Crate, self).unpack(ud, rootdir, d)
87
88 def _crate_unpack(self, ud, rootdir, d):
89 """
90 Unpacks a crate
91 """
92 thefile = ud.localpath
93
94 # possible metadata we need to write out
95 metadata = {}
96
97 # change to the rootdir to unpack but save the old working dir
98 save_cwd = os.getcwd()
99 os.chdir(rootdir)
100
101 bp = d.getVar('BP')
102 if bp == ud.parm.get('name'):
103 cmd = "tar -xz --no-same-owner -f %s" % thefile
104 ud.unpack_tracer.unpack("crate-extract", rootdir)
105 else:
106 cargo_bitbake = self._cargo_bitbake_path(rootdir)
107 ud.unpack_tracer.unpack("cargo-extract", cargo_bitbake)
108
109 cmd = "tar -xz --no-same-owner -f %s -C %s" % (thefile, cargo_bitbake)
110
111 # ensure we've got these paths made
112 bb.utils.mkdirhier(cargo_bitbake)
113
114 # generate metadata necessary
115 with open(thefile, 'rb') as f:
116 # get the SHA256 of the original tarball
117 tarhash = hashlib.sha256(f.read()).hexdigest()
118
119 metadata['files'] = {}
120 metadata['package'] = tarhash
121
122 path = d.getVar('PATH')
123 if path:
124 cmd = "PATH=\"%s\" %s" % (path, cmd)
125 bb.note("Unpacking %s to %s/" % (thefile, os.getcwd()))
126
127 ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
128
129 os.chdir(save_cwd)
130
131 if ret != 0:
132 raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), ud.url)
133
134 # if we have metadata to write out..
135 if len(metadata) > 0:
136 cratepath = os.path.splitext(os.path.basename(thefile))[0]
137 bbpath = self._cargo_bitbake_path(rootdir)
138 mdfile = '.cargo-checksum.json'
139 mdpath = os.path.join(bbpath, cratepath, mdfile)
140 with open(mdpath, "w") as f:
141 json.dump(metadata, f)
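
A minimal sketch of the crate scheme implemented above; the crate name serde and its version are placeholders.

    import bb.fetch2

    # Rewritten internally to https://crates.io/api/v1/crates/serde/1.0.100/download
    # and saved as serde-1.0.100.crate
    d.setVar("SRC_URI", "crate://crates.io/serde/1.0.100")
    fetcher = bb.fetch2.Fetch(d.getVar("SRC_URI").split(), d)
    fetcher.download()
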
diff --git a/bitbake/lib/bb/fetch2/gcp.py b/bitbake/lib/bb/fetch2/gcp.py
new file mode 100644
index 0000000000..eb3e0c6a6b
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/gcp.py
@@ -0,0 +1,102 @@
1"""
 2BitBake 'Fetch' implementation for Google Cloud Platform Storage.
3
4Class for fetching files from Google Cloud Storage using the
5Google Cloud Storage Python Client. The GCS Python Client must
6be correctly installed, configured and authenticated prior to use.
 8Additionally, gsutil must be installed.
8
9"""
10
11# Copyright (C) 2023, Snap Inc.
12#
13# Based in part on bb.fetch2.s3:
14# Copyright (C) 2017 Andre McCurdy
15#
16# SPDX-License-Identifier: GPL-2.0-only
17#
18# Based on functions from the base bb module, Copyright 2003 Holger Schurig
19
20import os
21import bb
22import urllib.parse, urllib.error
23from bb.fetch2 import FetchMethod
24from bb.fetch2 import FetchError
25from bb.fetch2 import logger
26from bb.fetch2 import runfetchcmd
27
28class GCP(FetchMethod):
29 """
30 Class to fetch urls via GCP's Python API.
31 """
32 def __init__(self):
33 self.gcp_client = None
34
35 def supports(self, ud, d):
36 """
37 Check to see if a given url can be fetched with GCP.
38 """
39 return ud.type in ['gs']
40
41 def recommends_checksum(self, urldata):
42 return True
43
44 def urldata_init(self, ud, d):
45 if 'downloadfilename' in ud.parm:
46 ud.basename = ud.parm['downloadfilename']
47 else:
48 ud.basename = os.path.basename(ud.path)
49
50 ud.localfile = d.expand(urllib.parse.unquote(ud.basename))
51 ud.basecmd = "gsutil stat"
52
53 def get_gcp_client(self):
54 from google.cloud import storage
55 self.gcp_client = storage.Client(project=None)
56
57 def download(self, ud, d):
58 """
59 Fetch urls using the GCP API.
60 Assumes localpath was called first.
61 """
62 logger.debug2(f"Trying to download gs://{ud.host}{ud.path} to {ud.localpath}")
63 if self.gcp_client is None:
64 self.get_gcp_client()
65
66 bb.fetch2.check_network_access(d, ud.basecmd, f"gs://{ud.host}{ud.path}")
67 runfetchcmd("%s %s" % (ud.basecmd, f"gs://{ud.host}{ud.path}"), d)
68
69 # Path sometimes has leading slash, so strip it
70 path = ud.path.lstrip("/")
71 blob = self.gcp_client.bucket(ud.host).blob(path)
72 blob.download_to_filename(ud.localpath)
73
74 # Additional sanity checks copied from the wget class (although there
75 # are no known issues which mean these are required, treat the GCP API
76 # tool with a little healthy suspicion).
77 if not os.path.exists(ud.localpath):
78 raise FetchError(f"The GCP API returned success for gs://{ud.host}{ud.path} but {ud.localpath} doesn't exist?!")
79
80 if os.path.getsize(ud.localpath) == 0:
81 os.remove(ud.localpath)
82 raise FetchError(f"The downloaded file for gs://{ud.host}{ud.path} resulted in a zero size file?! Deleting and failing since this isn't right.")
83
84 return True
85
86 def checkstatus(self, fetch, ud, d):
87 """
88 Check the status of a URL.
89 """
90 logger.debug2(f"Checking status of gs://{ud.host}{ud.path}")
91 if self.gcp_client is None:
92 self.get_gcp_client()
93
94 bb.fetch2.check_network_access(d, ud.basecmd, f"gs://{ud.host}{ud.path}")
95 runfetchcmd("%s %s" % (ud.basecmd, f"gs://{ud.host}{ud.path}"), d)
96
97 # Path sometimes has leading slash, so strip it
98 path = ud.path.lstrip("/")
 99 if not self.gcp_client.bucket(ud.host).blob(path).exists():
100 raise FetchError(f"The GCP API reported that gs://{ud.host}{ud.path} does not exist")
101 else:
102 return True
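
A hedged sketch of the gs scheme implemented above, assuming the google-cloud-storage client and gsutil are installed and authenticated as the module docstring requires; the bucket and object names are placeholders.

    import bb.fetch2

    d.setVar("SRC_URI", "gs://example-bucket/mirrors/foo-1.0.tar.gz")
    fetcher = bb.fetch2.Fetch(d.getVar("SRC_URI").split(), d)
    fetcher.checkstatus()   # raises FetchError if the blob does not exist
    fetcher.download()      # streams the blob into DL_DIR
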
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py
index e3ba80a3f5..c7ff769fdf 100644
--- a/bitbake/lib/bb/fetch2/git.py
+++ b/bitbake/lib/bb/fetch2/git.py
@@ -44,13 +44,27 @@ Supported SRC_URI options are:
44 44
45- nobranch 45- nobranch
46 Don't check the SHA validation for branch. set this option for the recipe 46 Don't check the SHA validation for branch. set this option for the recipe
47 referring to commit which is valid in tag instead of branch. 47 referring to commit which is valid in any namespace (branch, tag, ...)
48 instead of branch.
48 The default is "0", set nobranch=1 if needed. 49 The default is "0", set nobranch=1 if needed.
49 50
51- subpath
52 Limit the checkout to a specific subpath of the tree.
 53 By default, the whole tree is checked out; set subpath=<path> if needed
54
55- destsuffix
56 The name of the path in which to place the checkout.
 57 By default, the path is git/; set destsuffix=<suffix> if needed
58
50- usehead 59- usehead
51 For local git:// urls to use the current branch HEAD as the revision for use with 60 For local git:// urls to use the current branch HEAD as the revision for use with
52 AUTOREV. Implies nobranch. 61 AUTOREV. Implies nobranch.
53 62
63- lfs
64 Enable the checkout to use LFS for large files. This will download all LFS files
65 in the download step, as the unpack step does not have network access.
66 The default is "1", set lfs=0 to skip.
67
54""" 68"""
55 69
56# Copyright (C) 2005 Richard Purdie 70# Copyright (C) 2005 Richard Purdie
@@ -64,14 +78,20 @@ import fnmatch
64import os 78import os
65import re 79import re
66import shlex 80import shlex
81import shutil
67import subprocess 82import subprocess
68import tempfile 83import tempfile
69import bb 84import bb
70import bb.progress 85import bb.progress
86from contextlib import contextmanager
71from bb.fetch2 import FetchMethod 87from bb.fetch2 import FetchMethod
72from bb.fetch2 import runfetchcmd 88from bb.fetch2 import runfetchcmd
73from bb.fetch2 import logger 89from bb.fetch2 import logger
90from bb.fetch2 import trusted_network
91
74 92
93sha1_re = re.compile(r'^[0-9a-f]{40}$')
94slash_re = re.compile(r"/+")
75 95
76class GitProgressHandler(bb.progress.LineFilterProgressHandler): 96class GitProgressHandler(bb.progress.LineFilterProgressHandler):
77 """Extract progress information from git output""" 97 """Extract progress information from git output"""
@@ -130,6 +150,9 @@ class Git(FetchMethod):
130 def supports_checksum(self, urldata): 150 def supports_checksum(self, urldata):
131 return False 151 return False
132 152
153 def cleanup_upon_failure(self):
154 return False
155
133 def urldata_init(self, ud, d): 156 def urldata_init(self, ud, d):
134 """ 157 """
135 init git specific variable within url data 158 init git specific variable within url data
@@ -141,6 +164,11 @@ class Git(FetchMethod):
141 ud.proto = 'file' 164 ud.proto = 'file'
142 else: 165 else:
143 ud.proto = "git" 166 ud.proto = "git"
167 if ud.host == "github.com" and ud.proto == "git":
168 # github stopped supporting git protocol
169 # https://github.blog/2021-09-01-improving-git-protocol-security-github/#no-more-unauthenticated-git
170 ud.proto = "https"
171 bb.warn("URL: %s uses git protocol which is no longer supported by github. Please change to ;protocol=https in the url." % ud.url)
144 172
145 if not ud.proto in ('git', 'file', 'ssh', 'http', 'https', 'rsync'): 173 if not ud.proto in ('git', 'file', 'ssh', 'http', 'https', 'rsync'):
146 raise bb.fetch2.ParameterError("Invalid protocol type", ud.url) 174 raise bb.fetch2.ParameterError("Invalid protocol type", ud.url)
@@ -164,11 +192,18 @@ class Git(FetchMethod):
164 ud.nocheckout = 1 192 ud.nocheckout = 1
165 193
166 ud.unresolvedrev = {} 194 ud.unresolvedrev = {}
167 branches = ud.parm.get("branch", "master").split(',') 195 branches = ud.parm.get("branch", "").split(',')
196 if branches == [""] and not ud.nobranch:
197 bb.warn("URL: %s does not set any branch parameter. The future default branch used by tools and repositories is uncertain and we will therefore soon require this is set in all git urls." % ud.url)
198 branches = ["master"]
168 if len(branches) != len(ud.names): 199 if len(branches) != len(ud.names):
169 raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url) 200 raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url)
170 201
171 ud.cloneflags = "-s -n" 202 ud.noshared = d.getVar("BB_GIT_NOSHARED") == "1"
203
204 ud.cloneflags = "-n"
205 if not ud.noshared:
206 ud.cloneflags += " -s"
172 if ud.bareclone: 207 if ud.bareclone:
173 ud.cloneflags += " --mirror" 208 ud.cloneflags += " --mirror"
174 209
@@ -227,7 +262,7 @@ class Git(FetchMethod):
227 for name in ud.names: 262 for name in ud.names:
228 ud.unresolvedrev[name] = 'HEAD' 263 ud.unresolvedrev[name] = 'HEAD'
229 264
230 ud.basecmd = d.getVar("FETCHCMD_git") or "git -c core.fsyncobjectfiles=0" 265 ud.basecmd = d.getVar("FETCHCMD_git") or "git -c gc.autoDetach=false -c core.pager=cat -c safe.bareRepository=all"
231 266
232 write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0" 267 write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0"
233 ud.write_tarballs = write_tarballs != "0" or ud.rebaseable 268 ud.write_tarballs = write_tarballs != "0" or ud.rebaseable
@@ -236,20 +271,20 @@ class Git(FetchMethod):
236 ud.setup_revisions(d) 271 ud.setup_revisions(d)
237 272
238 for name in ud.names: 273 for name in ud.names:
239 # Ensure anything that doesn't look like a sha256 checksum/revision is translated into one 274 # Ensure any revision that doesn't look like a SHA-1 is translated into one
240 if not ud.revisions[name] or len(ud.revisions[name]) != 40 or (False in [c in "abcdef0123456789" for c in ud.revisions[name]]): 275 if not sha1_re.match(ud.revisions[name] or ''):
241 if ud.revisions[name]: 276 if ud.revisions[name]:
242 ud.unresolvedrev[name] = ud.revisions[name] 277 ud.unresolvedrev[name] = ud.revisions[name]
243 ud.revisions[name] = self.latest_revision(ud, d, name) 278 ud.revisions[name] = self.latest_revision(ud, d, name)
244 279
245 gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.').replace(' ','_')) 280 gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.').replace(' ','_').replace('(', '_').replace(')', '_'))
246 if gitsrcname.startswith('.'): 281 if gitsrcname.startswith('.'):
247 gitsrcname = gitsrcname[1:] 282 gitsrcname = gitsrcname[1:]
248 283
249 # for rebaseable git repo, it is necessary to keep mirror tar ball 284 # For a rebaseable git repo, it is necessary to keep a mirror tar ball
250 # per revision, so that even the revision disappears from the 285 # per revision, so that even if the revision disappears from the
251 # upstream repo in the future, the mirror will remain intact and still 286 # upstream repo in the future, the mirror will remain intact and still
252 # contains the revision 287 # contain the revision
253 if ud.rebaseable: 288 if ud.rebaseable:
254 for name in ud.names: 289 for name in ud.names:
255 gitsrcname = gitsrcname + '_' + ud.revisions[name] 290 gitsrcname = gitsrcname + '_' + ud.revisions[name]
@@ -293,7 +328,10 @@ class Git(FetchMethod):
293 return ud.clonedir 328 return ud.clonedir
294 329
295 def need_update(self, ud, d): 330 def need_update(self, ud, d):
296 return self.clonedir_need_update(ud, d) or self.shallow_tarball_need_update(ud) or self.tarball_need_update(ud) 331 return self.clonedir_need_update(ud, d) \
332 or self.shallow_tarball_need_update(ud) \
333 or self.tarball_need_update(ud) \
334 or self.lfs_need_update(ud, d)
297 335
298 def clonedir_need_update(self, ud, d): 336 def clonedir_need_update(self, ud, d):
299 if not os.path.exists(ud.clonedir): 337 if not os.path.exists(ud.clonedir):
@@ -305,6 +343,15 @@ class Git(FetchMethod):
305 return True 343 return True
306 return False 344 return False
307 345
346 def lfs_need_update(self, ud, d):
347 if self.clonedir_need_update(ud, d):
348 return True
349
350 for name in ud.names:
351 if not self._lfs_objects_downloaded(ud, d, name, ud.clonedir):
352 return True
353 return False
354
308 def clonedir_need_shallow_revs(self, ud, d): 355 def clonedir_need_shallow_revs(self, ud, d):
309 for rev in ud.shallow_revs: 356 for rev in ud.shallow_revs:
310 try: 357 try:
@@ -324,6 +371,16 @@ class Git(FetchMethod):
324 # is not possible 371 # is not possible
325 if bb.utils.to_boolean(d.getVar("BB_FETCH_PREMIRRORONLY")): 372 if bb.utils.to_boolean(d.getVar("BB_FETCH_PREMIRRORONLY")):
326 return True 373 return True
 374 # If the url is not in a trusted network, that is, BB_NO_NETWORK is set to 0
 375 # and BB_ALLOWED_NETWORKS does not contain the host that ud.url uses, then
 376 # we need to try premirrors first, as using upstream is destined to fail.
377 if not trusted_network(d, ud.url):
378 return True
 379 # The following check is to ensure an incremental fetch in downloads; the
 380 # premirror might be stale and not contain the new rev required, which
 381 # would cause a total removal and a fresh clone. So if we can reach the
 382 # network, we prefer upstream over the premirror, even though the premirror
 383 # might already contain the new rev.
327 if os.path.exists(ud.clonedir): 384 if os.path.exists(ud.clonedir):
328 return False 385 return False
329 return True 386 return True
@@ -337,17 +394,54 @@ class Git(FetchMethod):
337 if ud.shallow and os.path.exists(ud.fullshallow) and self.need_update(ud, d): 394 if ud.shallow and os.path.exists(ud.fullshallow) and self.need_update(ud, d):
338 ud.localpath = ud.fullshallow 395 ud.localpath = ud.fullshallow
339 return 396 return
340 elif os.path.exists(ud.fullmirror) and not os.path.exists(ud.clonedir): 397 elif os.path.exists(ud.fullmirror) and self.need_update(ud, d):
341 bb.utils.mkdirhier(ud.clonedir) 398 if not os.path.exists(ud.clonedir):
342 runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=ud.clonedir) 399 bb.utils.mkdirhier(ud.clonedir)
343 400 runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=ud.clonedir)
401 else:
402 tmpdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR'))
403 runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=tmpdir)
404 output = runfetchcmd("%s remote" % ud.basecmd, d, quiet=True, workdir=ud.clonedir)
405 if 'mirror' in output:
406 runfetchcmd("%s remote rm mirror" % ud.basecmd, d, workdir=ud.clonedir)
407 runfetchcmd("%s remote add --mirror=fetch mirror %s" % (ud.basecmd, tmpdir), d, workdir=ud.clonedir)
408 fetch_cmd = "LANG=C %s fetch -f --update-head-ok --progress mirror " % (ud.basecmd)
409 runfetchcmd(fetch_cmd, d, workdir=ud.clonedir)
344 repourl = self._get_repo_url(ud) 410 repourl = self._get_repo_url(ud)
345 411
412 needs_clone = False
413 if os.path.exists(ud.clonedir):
414 # The directory may exist, but not be the top level of a bare git
 415 # repository, in which case it needs to be deleted and re-cloned.
416 try:
417 # Since clones can be bare, use --absolute-git-dir instead of --show-toplevel
418 output = runfetchcmd("LANG=C %s rev-parse --absolute-git-dir" % ud.basecmd, d, workdir=ud.clonedir)
419 toplevel = output.rstrip()
420
421 if not bb.utils.path_is_descendant(toplevel, ud.clonedir):
422 logger.warning("Top level directory '%s' is not a descendant of '%s'. Re-cloning", toplevel, ud.clonedir)
423 needs_clone = True
424 except bb.fetch2.FetchError as e:
425 logger.warning("Unable to get top level for %s (not a git directory?): %s", ud.clonedir, e)
426 needs_clone = True
427 except FileNotFoundError as e:
428 logger.warning("%s", e)
429 needs_clone = True
430
431 if needs_clone:
432 shutil.rmtree(ud.clonedir)
433 else:
434 needs_clone = True
435
346 # If the repo still doesn't exist, fallback to cloning it 436 # If the repo still doesn't exist, fallback to cloning it
347 if not os.path.exists(ud.clonedir): 437 if needs_clone:
348 # We do this since git will use a "-l" option automatically for local urls where possible 438 # We do this since git will use a "-l" option automatically for local urls where possible,
439 # but it doesn't work when git/objects is a symlink, only works when it is a directory.
349 if repourl.startswith("file://"): 440 if repourl.startswith("file://"):
350 repourl = repourl[7:] 441 repourl_path = repourl[7:]
442 objects = os.path.join(repourl_path, 'objects')
443 if os.path.isdir(objects) and not os.path.islink(objects):
444 repourl = repourl_path
351 clone_cmd = "LANG=C %s clone --bare --mirror %s %s --progress" % (ud.basecmd, shlex.quote(repourl), ud.clonedir) 445 clone_cmd = "LANG=C %s clone --bare --mirror %s %s --progress" % (ud.basecmd, shlex.quote(repourl), ud.clonedir)
352 if ud.proto.lower() != 'file': 446 if ud.proto.lower() != 'file':
353 bb.fetch2.check_network_access(d, clone_cmd, ud.url) 447 bb.fetch2.check_network_access(d, clone_cmd, ud.url)
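
The needs_clone logic above validates that ud.clonedir really is (or contains) a bare git repository before reusing it. A standalone approximation using only git and the standard library; bb.utils.path_is_descendant is approximated here with a realpath prefix check:

    import os, subprocess

    def is_valid_bare_clone(clonedir):
        try:
            out = subprocess.run(
                ["git", "rev-parse", "--absolute-git-dir"],
                cwd=clonedir, capture_output=True, text=True, check=True,
            ).stdout.rstrip()
        except (subprocess.CalledProcessError, FileNotFoundError):
            return False
        toplevel = os.path.realpath(out)
        clonedir = os.path.realpath(clonedir)
        return toplevel == clonedir or toplevel.startswith(clonedir + os.sep)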
@@ -361,7 +455,11 @@ class Git(FetchMethod):
                 runfetchcmd("%s remote rm origin" % ud.basecmd, d, workdir=ud.clonedir)
 
             runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=ud.clonedir)
-            fetch_cmd = "LANG=C %s fetch -f --progress %s refs/*:refs/*" % (ud.basecmd, shlex.quote(repourl))
+
+            if ud.nobranch:
+                fetch_cmd = "LANG=C %s fetch -f --progress %s refs/*:refs/*" % (ud.basecmd, shlex.quote(repourl))
+            else:
+                fetch_cmd = "LANG=C %s fetch -f --progress %s refs/heads/*:refs/heads/* refs/tags/*:refs/tags/*" % (ud.basecmd, shlex.quote(repourl))
             if ud.proto.lower() != 'file':
                 bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
             progresshandler = GitProgressHandler(d)
@@ -384,17 +482,16 @@ class Git(FetchMethod):
         if missing_rev:
             raise bb.fetch2.FetchError("Unable to find revision %s even from upstream" % missing_rev)
 
-        if self._contains_lfs(ud, d, ud.clonedir) and self._need_lfs(ud):
+        if self.lfs_need_update(ud, d):
             # Unpack temporary working copy, use it to run 'git checkout' to force pre-fetching
-            # of all LFS blobs needed at the the srcrev.
+            # of all LFS blobs needed at the srcrev.
             #
             # It would be nice to just do this inline here by running 'git-lfs fetch'
             # on the bare clonedir, but that operation requires a working copy on some
             # releases of Git LFS.
-            tmpdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR'))
-            try:
+            with tempfile.TemporaryDirectory(dir=d.getVar('DL_DIR')) as tmpdir:
                 # Do the checkout. This implicitly involves a Git LFS fetch.
-                self.unpack(ud, tmpdir, d)
+                Git.unpack(self, ud, tmpdir, d)
 
                 # Scoop up a copy of any stuff that Git LFS downloaded. Merge them into
                 # the bare clonedir.
@@ -408,12 +505,24 @@ class Git(FetchMethod):
                 # Only do this if the unpack resulted in a .git/lfs directory being
                 # created; this only happens if at least one blob needed to be
                 # downloaded.
-                if os.path.exists(os.path.join(tmpdir, "git", ".git", "lfs")):
-                    runfetchcmd("tar -cf - lfs | tar -xf - -C %s" % ud.clonedir, d, workdir="%s/git/.git" % tmpdir)
-        finally:
-            bb.utils.remove(tmpdir, recurse=True)
+                if os.path.exists(os.path.join(ud.destdir, ".git", "lfs")):
+                    runfetchcmd("tar -cf - lfs | tar -xf - -C %s" % ud.clonedir, d, workdir="%s/.git" % ud.destdir)
 
     def build_mirror_data(self, ud, d):
+
+        # Create as a temp file and move atomically into position to avoid races
+        @contextmanager
+        def create_atomic(filename):
+            fd, tfile = tempfile.mkstemp(dir=os.path.dirname(filename))
+            try:
+                yield tfile
+                umask = os.umask(0o666)
+                os.umask(umask)
+                os.chmod(tfile, (0o666 & ~umask))
+                os.rename(tfile, filename)
+            finally:
+                os.close(fd)
+
         if ud.shallow and ud.write_shallow_tarballs:
             if not os.path.exists(ud.fullshallow):
                 if os.path.islink(ud.fullshallow):
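
create_atomic() above prevents a concurrent reader from seeing a partially written mirror tarball. The same pattern as a self-contained helper; the umask probe follows the diff, with 0 used here as the conventional probe value, and rename() is atomic on POSIX within one filesystem:

    import os, tempfile
    from contextlib import contextmanager

    @contextmanager
    def create_atomic(filename):
        # write to a temp file in the target directory, then rename into place
        fd, tfile = tempfile.mkstemp(dir=os.path.dirname(filename))
        try:
            yield tfile
            umask = os.umask(0)   # read the current umask back
            os.umask(umask)
            os.chmod(tfile, 0o666 & ~umask)
            os.rename(tfile, filename)
        finally:
            os.close(fd)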
@@ -424,7 +533,8 @@ class Git(FetchMethod):
                     self.clone_shallow_local(ud, shallowclone, d)
 
                     logger.info("Creating tarball of git repository")
-                    runfetchcmd("tar -czf %s ." % ud.fullshallow, d, workdir=shallowclone)
+                    with create_atomic(ud.fullshallow) as tfile:
+                        runfetchcmd("tar -czf %s ." % tfile, d, workdir=shallowclone)
                     runfetchcmd("touch %s.done" % ud.fullshallow, d)
                 finally:
                     bb.utils.remove(tempdir, recurse=True)
@@ -433,7 +543,11 @@ class Git(FetchMethod):
                 os.unlink(ud.fullmirror)
 
             logger.info("Creating tarball of git repository")
-            runfetchcmd("tar -czf %s ." % ud.fullmirror, d, workdir=ud.clonedir)
+            with create_atomic(ud.fullmirror) as tfile:
+                mtime = runfetchcmd("{} log --all -1 --format=%cD".format(ud.basecmd), d,
+                        quiet=True, workdir=ud.clonedir)
+                runfetchcmd("tar -czf %s --owner oe:0 --group oe:0 --mtime \"%s\" ."
+                        % (tfile, mtime), d, workdir=ud.clonedir)
             runfetchcmd("touch %s.done" % ud.fullmirror, d)
 
     def clone_shallow_local(self, ud, dest, d):
@@ -495,18 +609,31 @@ class Git(FetchMethod):
     def unpack(self, ud, destdir, d):
         """ unpack the downloaded src to destdir"""
 
-        subdir = ud.parm.get("subpath", "")
-        if subdir != "":
-            readpathspec = ":%s" % subdir
-            def_destsuffix = "%s/" % os.path.basename(subdir.rstrip('/'))
-        else:
-            readpathspec = ""
-            def_destsuffix = "git/"
+        subdir = ud.parm.get("subdir")
+        subpath = ud.parm.get("subpath")
+        readpathspec = ""
+        def_destsuffix = "git/"
+
+        if subpath:
+            readpathspec = ":%s" % subpath
+            def_destsuffix = "%s/" % os.path.basename(subpath.rstrip('/'))
+
+        if subdir:
+            # If 'subdir' param exists, create a dir and use it as destination for unpack cmd
+            if os.path.isabs(subdir):
+                if not os.path.realpath(subdir).startswith(os.path.realpath(destdir)):
+                    raise bb.fetch2.UnpackError("subdir argument isn't a subdirectory of unpack root %s" % destdir, ud.url)
+                destdir = subdir
+            else:
+                destdir = os.path.join(destdir, subdir)
+            def_destsuffix = ""
 
         destsuffix = ud.parm.get("destsuffix", def_destsuffix)
         destdir = ud.destdir = os.path.join(destdir, destsuffix)
         if os.path.exists(destdir):
             bb.utils.prunedir(destdir)
+        if not ud.bareclone:
+            ud.unpack_tracer.unpack("git", destdir)
 
         need_lfs = self._need_lfs(ud)
 
@@ -516,13 +643,12 @@ class Git(FetchMethod):
         source_found = False
         source_error = []
 
-        if not source_found:
-            clonedir_is_up_to_date = not self.clonedir_need_update(ud, d)
-            if clonedir_is_up_to_date:
-                runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, destdir), d)
-                source_found = True
-            else:
-                source_error.append("clone directory not available or not up to date: " + ud.clonedir)
+        clonedir_is_up_to_date = not self.clonedir_need_update(ud, d)
+        if clonedir_is_up_to_date:
+            runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, destdir), d)
+            source_found = True
+        else:
+            source_error.append("clone directory not available or not up to date: " + ud.clonedir)
 
         if not source_found:
             if ud.shallow:
@@ -546,9 +672,11 @@ class Git(FetchMethod):
                 raise bb.fetch2.FetchError("Repository %s has LFS content, install git-lfs on host to download (or set lfs=0 to ignore it)" % (repourl))
             elif not need_lfs:
                 bb.note("Repository %s has LFS content but it is not being fetched" % (repourl))
+            else:
+                runfetchcmd("%s lfs install --local" % ud.basecmd, d, workdir=destdir)
 
         if not ud.nocheckout:
-            if subdir != "":
+            if subpath:
                 runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d,
                             workdir=destdir)
                 runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d, workdir=destdir)
@@ -597,6 +725,35 @@ class Git(FetchMethod):
             raise bb.fetch2.FetchError("The command '%s' gave output with more then 1 line unexpectedly, output: '%s'" % (cmd, output))
         return output.split()[0] != "0"
 
+    def _lfs_objects_downloaded(self, ud, d, name, wd):
+        """
+        Verifies whether the LFS objects for requested revisions have already been downloaded
+        """
+        # Bail out early if this repository doesn't use LFS
+        if not self._need_lfs(ud) or not self._contains_lfs(ud, d, wd):
+            return True
+
+        # The Git LFS specification specifies ([1]) the LFS folder layout so it should be safe to check for file
+        # existence.
+        # [1] https://github.com/git-lfs/git-lfs/blob/main/docs/spec.md#intercepting-git
+        cmd = "%s lfs ls-files -l %s" \
+                % (ud.basecmd, ud.revisions[name])
+        output = runfetchcmd(cmd, d, quiet=True, workdir=wd).rstrip()
+        # Do not do any further matching if no objects are managed by LFS
+        if not output:
+            return True
+
+        # Match all lines beginning with the hexadecimal OID
+        oid_regex = re.compile("^(([a-fA-F0-9]{2})([a-fA-F0-9]{2})[A-Fa-f0-9]+)")
+        for line in output.split("\n"):
+            oid = re.search(oid_regex, line)
+            if not oid:
+                bb.warn("git lfs ls-files output '%s' did not match expected format." % line)
+            if not os.path.exists(os.path.join(wd, "lfs", "objects", oid.group(2), oid.group(3), oid.group(1))):
+                return False
+
+        return True
+
     def _need_lfs(self, ud):
         return ud.parm.get("lfs", "1") == "1"
 
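
_lfs_objects_downloaded() relies on the fixed on-disk layout from the Git LFS spec: a blob with OID abcd... lives at lfs/objects/ab/cd/<full OID>. A small illustration of that path construction (the OID value is made up):

    import os

    def lfs_object_path(git_dir, oid):
        return os.path.join(git_dir, "lfs", "objects", oid[0:2], oid[2:4], oid)

    oid = "0a1b" + "c" * 60   # hypothetical OID
    print(lfs_object_path(".git", oid))   # .git/lfs/objects/0a/1b/0a1bccc...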
@@ -605,13 +762,11 @@ class Git(FetchMethod):
         Check if the repository has 'lfs' (large file) content
         """
 
-        if not ud.nobranch:
-            branchname = ud.branches[ud.names[0]]
-        else:
-            branchname = "master"
-
-        # The bare clonedir doesn't use the remote names; it has the branch immediately.
-        if wd == ud.clonedir:
+        if ud.nobranch:
+            # If no branch is specified, use the current git commit
+            refname = self._build_revision(ud, d, ud.names[0])
+        elif wd == ud.clonedir:
+            # The bare clonedir doesn't use the remote names; it has the branch immediately.
             refname = ud.branches[ud.names[0]]
         else:
             refname = "origin/%s" % ud.branches[ud.names[0]]
@@ -654,7 +809,6 @@ class Git(FetchMethod):
         Return a unique key for the url
         """
         # Collapse adjacent slashes
-        slash_re = re.compile(r"/+")
         return "git:" + ud.host + slash_re.sub(".", ud.path) + ud.unresolvedrev[name]
 
     def _lsremote(self, ud, d, search):
@@ -687,6 +841,12 @@ class Git(FetchMethod):
687 """ 841 """
688 Compute the HEAD revision for the url 842 Compute the HEAD revision for the url
689 """ 843 """
844 if not d.getVar("__BBSRCREV_SEEN"):
845 raise bb.fetch2.FetchError("Recipe uses a floating tag/branch '%s' for repo '%s' without a fixed SRCREV yet doesn't call bb.fetch2.get_srcrev() (use SRCPV in PV for OE)." % (ud.unresolvedrev[name], ud.host+ud.path))
846
847 # Ensure we mark as not cached
848 bb.fetch2.mark_recipe_nocache(d)
849
690 output = self._lsremote(ud, d, "") 850 output = self._lsremote(ud, d, "")
691 # Tags of the form ^{} may not work, need to fallback to other form 851 # Tags of the form ^{} may not work, need to fallback to other form
692 if ud.unresolvedrev[name][:5] == "refs/" or ud.usehead: 852 if ud.unresolvedrev[name][:5] == "refs/" or ud.usehead:
@@ -711,38 +871,42 @@ class Git(FetchMethod):
711 """ 871 """
712 pupver = ('', '') 872 pupver = ('', '')
713 873
714 tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or r"(?P<pver>([0-9][\.|_]?)+)")
715 try: 874 try:
716 output = self._lsremote(ud, d, "refs/tags/*") 875 output = self._lsremote(ud, d, "refs/tags/*")
717 except (bb.fetch2.FetchError, bb.fetch2.NetworkAccess) as e: 876 except (bb.fetch2.FetchError, bb.fetch2.NetworkAccess) as e:
718 bb.note("Could not list remote: %s" % str(e)) 877 bb.note("Could not list remote: %s" % str(e))
719 return pupver 878 return pupver
720 879
880 rev_tag_re = re.compile(r"([0-9a-f]{40})\s+refs/tags/(.*)")
881 pver_re = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or r"(?P<pver>([0-9][\.|_]?)+)")
882 nonrel_re = re.compile(r"(alpha|beta|rc|final)+")
883
721 verstring = "" 884 verstring = ""
722 revision = ""
723 for line in output.split("\n"): 885 for line in output.split("\n"):
724 if not line: 886 if not line:
725 break 887 break
726 888
727 tag_head = line.split("/")[-1] 889 m = rev_tag_re.match(line)
890 if not m:
891 continue
892
893 (revision, tag) = m.groups()
894
728 # Ignore non-released branches 895 # Ignore non-released branches
729 m = re.search(r"(alpha|beta|rc|final)+", tag_head) 896 if nonrel_re.search(tag):
730 if m:
731 continue 897 continue
732 898
733 # search for version in the line 899 # search for version in the line
734 tag = tagregex.search(tag_head) 900 m = pver_re.search(tag)
735 if tag is None: 901 if not m:
736 continue 902 continue
737 903
738 tag = tag.group('pver') 904 pver = m.group('pver').replace("_", ".")
739 tag = tag.replace("_", ".")
740 905
741 if verstring and bb.utils.vercmp(("0", tag, ""), ("0", verstring, "")) < 0: 906 if verstring and bb.utils.vercmp(("0", pver, ""), ("0", verstring, "")) < 0:
742 continue 907 continue
743 908
744 verstring = tag 909 verstring = pver
745 revision = line.split()[0]
746 pupver = (verstring, revision) 910 pupver = (verstring, revision)
747 911
748 return pupver 912 return pupver
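
The rewritten latest_versionstring() now takes the revision from the same regex match as the tag instead of re-splitting the line. A worked example of the three regexes against one ls-remote line (the tag value is invented):

    import re

    rev_tag_re = re.compile(r"([0-9a-f]{40})\s+refs/tags/(.*)")
    pver_re = re.compile(r"(?P<pver>([0-9][\.|_]?)+)")   # the default UPSTREAM_CHECK_GITTAGREGEX
    nonrel_re = re.compile(r"(alpha|beta|rc|final)+")

    line = "f" * 40 + "\trefs/tags/v2.36_1"
    revision, tag = rev_tag_re.match(line).groups()
    assert not nonrel_re.search(tag)                    # a released tag
    pver = pver_re.search(tag).group("pver").replace("_", ".")
    print(pver, revision[:7])                           # 2.36.1 fffffff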
diff --git a/bitbake/lib/bb/fetch2/gitsm.py b/bitbake/lib/bb/fetch2/gitsm.py
index a4527bf364..f7f3af7212 100644
--- a/bitbake/lib/bb/fetch2/gitsm.py
+++ b/bitbake/lib/bb/fetch2/gitsm.py
@@ -88,9 +88,9 @@ class GitSM(Git):
                 subrevision[m] = module_hash.split()[2]
 
                 # Convert relative to absolute uri based on parent uri
-                if uris[m].startswith('..'):
+                if uris[m].startswith('..') or uris[m].startswith('./'):
                     newud = copy.copy(ud)
-                    newud.path = os.path.realpath(os.path.join(newud.path, uris[m]))
+                    newud.path = os.path.normpath(os.path.join(newud.path, uris[m]))
                     uris[m] = Git._get_repo_url(self, newud)
 
         for module in submodules:
@@ -115,10 +115,21 @@ class GitSM(Git):
                 # This has to be a file reference
                 proto = "file"
                 url = "gitsm://" + uris[module]
+            if url.endswith("{}{}".format(ud.host, ud.path)):
+                raise bb.fetch2.FetchError("Submodule refers to the parent repository. This will cause deadlock situation in current version of Bitbake." \
+                                           "Consider using git fetcher instead.")
 
             url += ';protocol=%s' % proto
             url += ";name=%s" % module
             url += ";subpath=%s" % module
+            url += ";nobranch=1"
+            url += ";lfs=%s" % self._need_lfs(ud)
+            # Note that adding "user=" here to give credentials to the
+            # submodule is not supported. Since using SRC_URI to give a git://
+            # URL a password is not supported, one has to use one of the
+            # recommended ways (e.g. ~/.netrc or SSH config), which do specify
+            # the user (see the comment in git.py).
+            # So, we will not take patches adding "user=" support here.
 
             ld = d.createCopy()
             # Not necessary to set SRC_URI, since we're passing the URI to
@@ -140,16 +151,6 @@ class GitSM(Git):
         if Git.need_update(self, ud, d):
             return True
 
-        try:
-            # Check for the nugget dropped by the download operation
-            known_srcrevs = runfetchcmd("%s config --get-all bitbake.srcrev" % \
-                            (ud.basecmd), d, workdir=ud.clonedir)
-
-            if ud.revisions[ud.names[0]] in known_srcrevs.split():
-                return False
-        except bb.fetch2.FetchError:
-            pass
-
         need_update_list = []
         def need_update_submodule(ud, url, module, modpath, workdir, d):
             url += ";bareclone=1;nobranch=1"
@@ -172,13 +173,8 @@ class GitSM(Git):
             shutil.rmtree(tmpdir)
         else:
             self.process_submodules(ud, ud.clonedir, need_update_submodule, d)
-            if len(need_update_list) == 0:
-                # We already have the required commits of all submodules. Drop
-                # a nugget so we don't need to check again.
-                runfetchcmd("%s config --add bitbake.srcrev %s" % \
-                        (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir)
 
-        if len(need_update_list) > 0:
+        if need_update_list:
             logger.debug('gitsm: Submodules requiring update: %s' % (' '.join(need_update_list)))
             return True
 
@@ -209,9 +205,6 @@ class GitSM(Git):
             shutil.rmtree(tmpdir)
         else:
             self.process_submodules(ud, ud.clonedir, download_submodule, d)
-            # Drop a nugget for the srcrev we've fetched (used by need_update)
-            runfetchcmd("%s config --add bitbake.srcrev %s" % \
-                    (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir)
 
     def unpack(self, ud, destdir, d):
         def unpack_submodules(ud, url, module, modpath, workdir, d):
@@ -225,6 +218,10 @@ class GitSM(Git):
 
             try:
                 newfetch = Fetch([url], d, cache=False)
+                # modpath is needed by unpack tracer to calculate submodule
+                # checkout dir
+                new_ud = newfetch.ud[url]
+                new_ud.modpath = modpath
                 newfetch.unpack(root=os.path.dirname(os.path.join(repo_conf, 'modules', module)))
             except Exception as e:
                 logger.error('gitsm: submodule unpack failed: %s %s' % (type(e).__name__, str(e)))
@@ -250,10 +247,12 @@ class GitSM(Git):
         ret = self.process_submodules(ud, ud.destdir, unpack_submodules, d)
 
         if not ud.bareclone and ret:
-            # All submodules should already be downloaded and configured in the tree. This simply sets
-            # up the configuration and checks out the files. The main project config should remain
-            # unmodified, and no download from the internet should occur.
-            runfetchcmd("%s submodule update --recursive --no-fetch" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)
+            # All submodules should already be downloaded and configured in the tree. This simply
+            # sets up the configuration and checks out the files. The main project config should
+            # remain unmodified, and no download from the internet should occur. As such, lfs smudge
+            # should also be skipped as these files were already smudged in the fetch stage if lfs
+            # was enabled.
+            runfetchcmd("GIT_LFS_SKIP_SMUDGE=1 %s submodule update --recursive --no-fetch" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)
 
     def implicit_urldata(self, ud, d):
         import shutil, subprocess, tempfile
diff --git a/bitbake/lib/bb/fetch2/hg.py b/bitbake/lib/bb/fetch2/hg.py
index 063e13008a..cbff8c490c 100644
--- a/bitbake/lib/bb/fetch2/hg.py
+++ b/bitbake/lib/bb/fetch2/hg.py
@@ -242,6 +242,7 @@ class Hg(FetchMethod):
             revflag = "-r %s" % ud.revision
         subdir = ud.parm.get("destsuffix", ud.module)
         codir = "%s/%s" % (destdir, subdir)
+        ud.unpack_tracer.unpack("hg", codir)
 
         scmdata = ud.parm.get("scmdata", "")
         if scmdata != "nokeep":
diff --git a/bitbake/lib/bb/fetch2/local.py b/bitbake/lib/bb/fetch2/local.py
index e7d1c8c58f..7d7668110e 100644
--- a/bitbake/lib/bb/fetch2/local.py
+++ b/bitbake/lib/bb/fetch2/local.py
@@ -41,9 +41,9 @@ class Local(FetchMethod):
41 """ 41 """
42 Return the local filename of a given url assuming a successful fetch. 42 Return the local filename of a given url assuming a successful fetch.
43 """ 43 """
44 return self.localpaths(urldata, d)[-1] 44 return self.localfile_searchpaths(urldata, d)[-1]
45 45
46 def localpaths(self, urldata, d): 46 def localfile_searchpaths(self, urldata, d):
47 """ 47 """
48 Return the local filename of a given url assuming a successful fetch. 48 Return the local filename of a given url assuming a successful fetch.
49 """ 49 """
@@ -51,18 +51,14 @@ class Local(FetchMethod):
         path = urldata.decodedurl
         newpath = path
         if path[0] == "/":
+            logger.debug2("Using absolute %s" % (path))
             return [path]
         filespath = d.getVar('FILESPATH')
         if filespath:
             logger.debug2("Searching for %s in paths:\n    %s" % (path, "\n    ".join(filespath.split(":"))))
             newpath, hist = bb.utils.which(filespath, path, history=True)
+            logger.debug2("Using %s for %s" % (newpath, path))
             searched.extend(hist)
-        if not os.path.exists(newpath):
-            dldirfile = os.path.join(d.getVar("DL_DIR"), path)
-            logger.debug2("Defaulting to %s for %s" % (dldirfile, path))
-            bb.utils.mkdirhier(os.path.dirname(dldirfile))
-            searched.append(dldirfile)
-            return searched
         return searched
 
     def need_update(self, ud, d):
@@ -78,9 +74,7 @@ class Local(FetchMethod):
         filespath = d.getVar('FILESPATH')
         if filespath:
             locations = filespath.split(":")
-        locations.append(d.getVar("DL_DIR"))
-
-        msg = "Unable to find file " + urldata.url + " anywhere. The paths that were searched were:\n    " + "\n    ".join(locations)
+        msg = "Unable to find file " + urldata.url + " anywhere to download to " + urldata.localpath + ". The paths that were searched were:\n    " + "\n    ".join(locations)
         raise FetchError(msg)
 
         return True
diff --git a/bitbake/lib/bb/fetch2/npm.py b/bitbake/lib/bb/fetch2/npm.py
index 47898509ff..15f3f19bc8 100644
--- a/bitbake/lib/bb/fetch2/npm.py
+++ b/bitbake/lib/bb/fetch2/npm.py
@@ -44,17 +44,24 @@ def npm_package(package):
44 """Convert the npm package name to remove unsupported character""" 44 """Convert the npm package name to remove unsupported character"""
45 # Scoped package names (with the @) use the same naming convention 45 # Scoped package names (with the @) use the same naming convention
46 # as the 'npm pack' command. 46 # as the 'npm pack' command.
47 if package.startswith("@"): 47 name = re.sub("/", "-", package)
48 return re.sub("/", "-", package[1:]) 48 name = name.lower()
49 return package 49 name = re.sub(r"[^\-a-z0-9]", "", name)
50 name = name.strip("-")
51 return name
52
50 53
51def npm_filename(package, version): 54def npm_filename(package, version):
52 """Get the filename of a npm package""" 55 """Get the filename of a npm package"""
53 return npm_package(package) + "-" + version + ".tgz" 56 return npm_package(package) + "-" + version + ".tgz"
54 57
55def npm_localfile(package, version): 58def npm_localfile(package, version=None):
56 """Get the local filename of a npm package""" 59 """Get the local filename of a npm package"""
57 return os.path.join("npm2", npm_filename(package, version)) 60 if version is not None:
61 filename = npm_filename(package, version)
62 else:
63 filename = package
64 return os.path.join("npm2", filename)
58 65
59def npm_integrity(integrity): 66def npm_integrity(integrity):
60 """ 67 """
@@ -69,41 +76,52 @@ def npm_unpack(tarball, destdir, d):
     bb.utils.mkdirhier(destdir)
     cmd = "tar --extract --gzip --file=%s" % shlex.quote(tarball)
     cmd += " --no-same-owner"
+    cmd += " --delay-directory-restore"
     cmd += " --strip-components=1"
     runfetchcmd(cmd, d, workdir=destdir)
+    runfetchcmd("chmod -R +X '%s'" % (destdir), d, quiet=True, workdir=destdir)
 
 class NpmEnvironment(object):
     """
     Using a npm config file seems more reliable than using cli arguments.
     This class allows to create a controlled environment for npm commands.
     """
-    def __init__(self, d, configs=None):
+    def __init__(self, d, configs=[], npmrc=None):
         self.d = d
-        self.configs = configs
+
+        self.user_config = tempfile.NamedTemporaryFile(mode="w", buffering=1)
+        for key, value in configs:
+            self.user_config.write("%s=%s\n" % (key, value))
+
+        if npmrc:
+            self.global_config_name = npmrc
+        else:
+            self.global_config_name = "/dev/null"
+
+    def __del__(self):
+        if self.user_config:
+            self.user_config.close()
 
     def run(self, cmd, args=None, configs=None, workdir=None):
         """Run npm command in a controlled environment"""
         with tempfile.TemporaryDirectory() as tmpdir:
             d = bb.data.createCopy(self.d)
+            d.setVar("PATH", d.getVar("PATH")) # PATH might contain $HOME - evaluate it before patching
             d.setVar("HOME", tmpdir)
 
-            cfgfile = os.path.join(tmpdir, "npmrc")
-
             if not workdir:
                 workdir = tmpdir
 
             def _run(cmd):
-                cmd = "NPM_CONFIG_USERCONFIG=%s " % cfgfile + cmd
-                cmd = "NPM_CONFIG_GLOBALCONFIG=%s " % cfgfile + cmd
+                cmd = "NPM_CONFIG_USERCONFIG=%s " % (self.user_config.name) + cmd
+                cmd = "NPM_CONFIG_GLOBALCONFIG=%s " % (self.global_config_name) + cmd
                 return runfetchcmd(cmd, d, workdir=workdir)
 
-            if self.configs:
-                for key, value in self.configs:
-                    _run("npm config set %s %s" % (key, shlex.quote(value)))
-
             if configs:
+                bb.warn("Use of configs argument of NpmEnvironment.run() function"
+                        " is deprecated. Please use args argument instead.")
                 for key, value in configs:
-                    _run("npm config set %s %s" % (key, shlex.quote(value)))
+                    cmd += " --%s=%s" % (key, shlex.quote(value))
 
             if args:
                 for key, value in args:
109 for key, value in args: 127 for key, value in args:
@@ -142,12 +160,12 @@ class Npm(FetchMethod):
142 raise ParameterError("Invalid 'version' parameter", ud.url) 160 raise ParameterError("Invalid 'version' parameter", ud.url)
143 161
144 # Extract the 'registry' part of the url 162 # Extract the 'registry' part of the url
145 ud.registry = re.sub(r"^npm://", "http://", ud.url.split(";")[0]) 163 ud.registry = re.sub(r"^npm://", "https://", ud.url.split(";")[0])
146 164
147 # Using the 'downloadfilename' parameter as local filename 165 # Using the 'downloadfilename' parameter as local filename
148 # or the npm package name. 166 # or the npm package name.
149 if "downloadfilename" in ud.parm: 167 if "downloadfilename" in ud.parm:
150 ud.localfile = d.expand(ud.parm["downloadfilename"]) 168 ud.localfile = npm_localfile(d.expand(ud.parm["downloadfilename"]))
151 else: 169 else:
152 ud.localfile = npm_localfile(ud.package, ud.version) 170 ud.localfile = npm_localfile(ud.package, ud.version)
153 171
@@ -165,14 +183,14 @@ class Npm(FetchMethod):
165 183
166 def _resolve_proxy_url(self, ud, d): 184 def _resolve_proxy_url(self, ud, d):
167 def _npm_view(): 185 def _npm_view():
168 configs = [] 186 args = []
169 configs.append(("json", "true")) 187 args.append(("json", "true"))
170 configs.append(("registry", ud.registry)) 188 args.append(("registry", ud.registry))
171 pkgver = shlex.quote(ud.package + "@" + ud.version) 189 pkgver = shlex.quote(ud.package + "@" + ud.version)
172 cmd = ud.basecmd + " view %s" % pkgver 190 cmd = ud.basecmd + " view %s" % pkgver
173 env = NpmEnvironment(d) 191 env = NpmEnvironment(d)
174 check_network_access(d, cmd, ud.registry) 192 check_network_access(d, cmd, ud.registry)
175 view_string = env.run(cmd, configs=configs) 193 view_string = env.run(cmd, args=args)
176 194
177 if not view_string: 195 if not view_string:
178 raise FetchError("Unavailable package %s" % pkgver, ud.url) 196 raise FetchError("Unavailable package %s" % pkgver, ud.url)
@@ -280,6 +298,7 @@ class Npm(FetchMethod):
         destsuffix = ud.parm.get("destsuffix", "npm")
         destdir = os.path.join(rootdir, destsuffix)
         npm_unpack(ud.localpath, destdir, d)
+        ud.unpack_tracer.unpack("npm", destdir)
 
     def clean(self, ud, d):
         """Clean any existing full or partial download"""
diff --git a/bitbake/lib/bb/fetch2/npmsw.py b/bitbake/lib/bb/fetch2/npmsw.py
index 0c3511d8ab..ff5f8dc755 100644
--- a/bitbake/lib/bb/fetch2/npmsw.py
+++ b/bitbake/lib/bb/fetch2/npmsw.py
@@ -24,11 +24,14 @@ import bb
 from bb.fetch2 import Fetch
 from bb.fetch2 import FetchMethod
 from bb.fetch2 import ParameterError
+from bb.fetch2 import runfetchcmd
 from bb.fetch2 import URI
 from bb.fetch2.npm import npm_integrity
 from bb.fetch2.npm import npm_localfile
 from bb.fetch2.npm import npm_unpack
 from bb.utils import is_semver
+from bb.utils import lockfile
+from bb.utils import unlockfile
 
 def foreach_dependencies(shrinkwrap, callback=None, dev=False):
     """
@@ -38,8 +41,9 @@ def foreach_dependencies(shrinkwrap, callback=None, dev=False):
     with:
         name = the package name (string)
         params = the package parameters (dictionary)
-        deptree = the package dependency tree (array of strings)
+        destdir = the destination of the package (string)
     """
+    # For handling old style dependencies entries in shrinkwrap files
     def _walk_deps(deps, deptree):
         for name in deps:
             subtree = [*deptree, name]
@@ -49,9 +53,22 @@ def foreach_dependencies(shrinkwrap, callback=None, dev=False):
                 continue
             elif deps[name].get("bundled", False):
                 continue
-            callback(name, deps[name], subtree)
-
-    _walk_deps(shrinkwrap.get("dependencies", {}), [])
+            destsubdirs = [os.path.join("node_modules", dep) for dep in subtree]
+            destsuffix = os.path.join(*destsubdirs)
+            callback(name, deps[name], destsuffix)
+
+    # packages entry means new style shrinkwrap file, else use dependencies
+    packages = shrinkwrap.get("packages", None)
+    if packages is not None:
+        for package in packages:
+            if package != "":
+                name = package.split('node_modules/')[-1]
+                package_infos = packages.get(package, {})
+                if dev == False and package_infos.get("dev", False):
+                    continue
+                callback(name, package_infos, package)
+    else:
+        _walk_deps(shrinkwrap.get("dependencies", {}), [])
 
 class NpmShrinkWrap(FetchMethod):
     """Class to fetch all package from a shrinkwrap file"""
@@ -72,19 +89,22 @@ class NpmShrinkWrap(FetchMethod):
         # Resolve the dependencies
         ud.deps = []
 
-        def _resolve_dependency(name, params, deptree):
+        def _resolve_dependency(name, params, destsuffix):
             url = None
             localpath = None
             extrapaths = []
-            destsubdirs = [os.path.join("node_modules", dep) for dep in deptree]
-            destsuffix = os.path.join(*destsubdirs)
+            unpack = True
 
             integrity = params.get("integrity", None)
             resolved = params.get("resolved", None)
             version = params.get("version", None)
 
             # Handle registry sources
-            if is_semver(version) and resolved and integrity:
+            if is_semver(version) and integrity:
+                # Handle duplicate dependencies without url
+                if not resolved:
+                    return
+
                 localfile = npm_localfile(name, version)
 
                 uri = URI(resolved)
@@ -109,7 +129,7 @@ class NpmShrinkWrap(FetchMethod):
 
             # Handle http tarball sources
             elif version.startswith("http") and integrity:
-                localfile = os.path.join("npm2", os.path.basename(version))
+                localfile = npm_localfile(os.path.basename(version))
 
                 uri = URI(version)
                 uri.params["downloadfilename"] = localfile
@@ -121,8 +141,28 @@ class NpmShrinkWrap(FetchMethod):
 
                 localpath = os.path.join(d.getVar("DL_DIR"), localfile)
 
+            # Handle local tarball and link sources
+            elif version.startswith("file"):
+                localpath = version[5:]
+                if not version.endswith(".tgz"):
+                    unpack = False
+
             # Handle git sources
-            elif version.startswith("git"):
+            elif version.startswith(("git", "bitbucket","gist")) or (
+                not version.endswith((".tgz", ".tar", ".tar.gz"))
+                and not version.startswith((".", "@", "/"))
+                and "/" in version
+            ):
+                if version.startswith("github:"):
+                    version = "git+https://github.com/" + version[len("github:"):]
+                elif version.startswith("gist:"):
+                    version = "git+https://gist.github.com/" + version[len("gist:"):]
+                elif version.startswith("bitbucket:"):
+                    version = "git+https://bitbucket.org/" + version[len("bitbucket:"):]
+                elif version.startswith("gitlab:"):
+                    version = "git+https://gitlab.com/" + version[len("gitlab:"):]
+                elif not version.startswith(("git+","git:")):
+                    version = "git+https://github.com/" + version
                 regex = re.compile(r"""
                     ^
                     git\+
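
The widened git branch above also resolves npm's hosted-git shorthands before the git+ regex runs. The alias mapping in isolation, as a standalone copy with illustrative inputs:

    def expand_git_alias(version):
        aliases = {
            "github:": "git+https://github.com/",
            "gist:": "git+https://gist.github.com/",
            "bitbucket:": "git+https://bitbucket.org/",
            "gitlab:": "git+https://gitlab.com/",
        }
        for prefix, base in aliases.items():
            if version.startswith(prefix):
                return base + version[len(prefix):]
        if not version.startswith(("git+", "git:")):
            return "git+https://github.com/" + version   # bare "user/repo"
        return version

    assert expand_git_alias("gitlab:u/r") == "git+https://gitlab.com/u/r"
    assert expand_git_alias("user/repo") == "git+https://github.com/user/repo"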
@@ -148,15 +188,17 @@ class NpmShrinkWrap(FetchMethod):
 
                 url = str(uri)
 
-            # local tarball sources and local link sources are unsupported
             else:
                 raise ParameterError("Unsupported dependency: %s" % name, ud.url)
 
+            # name is needed by unpack tracer for module mapping
             ud.deps.append({
+                "name": name,
                 "url": url,
                 "localpath": localpath,
                 "extrapaths": extrapaths,
                 "destsuffix": destsuffix,
+                "unpack": unpack,
             })
 
         try:
@@ -177,17 +219,23 @@ class NpmShrinkWrap(FetchMethod):
         # This fetcher resolves multiple URIs from a shrinkwrap file and then
         # forwards it to a proxy fetcher. The management of the donestamp file,
         # the lockfile and the checksums are forwarded to the proxy fetcher.
-        ud.proxy = Fetch([dep["url"] for dep in ud.deps], data)
+        shrinkwrap_urls = [dep["url"] for dep in ud.deps if dep["url"]]
+        if shrinkwrap_urls:
+            ud.proxy = Fetch(shrinkwrap_urls, data)
         ud.needdonestamp = False
 
     @staticmethod
     def _foreach_proxy_method(ud, handle):
         returns = []
-        for proxy_url in ud.proxy.urls:
-            proxy_ud = ud.proxy.ud[proxy_url]
-            proxy_d = ud.proxy.d
-            proxy_ud.setup_localpath(proxy_d)
-            returns.append(handle(proxy_ud.method, proxy_ud, proxy_d))
+        # Check if there are dependencies before trying to fetch them
+        if len(ud.deps) > 0:
+            for proxy_url in ud.proxy.urls:
+                proxy_ud = ud.proxy.ud[proxy_url]
+                proxy_d = ud.proxy.d
+                proxy_ud.setup_localpath(proxy_d)
+                lf = lockfile(proxy_ud.lockfile)
+                returns.append(handle(proxy_ud.method, proxy_ud, proxy_d))
+                unlockfile(lf)
         return returns
 
     def verify_donestamp(self, ud, d):
@@ -224,6 +272,7 @@ class NpmShrinkWrap(FetchMethod):
         destsuffix = ud.parm.get("destsuffix")
         if destsuffix:
             destdir = os.path.join(rootdir, destsuffix)
+        ud.unpack_tracer.unpack("npm-shrinkwrap", destdir)
 
         bb.utils.mkdirhier(destdir)
         bb.utils.copyfile(ud.shrinkwrap_file,
@@ -237,7 +286,16 @@ class NpmShrinkWrap(FetchMethod):
 
         for dep in manual:
             depdestdir = os.path.join(destdir, dep["destsuffix"])
-            npm_unpack(dep["localpath"], depdestdir, d)
+            if dep["url"]:
+                npm_unpack(dep["localpath"], depdestdir, d)
+            else:
+                depsrcdir = os.path.join(destdir, dep["localpath"])
+                if dep["unpack"]:
+                    npm_unpack(depsrcdir, depdestdir, d)
+                else:
+                    bb.utils.mkdirhier(depdestdir)
+                    cmd = 'cp -fpPRH "%s/." .' % (depsrcdir)
+                    runfetchcmd(cmd, d, workdir=depdestdir)
 
     def clean(self, ud, d):
         """Clean any existing full or partial download"""
diff --git a/bitbake/lib/bb/fetch2/osc.py b/bitbake/lib/bb/fetch2/osc.py
index d9ce44390c..495ac8a30a 100644
--- a/bitbake/lib/bb/fetch2/osc.py
+++ b/bitbake/lib/bb/fetch2/osc.py
@@ -1,4 +1,6 @@
 #
+# Copyright BitBake Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 """
@@ -9,6 +11,7 @@ Based on the svn "Fetch" implementation.
 
 import logging
 import os
+import re
 import bb
 from bb.fetch2 import FetchMethod
 from bb.fetch2 import FetchError
@@ -36,6 +39,7 @@ class Osc(FetchMethod):
         # Create paths to osc checkouts
         oscdir = d.getVar("OSCDIR") or (d.getVar("DL_DIR") + "/osc")
         relpath = self._strip_leading_slashes(ud.path)
+        ud.oscdir = oscdir
         ud.pkgdir = os.path.join(oscdir, ud.host)
         ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)
 
@@ -43,13 +47,13 @@ class Osc(FetchMethod):
             ud.revision = ud.parm['rev']
         else:
             pv = d.getVar("PV", False)
-            rev = bb.fetch2.srcrev_internal_helper(ud, d)
+            rev = bb.fetch2.srcrev_internal_helper(ud, d, '')
             if rev:
                 ud.revision = rev
             else:
                 ud.revision = ""
 
-        ud.localfile = d.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision))
+        ud.localfile = d.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), relpath.replace('/', '.'), ud.revision))
 
     def _buildosccommand(self, ud, d, command):
         """
@@ -59,26 +63,49 @@ class Osc(FetchMethod):
 
         basecmd = d.getVar("FETCHCMD_osc") or "/usr/bin/env osc"
 
-        proto = ud.parm.get('protocol', 'ocs')
+        proto = ud.parm.get('protocol', 'https')
 
         options = []
 
         config = "-c %s" % self.generate_config(ud, d)
 
-        if ud.revision:
+        if getattr(ud, 'revision', ''):
             options.append("-r %s" % ud.revision)
 
         coroot = self._strip_leading_slashes(ud.path)
 
         if command == "fetch":
-            osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options))
+            osccmd = "%s %s -A %s://%s co %s/%s %s" % (basecmd, config, proto, ud.host, coroot, ud.module, " ".join(options))
         elif command == "update":
-            osccmd = "%s %s up %s" % (basecmd, config, " ".join(options))
+            osccmd = "%s %s -A %s://%s up %s" % (basecmd, config, proto, ud.host, " ".join(options))
+        elif command == "api_source":
+            osccmd = "%s %s -A %s://%s api source/%s/%s" % (basecmd, config, proto, ud.host, coroot, ud.module)
         else:
             raise FetchError("Invalid osc command %s" % command, ud.url)
 
         return osccmd
 
+    def _latest_revision(self, ud, d, name):
+        """
+        Fetch latest revision for the given package
+        """
+        api_source_cmd = self._buildosccommand(ud, d, "api_source")
+
+        output = runfetchcmd(api_source_cmd, d)
+        match = re.match(r'<directory ?.* rev="(\d+)".*>', output)
+        if match is None:
+            raise FetchError("Unable to parse osc response", ud.url)
+        return match.groups()[0]
+
+    def _revision_key(self, ud, d, name):
+        """
+        Return a unique key for the url
+        """
+        # Collapse adjacent slashes
+        slash_re = re.compile(r"/+")
+        rev = getattr(ud, 'revision', "latest")
+        return "osc:%s%s.%s.%s" % (ud.host, slash_re.sub(".", ud.path), name, rev)
+
     def download(self, ud, d):
         """
         Fetch url
@@ -86,7 +113,7 @@ class Osc(FetchMethod):
 
         logger.debug2("Fetch: checking for module directory '" + ud.moddir + "'")
 
-        if os.access(os.path.join(d.getVar('OSCDIR'), ud.path, ud.module), os.R_OK):
+        if os.access(ud.moddir, os.R_OK):
             oscupdatecmd = self._buildosccommand(ud, d, "update")
             logger.info("Update "+ ud.url)
             # update sources there
@@ -114,20 +141,23 @@ class Osc(FetchMethod):
         Generate a .oscrc to be used for this run.
         """
 
-        config_path = os.path.join(d.getVar('OSCDIR'), "oscrc")
+        config_path = os.path.join(ud.oscdir, "oscrc")
+        if not os.path.exists(ud.oscdir):
+            bb.utils.mkdirhier(ud.oscdir)
+
         if (os.path.exists(config_path)):
             os.remove(config_path)
 
         f = open(config_path, 'w')
+        proto = ud.parm.get('protocol', 'https')
         f.write("[general]\n")
-        f.write("apisrv = %s\n" % ud.host)
-        f.write("scheme = http\n")
+        f.write("apiurl = %s://%s\n" % (proto, ud.host))
         f.write("su-wrapper = su -c\n")
         f.write("build-root = %s\n" % d.getVar('WORKDIR'))
         f.write("urllist = %s\n" % d.getVar("OSCURLLIST"))
         f.write("extra-pkgs = gzip\n")
         f.write("\n")
-        f.write("[%s]\n" % ud.host)
+        f.write("[%s://%s]\n" % (proto, ud.host))
         f.write("user = %s\n" % ud.parm["user"])
         f.write("pass = %s\n" % ud.parm["pswd"])
         f.close()
diff --git a/bitbake/lib/bb/fetch2/perforce.py b/bitbake/lib/bb/fetch2/perforce.py
index e2a41a4a12..3b6fa4b1ec 100644
--- a/bitbake/lib/bb/fetch2/perforce.py
+++ b/bitbake/lib/bb/fetch2/perforce.py
@@ -134,7 +134,7 @@ class Perforce(FetchMethod):
 
         ud.setup_revisions(d)
 
-        ud.localfile = d.expand('%s_%s_%s_%s.tar.gz' % (cleanedhost, cleanedpath, cleandedmodule, ud.revision))
+        ud.localfile = d.expand('%s_%s_%s_%s.tar.gz' % (cleanedhost, cleanedpath, cleanedmodule, ud.revision))
 
     def _buildp4command(self, ud, d, command, depot_filename=None):
         """
diff --git a/bitbake/lib/bb/fetch2/s3.py b/bitbake/lib/bb/fetch2/s3.py
index ffca73c8e4..6b8ffd5359 100644
--- a/bitbake/lib/bb/fetch2/s3.py
+++ b/bitbake/lib/bb/fetch2/s3.py
@@ -18,10 +18,47 @@ The aws tool must be correctly installed and configured prior to use.
 import os
 import bb
 import urllib.request, urllib.parse, urllib.error
+import re
 from bb.fetch2 import FetchMethod
 from bb.fetch2 import FetchError
 from bb.fetch2 import runfetchcmd
 
+def convertToBytes(value, unit):
+    value = float(value)
+    if (unit == "KiB"):
+        value = value*1024.0;
+    elif (unit == "MiB"):
+        value = value*1024.0*1024.0;
+    elif (unit == "GiB"):
+        value = value*1024.0*1024.0*1024.0;
+    return value
+
+class S3ProgressHandler(bb.progress.LineFilterProgressHandler):
+    """
+    Extract progress information from s3 cp output, e.g.:
+    Completed 5.1 KiB/8.8 GiB (12.0 MiB/s) with 1 file(s) remaining
+    """
+    def __init__(self, d):
+        super(S3ProgressHandler, self).__init__(d)
+        # Send an initial progress event so the bar gets shown
+        self._fire_progress(0)
+
+    def writeline(self, line):
+        percs = re.findall(r'^Completed (\d+.{0,1}\d*) (\w+)\/(\d+.{0,1}\d*) (\w+) (\(.+\)) with\s+', line)
+        if percs:
+            completed = (percs[-1][0])
+            completedUnit = (percs[-1][1])
+            total = (percs[-1][2])
+            totalUnit = (percs[-1][3])
+            completed = convertToBytes(completed, completedUnit)
+            total = convertToBytes(total, totalUnit)
+            progress = (completed/total)*100.0
+            rate = percs[-1][4]
+            self.update(progress, rate)
+            return False
+        return True
+
+
 class S3(FetchMethod):
     """Class to fetch urls via 'aws s3'"""
 
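
S3ProgressHandler parses the human-readable units that 'aws s3 cp' prints. How one such line turns into a percentage, using the sample line from the docstring above:

    import re

    def convert_to_bytes(value, unit):
        factor = {"KiB": 1024.0, "MiB": 1024.0 ** 2, "GiB": 1024.0 ** 3}.get(unit, 1.0)
        return float(value) * factor

    line = "Completed 5.1 KiB/8.8 GiB (12.0 MiB/s) with 1 file(s) remaining"
    m = re.findall(r'^Completed (\d+.{0,1}\d*) (\w+)\/(\d+.{0,1}\d*) (\w+) (\(.+\)) with\s+', line)[-1]
    progress = convert_to_bytes(m[0], m[1]) / convert_to_bytes(m[2], m[3]) * 100.0
    print("%.6f%% %s" % (progress, m[4]))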
@@ -52,7 +89,9 @@ class S3(FetchMethod):
 
         cmd = '%s cp s3://%s%s %s' % (ud.basecmd, ud.host, ud.path, ud.localpath)
         bb.fetch2.check_network_access(d, cmd, ud.url)
-        runfetchcmd(cmd, d)
+
+        progresshandler = S3ProgressHandler(d)
+        runfetchcmd(cmd, d, False, log=progresshandler)
 
         # Additional sanity checks copied from the wget class (although there
         # are no known issues which mean these are required, treat the aws cli
diff --git a/bitbake/lib/bb/fetch2/sftp.py b/bitbake/lib/bb/fetch2/sftp.py
index f87f292e5d..7884cce949 100644
--- a/bitbake/lib/bb/fetch2/sftp.py
+++ b/bitbake/lib/bb/fetch2/sftp.py
@@ -103,7 +103,7 @@ class SFTP(FetchMethod):
         if path[:3] == '/~/':
             path = path[3:]
 
-        remote = '%s%s:%s' % (user, urlo.hostname, path)
+        remote = '"%s%s:%s"' % (user, urlo.hostname, path)
 
         cmd = '%s %s %s %s' % (basecmd, port, remote, lpath)
 
diff --git a/bitbake/lib/bb/fetch2/ssh.py b/bitbake/lib/bb/fetch2/ssh.py
index 2c8557e1f8..0cbb2a6f25 100644
--- a/bitbake/lib/bb/fetch2/ssh.py
+++ b/bitbake/lib/bb/fetch2/ssh.py
@@ -32,6 +32,7 @@ IETF secsh internet draft:
 
 import re, os
 from bb.fetch2 import check_network_access, FetchMethod, ParameterError, runfetchcmd
+import urllib
 
 
 __pattern__ = re.compile(r'''
@@ -40,9 +41,9 @@ __pattern__ = re.compile(r'''
     ( # Optional username/password block
         (?P<user>\S+) # username
         (:(?P<pass>\S+))? # colon followed by the password (optional)
-    )?
     (?P<cparam>(;[^;]+)*)? # connection parameters block (optional)
     @
+    )?
     (?P<host>\S+?) # non-greedy match of the host
     (:(?P<port>[0-9]+))? # colon followed by the port (optional)
     /
@@ -70,6 +71,7 @@ class SSH(FetchMethod):
70 "git:// prefix with protocol=ssh", urldata.url) 71 "git:// prefix with protocol=ssh", urldata.url)
71 m = __pattern__.match(urldata.url) 72 m = __pattern__.match(urldata.url)
72 path = m.group('path') 73 path = m.group('path')
74 path = urllib.parse.unquote(path)
73 host = m.group('host') 75 host = m.group('host')
74 urldata.localpath = os.path.join(d.getVar('DL_DIR'), 76 urldata.localpath = os.path.join(d.getVar('DL_DIR'),
75 os.path.basename(os.path.normpath(path))) 77 os.path.basename(os.path.normpath(path)))
@@ -96,6 +98,11 @@ class SSH(FetchMethod):
             fr += '@%s' % host
         else:
             fr = host
+
+        if path[0] != '~':
+            path = '/%s' % path
+        path = urllib.parse.unquote(path)
+
         fr += ':%s' % path
 
         cmd = 'scp -B -r %s %s %s/' % (
@@ -108,3 +115,41 @@ class SSH(FetchMethod):
 
         runfetchcmd(cmd, d)
 
+    def checkstatus(self, fetch, urldata, d):
+        """
+        Check the status of the url
+        """
+        m = __pattern__.match(urldata.url)
+        path = m.group('path')
+        host = m.group('host')
+        port = m.group('port')
+        user = m.group('user')
+        password = m.group('pass')
+
+        if port:
+            portarg = '-P %s' % port
+        else:
+            portarg = ''
+
+        if user:
+            fr = user
+            if password:
+                fr += ':%s' % password
+            fr += '@%s' % host
+        else:
+            fr = host
+
+        if path[0] != '~':
+            path = '/%s' % path
+        path = urllib.parse.unquote(path)
+
+        cmd = 'ssh -o BatchMode=true %s %s [ -f %s ]' % (
+            portarg,
+            fr,
+            path
+        )
+
+        check_network_access(d, cmd, urldata.url)
+        runfetchcmd(cmd, d)
+
+        return True
diff --git a/bitbake/lib/bb/fetch2/svn.py b/bitbake/lib/bb/fetch2/svn.py
index 8856ef1c62..d40e4d2909 100644
--- a/bitbake/lib/bb/fetch2/svn.py
+++ b/bitbake/lib/bb/fetch2/svn.py
@@ -57,7 +57,12 @@ class Svn(FetchMethod):
         if 'rev' in ud.parm:
             ud.revision = ud.parm['rev']
 
-        ud.localfile = d.expand('%s_%s_%s_%s_.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision))
+        # Whether to use the @REV peg-revision syntax in the svn command or not
+        ud.pegrevision = True
+        if 'nopegrevision' in ud.parm:
+            ud.pegrevision = False
+
+        ud.localfile = d.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ["0", "1"][ud.pegrevision]))
 
     def _buildsvncommand(self, ud, d, command):
         """
@@ -86,7 +91,7 @@ class Svn(FetchMethod):
             if command == "info":
                 svncmd = "%s info %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module)
             elif command == "log1":
-                svncmd = "%s log --limit 1 %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module)
+                svncmd = "%s log --limit 1 --quiet %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module)
             else:
                 suffix = ""
 
@@ -98,7 +103,8 @@ class Svn(FetchMethod):
98 103
99 if ud.revision: 104 if ud.revision:
100 options.append("-r %s" % ud.revision) 105 options.append("-r %s" % ud.revision)
101 suffix = "@%s" % (ud.revision) 106 if ud.pegrevision:
107 suffix = "@%s" % (ud.revision)
102 108
103 if command == "fetch": 109 if command == "fetch":
104 transportuser = ud.parm.get("transportuser", "") 110 transportuser = ud.parm.get("transportuser", "")
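
With pegging enabled (the default), the fetcher passes both the operative revision (-r REV) and the peg revision (@REV); the peg fixes which historical path the URL refers to, which matters for paths that were later moved or deleted. The flag is also folded into the tarball name so the pegged and unpegged variants cache separately. A rough sketch of the effect (hypothetical helper, not the fetcher API):

    def svn_rev_args(revision, pegrevision=True):
        # -r picks the revision to check out; "@REV" additionally pegs
        # the path lookup itself to that revision
        options = ["-r %s" % revision]
        suffix = "@%s" % revision if pegrevision else ""
        return options, suffix

    print(svn_rev_args("1234"))  # -> (['-r 1234'], '@1234')
    # The localfile name appends ["0", "1"][ud.pegrevision] (bool-indexing
    # a two-element list), so both variants can coexist in DL_DIR.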
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py
index 6d82f3af07..fbfa6938ac 100644
--- a/bitbake/lib/bb/fetch2/wget.py
+++ b/bitbake/lib/bb/fetch2/wget.py
@@ -26,7 +26,6 @@ from bb.fetch2 import FetchMethod
26from bb.fetch2 import FetchError 26from bb.fetch2 import FetchError
27from bb.fetch2 import logger 27from bb.fetch2 import logger
28from bb.fetch2 import runfetchcmd 28from bb.fetch2 import runfetchcmd
29from bb.utils import export_proxies
30from bs4 import BeautifulSoup 29from bs4 import BeautifulSoup
31from bs4 import SoupStrainer 30from bs4 import SoupStrainer
32 31
@@ -52,18 +51,24 @@ class WgetProgressHandler(bb.progress.LineFilterProgressHandler):
52 51
53 52
54class Wget(FetchMethod): 53class Wget(FetchMethod):
54 """Class to fetch urls via 'wget'"""
55 55
56 # CDNs like CloudFlare may do a 'browser integrity test' which can fail 56 # CDNs like CloudFlare may do a 'browser integrity test' which can fail
57 # with the standard wget/urllib User-Agent, so pretend to be a modern 57 # with the standard wget/urllib User-Agent, so pretend to be a modern
58 # browser. 58 # browser.
59 user_agent = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:84.0) Gecko/20100101 Firefox/84.0" 59 user_agent = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:84.0) Gecko/20100101 Firefox/84.0"
60 60
61 """Class to fetch urls via 'wget'""" 61 def check_certs(self, d):
62 """
63 Should certificates be checked?
64 """
65 return (d.getVar("BB_CHECK_SSL_CERTS") or "1") != "0"
66
62 def supports(self, ud, d): 67 def supports(self, ud, d):
63 """ 68 """
64 Check to see if a given url can be fetched with wget. 69 Check to see if a given url can be fetched with wget.
65 """ 70 """
66 return ud.type in ['http', 'https', 'ftp'] 71 return ud.type in ['http', 'https', 'ftp', 'ftps']
67 72
68 def recommends_checksum(self, urldata): 73 def recommends_checksum(self, urldata):
69 return True 74 return True
@@ -82,7 +87,13 @@ class Wget(FetchMethod):
82 if not ud.localfile: 87 if not ud.localfile:
83 ud.localfile = d.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", ".")) 88 ud.localfile = d.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", "."))
84 89
85 self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 30 --passive-ftp --no-check-certificate" 90 self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 30"
91
92 if ud.type == 'ftp' or ud.type == 'ftps':
93 self.basecmd += " --passive-ftp"
94
95 if not self.check_certs(d):
96 self.basecmd += " --no-check-certificate"
86 97
87 def _runwget(self, ud, d, command, quiet, workdir=None): 98 def _runwget(self, ud, d, command, quiet, workdir=None):
88 99
@@ -97,13 +108,22 @@ class Wget(FetchMethod):
97 108
98 fetchcmd = self.basecmd 109 fetchcmd = self.basecmd
99 110
100 if 'downloadfilename' in ud.parm: 111 localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile) + ".tmp"
101 localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile) 112 bb.utils.mkdirhier(os.path.dirname(localpath))
102 bb.utils.mkdirhier(os.path.dirname(localpath)) 113 fetchcmd += " -O %s" % shlex.quote(localpath)
103 fetchcmd += " -O %s" % shlex.quote(localpath)
104 114
105 if ud.user and ud.pswd: 115 if ud.user and ud.pswd:
106 fetchcmd += " --user=%s --password=%s --auth-no-challenge" % (ud.user, ud.pswd) 116 fetchcmd += " --auth-no-challenge"
117 if ud.parm.get("redirectauth", "1") == "1":
118 # An undocumented feature of wget is that if the
119 # username/password are specified on the URI, wget will only
120 # send the Authorization header to the first host and not to
121 # any hosts that it is redirected to. With the increasing
122 # usage of temporary AWS URLs, this difference now matters as
123 # AWS will reject any request that has authentication both in
124 # the query parameters (from the redirect) and in the
125 # Authorization header.
126 fetchcmd += " --user=%s --password=%s" % (ud.user, ud.pswd)
107 127
108 uri = ud.url.split(";")[0] 128 uri = ud.url.split(";")[0]
109 if os.path.exists(ud.localpath): 129 if os.path.exists(ud.localpath):
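
The comment block above is the heart of this change: wget only sends the Authorization header to the first host, so passing credentials on the command line can break servers (notably AWS) that reject a redirected request carrying auth both in the query string and in the header. The new ;redirectauth=0 URL parameter suppresses the credentials entirely. The decision in isolation (parm stands in for the parsed URL parameter dict):

    def wget_auth_args(user, password, parm):
        # --auth-no-challenge sends credentials up front rather than
        # waiting for a 401 challenge
        args = " --auth-no-challenge"
        if parm.get("redirectauth", "1") == "1":
            args += " --user=%s --password=%s" % (user, password)
        return args

    # with ;redirectauth=0 on the URL:
    print(wget_auth_args("u", "p", {"redirectauth": "0"}))
    # -> ' --auth-no-challenge' (credentials omitted entirely)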
@@ -114,6 +134,15 @@ class Wget(FetchMethod):
114 134
115 self._runwget(ud, d, fetchcmd, False) 135 self._runwget(ud, d, fetchcmd, False)
116 136
137 # Try and verify any checksum now, meaning if it isn't correct, we don't remove the
138 # original file, which might be a race (imagine two recipes referencing the same
139 # source, one with an incorrect checksum)
140 bb.fetch2.verify_checksum(ud, d, localpath=localpath, fatal_nochecksum=False)
141
142 # Remove the ".tmp" and move the file into position atomically
143 # Our lock prevents multiple writers but mirroring code may grab incomplete files
144 os.rename(localpath, localpath[:-4])
145
117 # Sanity check since wget can pretend it succeed when it didn't 146 # Sanity check since wget can pretend it succeed when it didn't
118 # Also, this used to happen if sourceforge sent us to the mirror page 147 # Also, this used to happen if sourceforge sent us to the mirror page
119 if not os.path.exists(ud.localpath): 148 if not os.path.exists(ud.localpath):
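
Downloading to a ".tmp" name, verifying the checksum, and only then renaming closes a race: with two recipes referencing the same source, a mirror lookup could otherwise pick up a half-written or checksum-failing file under its final name. The pattern in isolation (download and verify_file are stand-ins for _runwget and bb.fetch2.verify_checksum):

    import os

    def fetch_atomically(url, dest, download, verify_file):
        tmp = dest + ".tmp"
        os.makedirs(os.path.dirname(tmp), exist_ok=True)
        download(url, tmp)      # may leave a partial file behind on failure
        verify_file(tmp)        # raises before the rename on a mismatch
        # os.rename() is atomic on the same filesystem, so other readers
        # only ever observe the complete, verified file at the final path
        os.rename(tmp, dest)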
@@ -209,7 +238,7 @@ class Wget(FetchMethod):
209 # We let the request fail and expect it to be 238 # We let the request fail and expect it to be
210 # tried once more ("try_again" in check_status()), 239 # tried once more ("try_again" in check_status()),
211 # with the dead connection removed from the cache. 240 # with the dead connection removed from the cache.
212 # If it still fails, we give up, which can happend for bad 241 # If it still fails, we give up, which can happen for bad
213 # HTTP proxy settings. 242 # HTTP proxy settings.
214 fetch.connection_cache.remove_connection(h.host, h.port) 243 fetch.connection_cache.remove_connection(h.host, h.port)
215 raise urllib.error.URLError(err) 244 raise urllib.error.URLError(err)
@@ -282,64 +311,76 @@ class Wget(FetchMethod):
282 newreq = urllib.request.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl) 311 newreq = urllib.request.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl)
283 newreq.get_method = req.get_method 312 newreq.get_method = req.get_method
284 return newreq 313 return newreq
285 exported_proxies = export_proxies(d)
286
287 handlers = [FixedHTTPRedirectHandler, HTTPMethodFallback]
288 if exported_proxies:
289 handlers.append(urllib.request.ProxyHandler())
290 handlers.append(CacheHTTPHandler())
291 # Since Python 2.7.9 ssl cert validation is enabled by default
292 # see PEP-0476, this causes verification errors on some https servers
293 # so disable by default.
294 import ssl
295 if hasattr(ssl, '_create_unverified_context'):
296 handlers.append(urllib.request.HTTPSHandler(context=ssl._create_unverified_context()))
297 opener = urllib.request.build_opener(*handlers)
298
299 try:
300 uri = ud.url.split(";")[0]
301 r = urllib.request.Request(uri)
302 r.get_method = lambda: "HEAD"
303 # Some servers (FusionForge, as used on Alioth) require that the
304 # optional Accept header is set.
305 r.add_header("Accept", "*/*")
306 r.add_header("User-Agent", self.user_agent)
307 def add_basic_auth(login_str, request):
308 '''Adds Basic auth to http request, pass in login:password as string'''
309 import base64
310 encodeuser = base64.b64encode(login_str.encode('utf-8')).decode("utf-8")
311 authheader = "Basic %s" % encodeuser
312 r.add_header("Authorization", authheader)
313
314 if ud.user and ud.pswd:
315 add_basic_auth(ud.user + ':' + ud.pswd, r)
316 314
317 try: 315 # We need to update the environment here as both the proxy and HTTPS
318 import netrc 316 # handlers need variables set. The proxy needs http_proxy and friends to
319 n = netrc.netrc() 317 # be set, and HTTPSHandler ends up calling into openssl to load the
320 login, unused, password = n.authenticators(urllib.parse.urlparse(uri).hostname) 318 # certificates. In buildtools configurations this will be looking at the
321 add_basic_auth("%s:%s" % (login, password), r) 319 # wrong place for certificates by default: we set SSL_CERT_FILE to the
322 except (TypeError, ImportError, IOError, netrc.NetrcParseError): 320 # right location in the buildtools environment script but as BitBake
323 pass 321 # prunes prunes the environment this is lost. When binaries are executed
324 322 # runfetchcmd ensures these values are in the environment, but this is
325 with opener.open(r) as response: 323 # pure Python so we need to update the environment.
326 pass 324 #
327 except urllib.error.URLError as e: 325 # Avoid tramping the environment too much by using bb.utils.environment
328 if try_again: 326 # to scope the changes to the build_opener request, which is when the
329 logger.debug2("checkstatus: trying again") 327 # environment lookups happen.
330 return self.checkstatus(fetch, ud, d, False) 328 newenv = bb.fetch2.get_fetcher_environment(d)
329
330 with bb.utils.environment(**newenv):
331 import ssl
332
333 if self.check_certs(d):
334 context = ssl.create_default_context()
331 else: 335 else:
332 # debug for now to avoid spamming the logs in e.g. remote sstate searches 336 context = ssl._create_unverified_context()
333 logger.debug2("checkstatus() urlopen failed: %s" % e) 337
334 return False 338 handlers = [FixedHTTPRedirectHandler,
335 except ConnectionResetError as e: 339 HTTPMethodFallback,
336 if try_again: 340 urllib.request.ProxyHandler(),
337 logger.debug2("checkstatus: trying again") 341 CacheHTTPHandler(),
338 return self.checkstatus(fetch, ud, d, False) 342 urllib.request.HTTPSHandler(context=context)]
339 else: 343 opener = urllib.request.build_opener(*handlers)
340 # debug for now to avoid spamming the logs in e.g. remote sstate searches 344
341 logger.debug2("checkstatus() urlopen failed: %s" % e) 345 try:
342 return False 346 uri_base = ud.url.split(";")[0]
347 uri = "{}://{}{}".format(urllib.parse.urlparse(uri_base).scheme, ud.host, ud.path)
348 r = urllib.request.Request(uri)
349 r.get_method = lambda: "HEAD"
350 # Some servers (FusionForge, as used on Alioth) require that the
351 # optional Accept header is set.
352 r.add_header("Accept", "*/*")
353 r.add_header("User-Agent", self.user_agent)
354 def add_basic_auth(login_str, request):
355 '''Adds Basic auth to http request, pass in login:password as string'''
356 import base64
357 encodeuser = base64.b64encode(login_str.encode('utf-8')).decode("utf-8")
358 authheader = "Basic %s" % encodeuser
359 r.add_header("Authorization", authheader)
360
361 if ud.user and ud.pswd:
362 add_basic_auth(ud.user + ':' + ud.pswd, r)
363
364 try:
365 import netrc
366 auth_data = netrc.netrc().authenticators(urllib.parse.urlparse(uri).hostname)
367 if auth_data:
368 login, _, password = auth_data
369 add_basic_auth("%s:%s" % (login, password), r)
370 except (FileNotFoundError, netrc.NetrcParseError):
371 pass
372
373 with opener.open(r, timeout=30) as response:
374 pass
375 except (urllib.error.URLError, ConnectionResetError, TimeoutError) as e:
376 if try_again:
377 logger.debug2("checkstatus: trying again")
378 return self.checkstatus(fetch, ud, d, False)
379 else:
380 # debug for now to avoid spamming the logs in e.g. remote sstate searches
 341 logger.debug2("checkstatus() urlopen failed: %s" % e) 381 logger.debug2("checkstatus() urlopen failed for %s: %s" % (uri, e))
382 return False
383
343 return True 384 return True
344 385
345 def _parse_path(self, regex, s): 386 def _parse_path(self, regex, s):
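
The long comment above explains the key subtlety: the proxy and HTTPS handlers read their configuration (http_proxy and friends, SSL_CERT_FILE) from os.environ at request time, and BitBake normally prunes those variables, so they are applied only for the duration of the request via bb.utils.environment(). A minimal sketch of such a scoped-environment context manager (an illustration of the idea, not bb.utils' implementation):

    import os
    from contextlib import contextmanager

    @contextmanager
    def scoped_environment(**updates):
        # set the variables for the duration of the block, then restore
        # the previous values (or drop keys that did not exist before)
        saved = {k: os.environ.get(k) for k in updates}
        os.environ.update(updates)
        try:
            yield
        finally:
            for k, old in saved.items():
                if old is None:
                    os.environ.pop(k, None)
                else:
                    os.environ[k] = old

    with scoped_environment(https_proxy="http://proxy:8080"):
        pass  # urllib.request.ProxyHandler() would pick the value up here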
@@ -472,7 +513,7 @@ class Wget(FetchMethod):
472 version_dir = ['', '', ''] 513 version_dir = ['', '', '']
473 version = ['', '', ''] 514 version = ['', '', '']
474 515
475 dirver_regex = re.compile(r"(?P<pfx>\D*)(?P<ver>(\d+[\.\-_])+(\d+))") 516 dirver_regex = re.compile(r"(?P<pfx>\D*)(?P<ver>(\d+[\.\-_])*(\d+))")
476 s = dirver_regex.search(dirver) 517 s = dirver_regex.search(dirver)
477 if s: 518 if s:
478 version_dir[1] = s.group('ver') 519 version_dir[1] = s.group('ver')
@@ -548,7 +589,7 @@ class Wget(FetchMethod):
548 589
549 # src.rpm extension was added only for rpm package. Can be removed if the rpm 590 # src.rpm extension was added only for rpm package. Can be removed if the rpm
550 # packaged will always be considered as having to be manually upgraded 591 # packaged will always be considered as having to be manually upgraded
551 psuffix_regex = r"(tar\.gz|tgz|tar\.bz2|zip|xz|tar\.lz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)" 592 psuffix_regex = r"(tar\.\w+|tgz|zip|xz|rpm|bz2|orig\.tar\.\w+|src\.tar\.\w+|src\.tgz|svnr\d+\.tar\.\w+|stable\.tar\.\w+|src\.rpm)"
552 593
553 # match name, version and archive type of a package 594 # match name, version and archive type of a package
554 package_regex_comp = re.compile(r"(?P<name>%s?\.?v?)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s$)" 595 package_regex_comp = re.compile(r"(?P<name>%s?\.?v?)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s$)"
@@ -599,10 +640,10 @@ class Wget(FetchMethod):
599 # search for version matches on folders inside the path, like: 640 # search for version matches on folders inside the path, like:
600 # "5.7" in http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz 641 # "5.7" in http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
601 dirver_regex = re.compile(r"(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/") 642 dirver_regex = re.compile(r"(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
602 m = dirver_regex.search(path) 643 m = dirver_regex.findall(path)
603 if m: 644 if m:
604 pn = d.getVar('PN') 645 pn = d.getVar('PN')
605 dirver = m.group('dirver') 646 dirver = m[-1][0]
606 647
607 dirver_pn_regex = re.compile(r"%s\d?" % (re.escape(pn))) 648 dirver_pn_regex = re.compile(r"%s\d?" % (re.escape(pn)))
608 if not dirver_pn_regex.search(dirver): 649 if not dirver_pn_regex.search(dirver):
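
Switching from search() to findall() makes the last version-looking directory in the path win rather than the first, which matters for layouts like .../5.7/more/1.2/. Since the pattern contains several groups, findall() returns one tuple per match and the <dirver> group is element 0, hence the m[-1][0] indexing. A quick demonstration:

    import re

    dirver_regex = re.compile(r"(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
    m = dirver_regex.findall("/sources/pkg/5.7/more/1.2/")
    # one tuple per match, one element per group; index 0 is <dirver>
    print([t[0] for t in m])  # -> ['5.7', '1.2']
    print(m[-1][0])           # -> 1.2, the deepest versioned directory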
diff --git a/bitbake/lib/bb/main.py b/bitbake/lib/bb/main.py
index 06bad495ac..bca8ebfa09 100755
--- a/bitbake/lib/bb/main.py
+++ b/bitbake/lib/bb/main.py
@@ -12,11 +12,12 @@
12import os 12import os
13import sys 13import sys
14import logging 14import logging
15import optparse 15import argparse
16import warnings 16import warnings
17import fcntl 17import fcntl
18import time 18import time
19import traceback 19import traceback
20import datetime
20 21
21import bb 22import bb
22from bb import event 23from bb import event
@@ -43,18 +44,18 @@ def present_options(optionlist):
43 else: 44 else:
44 return optionlist[0] 45 return optionlist[0]
45 46
46class BitbakeHelpFormatter(optparse.IndentedHelpFormatter): 47class BitbakeHelpFormatter(argparse.HelpFormatter):
47 def format_option(self, option): 48 def _get_help_string(self, action):
48 # We need to do this here rather than in the text we supply to 49 # We need to do this here rather than in the text we supply to
49 # add_option() because we don't want to call list_extension_modules() 50 # add_option() because we don't want to call list_extension_modules()
50 # on every execution (since it imports all of the modules) 51 # on every execution (since it imports all of the modules)
51 # Note also that we modify option.help rather than the returned text 52 # Note also that we modify option.help rather than the returned text
52 # - this is so that we don't have to re-format the text ourselves 53 # - this is so that we don't have to re-format the text ourselves
53 if option.dest == 'ui': 54 if action.dest == 'ui':
54 valid_uis = list_extension_modules(bb.ui, 'main') 55 valid_uis = list_extension_modules(bb.ui, 'main')
55 option.help = option.help.replace('@CHOICES@', present_options(valid_uis)) 56 return action.help.replace('@CHOICES@', present_options(valid_uis))
56 57
57 return optparse.IndentedHelpFormatter.format_option(self, option) 58 return action.help
58 59
59def list_extension_modules(pkg, checkattr): 60def list_extension_modules(pkg, checkattr):
60 """ 61 """
@@ -112,189 +113,209 @@ def _showwarning(message, category, filename, lineno, file=None, line=None):
112 warnlog.warning(s) 113 warnlog.warning(s)
113 114
114warnings.showwarning = _showwarning 115warnings.showwarning = _showwarning
115warnings.filterwarnings("ignore")
116warnings.filterwarnings("default", module="(<string>$|(oe|bb)\.)")
117warnings.filterwarnings("ignore", category=PendingDeprecationWarning)
118warnings.filterwarnings("ignore", category=ImportWarning)
119warnings.filterwarnings("ignore", category=DeprecationWarning, module="<string>$")
120warnings.filterwarnings("ignore", message="With-statements now directly support multiple context managers")
121
122 116
123def create_bitbake_parser(): 117def create_bitbake_parser():
124 parser = optparse.OptionParser( 118 parser = argparse.ArgumentParser(
125 formatter=BitbakeHelpFormatter(), 119 description="""\
126 version="BitBake Build Tool Core version %s" % bb.__version__, 120 It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which
127 usage="""%prog [options] [recipename/target recipe:do_task ...] 121 will provide the layer, BBFILES and other configuration information.
128 122 """,
129 Executes the specified task (default is 'build') for a given set of target recipes (.bb files). 123 formatter_class=BitbakeHelpFormatter,
130 It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which 124 allow_abbrev=False,
131 will provide the layer, BBFILES and other configuration information.""") 125 add_help=False, # help is manually added below in a specific argument group
132 126 )
133 parser.add_option("-b", "--buildfile", action="store", dest="buildfile", default=None, 127
134 help="Execute tasks from a specific .bb recipe directly. WARNING: Does " 128 general_group = parser.add_argument_group('General options')
135 "not handle any dependencies from other recipes.") 129 task_group = parser.add_argument_group('Task control options')
136 130 exec_group = parser.add_argument_group('Execution control options')
137 parser.add_option("-k", "--continue", action="store_false", dest="abort", default=True, 131 logging_group = parser.add_argument_group('Logging/output control options')
138 help="Continue as much as possible after an error. While the target that " 132 server_group = parser.add_argument_group('Server options')
139 "failed and anything depending on it cannot be built, as much as " 133 config_group = parser.add_argument_group('Configuration options')
140 "possible will be built before stopping.") 134
141 135 general_group.add_argument("targets", nargs="*", metavar="recipename/target",
142 parser.add_option("-f", "--force", action="store_true", dest="force", default=False, 136 help="Execute the specified task (default is 'build') for these target "
143 help="Force the specified targets/task to run (invalidating any " 137 "recipes (.bb files).")
144 "existing stamp file).") 138
145 139 general_group.add_argument("-s", "--show-versions", action="store_true",
146 parser.add_option("-c", "--cmd", action="store", dest="cmd", 140 help="Show current and preferred versions of all recipes.")
147 help="Specify the task to execute. The exact options available " 141
148 "depend on the metadata. Some examples might be 'compile'" 142 general_group.add_argument("-e", "--environment", action="store_true",
149 " or 'populate_sysroot' or 'listtasks' may give a list of " 143 dest="show_environment",
150 "the tasks available.") 144 help="Show the global or per-recipe environment complete with information"
151 145 " about where variables were set/changed.")
152 parser.add_option("-C", "--clear-stamp", action="store", dest="invalidate_stamp", 146
153 help="Invalidate the stamp for the specified task such as 'compile' " 147 general_group.add_argument("-g", "--graphviz", action="store_true", dest="dot_graph",
154 "and then run the default task for the specified target(s).") 148 help="Save dependency tree information for the specified "
155 149 "targets in the dot syntax.")
156 parser.add_option("-r", "--read", action="append", dest="prefile", default=[],
157 help="Read the specified file before bitbake.conf.")
158
159 parser.add_option("-R", "--postread", action="append", dest="postfile", default=[],
160 help="Read the specified file after bitbake.conf.")
161
162 parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False,
163 help="Enable tracing of shell tasks (with 'set -x'). "
164 "Also print bb.note(...) messages to stdout (in "
165 "addition to writing them to ${T}/log.do_<task>).")
166
167 parser.add_option("-D", "--debug", action="count", dest="debug", default=0,
168 help="Increase the debug level. You can specify this "
169 "more than once. -D sets the debug level to 1, "
170 "where only bb.debug(1, ...) messages are printed "
171 "to stdout; -DD sets the debug level to 2, where "
172 "both bb.debug(1, ...) and bb.debug(2, ...) "
173 "messages are printed; etc. Without -D, no debug "
174 "messages are printed. Note that -D only affects "
175 "output to stdout. All debug messages are written "
176 "to ${T}/log.do_taskname, regardless of the debug "
177 "level.")
178
179 parser.add_option("-q", "--quiet", action="count", dest="quiet", default=0,
180 help="Output less log message data to the terminal. You can specify this more than once.")
181
182 parser.add_option("-n", "--dry-run", action="store_true", dest="dry_run", default=False,
183 help="Don't execute, just go through the motions.")
184
185 parser.add_option("-S", "--dump-signatures", action="append", dest="dump_signatures",
186 default=[], metavar="SIGNATURE_HANDLER",
187 help="Dump out the signature construction information, with no task "
188 "execution. The SIGNATURE_HANDLER parameter is passed to the "
189 "handler. Two common values are none and printdiff but the handler "
190 "may define more/less. none means only dump the signature, printdiff"
191 " means compare the dumped signature with the cached one.")
192
193 parser.add_option("-p", "--parse-only", action="store_true",
194 dest="parse_only", default=False,
195 help="Quit after parsing the BB recipes.")
196
197 parser.add_option("-s", "--show-versions", action="store_true",
198 dest="show_versions", default=False,
199 help="Show current and preferred versions of all recipes.")
200
201 parser.add_option("-e", "--environment", action="store_true",
202 dest="show_environment", default=False,
203 help="Show the global or per-recipe environment complete with information"
204 " about where variables were set/changed.")
205
206 parser.add_option("-g", "--graphviz", action="store_true", dest="dot_graph", default=False,
207 help="Save dependency tree information for the specified "
208 "targets in the dot syntax.")
209
210 parser.add_option("-I", "--ignore-deps", action="append",
211 dest="extra_assume_provided", default=[],
212 help="Assume these dependencies don't exist and are already provided "
213 "(equivalent to ASSUME_PROVIDED). Useful to make dependency "
214 "graphs more appealing")
215
216 parser.add_option("-l", "--log-domains", action="append", dest="debug_domains", default=[],
217 help="Show debug logging for the specified logging domains")
218
219 parser.add_option("-P", "--profile", action="store_true", dest="profile", default=False,
220 help="Profile the command and save reports.")
221 150
222 # @CHOICES@ is substituted out by BitbakeHelpFormatter above 151 # @CHOICES@ is substituted out by BitbakeHelpFormatter above
223 parser.add_option("-u", "--ui", action="store", dest="ui", 152 general_group.add_argument("-u", "--ui",
224 default=os.environ.get('BITBAKE_UI', 'knotty'), 153 default=os.environ.get('BITBAKE_UI', 'knotty'),
225 help="The user interface to use (@CHOICES@ - default %default).") 154 help="The user interface to use (@CHOICES@ - default %(default)s).")
226 155
227 parser.add_option("", "--token", action="store", dest="xmlrpctoken", 156 general_group.add_argument("--version", action="store_true",
228 default=os.environ.get("BBTOKEN"), 157 help="Show programs version and exit.")
229 help="Specify the connection token to be used when connecting " 158
230 "to a remote server.") 159 general_group.add_argument('-h', '--help', action='help',
231 160 help='Show this help message and exit.')
232 parser.add_option("", "--revisions-changed", action="store_true", 161
233 dest="revisions_changed", default=False, 162
234 help="Set the exit code depending on whether upstream floating " 163 task_group.add_argument("-f", "--force", action="store_true",
235 "revisions have changed or not.") 164 help="Force the specified targets/task to run (invalidating any "
236 165 "existing stamp file).")
237 parser.add_option("", "--server-only", action="store_true", 166
238 dest="server_only", default=False, 167 task_group.add_argument("-c", "--cmd",
239 help="Run bitbake without a UI, only starting a server " 168 help="Specify the task to execute. The exact options available "
240 "(cooker) process.") 169 "depend on the metadata. Some examples might be 'compile'"
 241 170 " or 'populate_sysroot'; 'listtasks' may give a list of "
242 parser.add_option("-B", "--bind", action="store", dest="bind", default=False, 171 "the tasks available.")
243 help="The name/address for the bitbake xmlrpc server to bind to.") 172
244 173 task_group.add_argument("-C", "--clear-stamp", dest="invalidate_stamp",
245 parser.add_option("-T", "--idle-timeout", type=float, dest="server_timeout", 174 help="Invalidate the stamp for the specified task such as 'compile' "
246 default=os.getenv("BB_SERVER_TIMEOUT"), 175 "and then run the default task for the specified target(s).")
247 help="Set timeout to unload bitbake server due to inactivity, " 176
248 "set to -1 means no unload, " 177 task_group.add_argument("--runall", action="append", default=[],
249 "default: Environment variable BB_SERVER_TIMEOUT.") 178 help="Run the specified task for any recipe in the taskgraph of the "
250 179 "specified target (even if it wouldn't otherwise have run).")
251 parser.add_option("", "--no-setscene", action="store_true", 180
252 dest="nosetscene", default=False, 181 task_group.add_argument("--runonly", action="append",
253 help="Do not run any setscene tasks. sstate will be ignored and " 182 help="Run only the specified task within the taskgraph of the "
254 "everything needed, built.") 183 "specified targets (and any task dependencies those tasks may have).")
255 184
256 parser.add_option("", "--skip-setscene", action="store_true", 185 task_group.add_argument("--no-setscene", action="store_true",
257 dest="skipsetscene", default=False, 186 dest="nosetscene",
258 help="Skip setscene tasks if they would be executed. Tasks previously " 187 help="Do not run any setscene tasks. sstate will be ignored and "
259 "restored from sstate will be kept, unlike --no-setscene") 188 "everything needed, built.")
260 189
261 parser.add_option("", "--setscene-only", action="store_true", 190 task_group.add_argument("--skip-setscene", action="store_true",
262 dest="setsceneonly", default=False, 191 dest="skipsetscene",
263 help="Only run setscene tasks, don't run any real tasks.") 192 help="Skip setscene tasks if they would be executed. Tasks previously "
264 193 "restored from sstate will be kept, unlike --no-setscene.")
265 parser.add_option("", "--remote-server", action="store", dest="remote_server", 194
266 default=os.environ.get("BBSERVER"), 195 task_group.add_argument("--setscene-only", action="store_true",
267 help="Connect to the specified server.") 196 dest="setsceneonly",
268 197 help="Only run setscene tasks, don't run any real tasks.")
269 parser.add_option("-m", "--kill-server", action="store_true", 198
270 dest="kill_server", default=False, 199
271 help="Terminate any running bitbake server.") 200 exec_group.add_argument("-n", "--dry-run", action="store_true",
272 201 help="Don't execute, just go through the motions.")
273 parser.add_option("", "--observe-only", action="store_true", 202
274 dest="observe_only", default=False, 203 exec_group.add_argument("-p", "--parse-only", action="store_true",
275 help="Connect to a server as an observing-only client.") 204 help="Quit after parsing the BB recipes.")
276 205
277 parser.add_option("", "--status-only", action="store_true", 206 exec_group.add_argument("-k", "--continue", action="store_false", dest="halt",
278 dest="status_only", default=False, 207 help="Continue as much as possible after an error. While the target that "
279 help="Check the status of the remote bitbake server.") 208 "failed and anything depending on it cannot be built, as much as "
280 209 "possible will be built before stopping.")
281 parser.add_option("-w", "--write-log", action="store", dest="writeeventlog", 210
282 default=os.environ.get("BBEVENTLOG"), 211 exec_group.add_argument("-P", "--profile", action="store_true",
283 help="Writes the event log of the build to a bitbake event json file. " 212 help="Profile the command and save reports.")
284 "Use '' (empty string) to assign the name automatically.") 213
285 214 exec_group.add_argument("-S", "--dump-signatures", action="append",
286 parser.add_option("", "--runall", action="append", dest="runall", 215 default=[], metavar="SIGNATURE_HANDLER",
287 help="Run the specified task for any recipe in the taskgraph of the specified target (even if it wouldn't otherwise have run).") 216 help="Dump out the signature construction information, with no task "
288 217 "execution. The SIGNATURE_HANDLER parameter is passed to the "
289 parser.add_option("", "--runonly", action="append", dest="runonly", 218 "handler. Two common values are none and printdiff but the handler "
290 help="Run only the specified task within the taskgraph of the specified targets (and any task dependencies those tasks may have).") 219 "may define more/less. none means only dump the signature, printdiff"
220 " means recursively compare the dumped signature with the most recent"
221 " one in a local build or sstate cache (can be used to find out why tasks re-run"
222 " when that is not expected)")
223
224 exec_group.add_argument("--revisions-changed", action="store_true",
225 help="Set the exit code depending on whether upstream floating "
226 "revisions have changed or not.")
227
228 exec_group.add_argument("-b", "--buildfile",
229 help="Execute tasks from a specific .bb recipe directly. WARNING: Does "
230 "not handle any dependencies from other recipes.")
231
232 logging_group.add_argument("-D", "--debug", action="count", default=0,
233 help="Increase the debug level. You can specify this "
234 "more than once. -D sets the debug level to 1, "
235 "where only bb.debug(1, ...) messages are printed "
236 "to stdout; -DD sets the debug level to 2, where "
237 "both bb.debug(1, ...) and bb.debug(2, ...) "
238 "messages are printed; etc. Without -D, no debug "
239 "messages are printed. Note that -D only affects "
240 "output to stdout. All debug messages are written "
241 "to ${T}/log.do_taskname, regardless of the debug "
242 "level.")
243
244 logging_group.add_argument("-l", "--log-domains", action="append", dest="debug_domains",
245 default=[],
246 help="Show debug logging for the specified logging domains.")
247
248 logging_group.add_argument("-v", "--verbose", action="store_true",
249 help="Enable tracing of shell tasks (with 'set -x'). "
250 "Also print bb.note(...) messages to stdout (in "
251 "addition to writing them to ${T}/log.do_<task>).")
252
253 logging_group.add_argument("-q", "--quiet", action="count", default=0,
254 help="Output less log message data to the terminal. You can specify this "
255 "more than once.")
256
257 logging_group.add_argument("-w", "--write-log", dest="writeeventlog",
258 default=os.environ.get("BBEVENTLOG"),
259 help="Writes the event log of the build to a bitbake event json file. "
260 "Use '' (empty string) to assign the name automatically.")
261
262
263 server_group.add_argument("-B", "--bind", default=False,
264 help="The name/address for the bitbake xmlrpc server to bind to.")
265
266 server_group.add_argument("-T", "--idle-timeout", type=float, dest="server_timeout",
267 default=os.getenv("BB_SERVER_TIMEOUT"),
268 help="Set timeout to unload bitbake server due to inactivity, "
269 "set to -1 means no unload, "
270 "default: Environment variable BB_SERVER_TIMEOUT.")
271
272 server_group.add_argument("--remote-server",
273 default=os.environ.get("BBSERVER"),
274 help="Connect to the specified server.")
275
276 server_group.add_argument("-m", "--kill-server", action="store_true",
277 help="Terminate any running bitbake server.")
278
279 server_group.add_argument("--token", dest="xmlrpctoken",
280 default=os.environ.get("BBTOKEN"),
281 help="Specify the connection token to be used when connecting "
282 "to a remote server.")
283
284 server_group.add_argument("--observe-only", action="store_true",
285 help="Connect to a server as an observing-only client.")
286
287 server_group.add_argument("--status-only", action="store_true",
288 help="Check the status of the remote bitbake server.")
289
290 server_group.add_argument("--server-only", action="store_true",
291 help="Run bitbake without a UI, only starting a server "
292 "(cooker) process.")
293
294
295 config_group.add_argument("-r", "--read", action="append", dest="prefile", default=[],
296 help="Read the specified file before bitbake.conf.")
297
298 config_group.add_argument("-R", "--postread", action="append", dest="postfile", default=[],
299 help="Read the specified file after bitbake.conf.")
300
301
302 config_group.add_argument("-I", "--ignore-deps", action="append",
303 dest="extra_assume_provided", default=[],
304 help="Assume these dependencies don't exist and are already provided "
305 "(equivalent to ASSUME_PROVIDED). Useful to make dependency "
306 "graphs more appealing.")
307
291 return parser 308 return parser
292 309
293 310
294class BitBakeConfigParameters(cookerdata.ConfigParameters): 311class BitBakeConfigParameters(cookerdata.ConfigParameters):
295 def parseCommandLine(self, argv=sys.argv): 312 def parseCommandLine(self, argv=sys.argv):
296 parser = create_bitbake_parser() 313 parser = create_bitbake_parser()
297 options, targets = parser.parse_args(argv) 314 options = parser.parse_intermixed_args(argv[1:])
315
316 if options.version:
317 print("BitBake Build Tool Core version %s" % bb.__version__)
318 sys.exit(0)
298 319
299 if options.quiet and options.verbose: 320 if options.quiet and options.verbose:
300 parser.error("options --quiet and --verbose are mutually exclusive") 321 parser.error("options --quiet and --verbose are mutually exclusive")
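
The conversion preserves optparse's behaviour in three places: BitbakeHelpFormatter overrides _get_help_string() so the @CHOICES@ placeholder is only expanded when help is actually rendered (listing the UI modules imports them all), the options are organised into argument groups for a tidier --help, and parse_intermixed_args() allows positional targets to appear between options as optparse did. A reduced sketch of the same wiring (the UI names are placeholders):

    import argparse

    class ChoicesHelpFormatter(argparse.HelpFormatter):
        def _get_help_string(self, action):
            # substitute lazily, at help-rendering time
            if action.dest == "ui":
                return action.help.replace("@CHOICES@", "knotty, ncurses")
            return action.help

    parser = argparse.ArgumentParser(formatter_class=ChoicesHelpFormatter,
                                     allow_abbrev=False, add_help=False)
    group = parser.add_argument_group("General options")
    group.add_argument("targets", nargs="*", metavar="recipename/target")
    group.add_argument("-u", "--ui", default="knotty",
                       help="The user interface to use (@CHOICES@ - default %(default)s).")
    group.add_argument("-h", "--help", action="help")

    options = parser.parse_intermixed_args(["core-image", "-u", "ncurses", "world"])
    print(options.targets, options.ui)  # -> ['core-image', 'world'] ncurses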
@@ -326,7 +347,7 @@ class BitBakeConfigParameters(cookerdata.ConfigParameters):
326 else: 347 else:
327 options.xmlrpcinterface = (None, 0) 348 options.xmlrpcinterface = (None, 0)
328 349
329 return options, targets[1:] 350 return options, options.targets
330 351
331 352
332def bitbake_main(configParams, configuration): 353def bitbake_main(configParams, configuration):
@@ -391,6 +412,9 @@ def bitbake_main(configParams, configuration):
391 412
392 return 1 413 return 1
393 414
415def timestamp():
416 return datetime.datetime.now().strftime('%H:%M:%S.%f')
417
394def setup_bitbake(configParams, extrafeatures=None): 418def setup_bitbake(configParams, extrafeatures=None):
395 # Ensure logging messages get sent to the UI as events 419 # Ensure logging messages get sent to the UI as events
396 handler = bb.event.LogHandler() 420 handler = bb.event.LogHandler()
@@ -398,6 +422,11 @@ def setup_bitbake(configParams, extrafeatures=None):
398 # In status only mode there are no logs and no UI 422 # In status only mode there are no logs and no UI
399 logger.addHandler(handler) 423 logger.addHandler(handler)
400 424
425 if configParams.dump_signatures:
426 if extrafeatures is None:
427 extrafeatures = []
428 extrafeatures.append(bb.cooker.CookerFeatures.RECIPE_SIGGEN_INFO)
429
401 if configParams.server_only: 430 if configParams.server_only:
402 featureset = [] 431 featureset = []
403 ui_module = None 432 ui_module = None
@@ -425,7 +454,7 @@ def setup_bitbake(configParams, extrafeatures=None):
425 retries = 8 454 retries = 8
426 while retries: 455 while retries:
427 try: 456 try:
428 topdir, lock = lockBitbake() 457 topdir, lock, lockfile = lockBitbake()
429 sockname = topdir + "/bitbake.sock" 458 sockname = topdir + "/bitbake.sock"
430 if lock: 459 if lock:
431 if configParams.status_only or configParams.kill_server: 460 if configParams.status_only or configParams.kill_server:
@@ -436,18 +465,22 @@ def setup_bitbake(configParams, extrafeatures=None):
436 logger.info("Starting bitbake server...") 465 logger.info("Starting bitbake server...")
437 # Clear the event queue since we already displayed messages 466 # Clear the event queue since we already displayed messages
438 bb.event.ui_queue = [] 467 bb.event.ui_queue = []
439 server = bb.server.process.BitBakeServer(lock, sockname, featureset, configParams.server_timeout, configParams.xmlrpcinterface) 468 server = bb.server.process.BitBakeServer(lock, sockname, featureset, configParams.server_timeout, configParams.xmlrpcinterface, configParams.profile)
440 469
441 else: 470 else:
442 logger.info("Reconnecting to bitbake server...") 471 logger.info("Reconnecting to bitbake server...")
443 if not os.path.exists(sockname): 472 if not os.path.exists(sockname):
 444 logger.info("Previous bitbake instance shutting down?, waiting to retry...") 473 logger.info("Previous bitbake instance shutting down? Waiting to retry... (%s)" % timestamp())
474 procs = bb.server.process.get_lockfile_process_msg(lockfile)
475 if procs:
476 logger.info("Processes holding bitbake.lock (missing socket %s):\n%s" % (sockname, procs))
477 logger.info("Directory listing: %s" % (str(os.listdir(topdir))))
445 i = 0 478 i = 0
446 lock = None 479 lock = None
447 # Wait for 5s or until we can get the lock 480 # Wait for 5s or until we can get the lock
448 while not lock and i < 50: 481 while not lock and i < 50:
449 time.sleep(0.1) 482 time.sleep(0.1)
450 _, lock = lockBitbake() 483 _, lock, _ = lockBitbake()
451 i += 1 484 i += 1
452 if lock: 485 if lock:
453 bb.utils.unlockfile(lock) 486 bb.utils.unlockfile(lock)
@@ -466,9 +499,9 @@ def setup_bitbake(configParams, extrafeatures=None):
466 retries -= 1 499 retries -= 1
467 tryno = 8 - retries 500 tryno = 8 - retries
468 if isinstance(e, (bb.server.process.ProcessTimeout, BrokenPipeError, EOFError, SystemExit)): 501 if isinstance(e, (bb.server.process.ProcessTimeout, BrokenPipeError, EOFError, SystemExit)):
469 logger.info("Retrying server connection (#%d)..." % tryno) 502 logger.info("Retrying server connection (#%d)... (%s)" % (tryno, timestamp()))
470 else: 503 else:
471 logger.info("Retrying server connection (#%d)... (%s)" % (tryno, traceback.format_exc())) 504 logger.info("Retrying server connection (#%d)... (%s, %s)" % (tryno, traceback.format_exc(), timestamp()))
472 505
473 if not retries: 506 if not retries:
474 bb.fatal("Unable to connect to bitbake server, or start one (server startup failures would be in bitbake-cookerdaemon.log).") 507 bb.fatal("Unable to connect to bitbake server, or start one (server startup failures would be in bitbake-cookerdaemon.log).")
@@ -497,5 +530,5 @@ def lockBitbake():
497 bb.error("Unable to find conf/bblayers.conf or conf/bitbake.conf. BBPATH is unset and/or not in a build directory?") 530 bb.error("Unable to find conf/bblayers.conf or conf/bitbake.conf. BBPATH is unset and/or not in a build directory?")
498 raise BBMainFatal 531 raise BBMainFatal
499 lockfile = topdir + "/bitbake.lock" 532 lockfile = topdir + "/bitbake.lock"
500 return topdir, bb.utils.lockfile(lockfile, False, False) 533 return topdir, bb.utils.lockfile(lockfile, False, False), lockfile
501 534
diff --git a/bitbake/lib/bb/monitordisk.py b/bitbake/lib/bb/monitordisk.py
index 98f2109ed2..f928210351 100644
--- a/bitbake/lib/bb/monitordisk.py
+++ b/bitbake/lib/bb/monitordisk.py
@@ -76,7 +76,12 @@ def getDiskData(BBDirs):
76 return None 76 return None
77 77
78 action = pathSpaceInodeRe.group(1) 78 action = pathSpaceInodeRe.group(1)
79 if action not in ("ABORT", "STOPTASKS", "WARN"): 79 if action == "ABORT":
80 # Emit a deprecation warning
81 logger.warnonce("The BB_DISKMON_DIRS \"ABORT\" action has been renamed to \"HALT\", update configuration")
82 action = "HALT"
83
84 if action not in ("HALT", "STOPTASKS", "WARN"):
80 printErr("Unknown disk space monitor action: %s" % action) 85 printErr("Unknown disk space monitor action: %s" % action)
81 return None 86 return None
82 87
@@ -177,7 +182,7 @@ class diskMonitor:
177 # use them to avoid printing too many warning messages 182 # use them to avoid printing too many warning messages
178 self.preFreeS = {} 183 self.preFreeS = {}
179 self.preFreeI = {} 184 self.preFreeI = {}
180 # This is for STOPTASKS and ABORT, to avoid printing the message 185 # This is for STOPTASKS and HALT, to avoid printing the message
181 # repeatedly while waiting for the tasks to finish 186 # repeatedly while waiting for the tasks to finish
182 self.checked = {} 187 self.checked = {}
183 for k in self.devDict: 188 for k in self.devDict:
@@ -219,8 +224,8 @@ class diskMonitor:
219 self.checked[k] = True 224 self.checked[k] = True
220 rq.finish_runqueue(False) 225 rq.finish_runqueue(False)
221 bb.event.fire(bb.event.DiskFull(dev, 'disk', freeSpace, path), self.configuration) 226 bb.event.fire(bb.event.DiskFull(dev, 'disk', freeSpace, path), self.configuration)
222 elif action == "ABORT" and not self.checked[k]: 227 elif action == "HALT" and not self.checked[k]:
223 logger.error("Immediately abort since the disk space monitor action is \"ABORT\"!") 228 logger.error("Immediately halt since the disk space monitor action is \"HALT\"!")
224 self.checked[k] = True 229 self.checked[k] = True
225 rq.finish_runqueue(True) 230 rq.finish_runqueue(True)
226 bb.event.fire(bb.event.DiskFull(dev, 'disk', freeSpace, path), self.configuration) 231 bb.event.fire(bb.event.DiskFull(dev, 'disk', freeSpace, path), self.configuration)
@@ -229,9 +234,10 @@ class diskMonitor:
229 freeInode = st.f_favail 234 freeInode = st.f_favail
230 235
231 if minInode and freeInode < minInode: 236 if minInode and freeInode < minInode:
232 # Some filesystems use dynamic inodes so can't run out 237 # Some filesystems use dynamic inodes so can't run out.
233 # (e.g. btrfs). This is reported by the inode count being 0. 238 # This is reported by the inode count being 0 (btrfs) or the free
234 if st.f_files == 0: 239 # inode count being -1 (cephfs).
240 if st.f_files == 0 or st.f_favail == -1:
235 self.devDict[k][2] = None 241 self.devDict[k][2] = None
236 continue 242 continue
237 # Always show warning, the self.checked would always be False if the action is WARN 243 # Always show warning, the self.checked would always be False if the action is WARN
@@ -245,8 +251,8 @@ class diskMonitor:
245 self.checked[k] = True 251 self.checked[k] = True
246 rq.finish_runqueue(False) 252 rq.finish_runqueue(False)
247 bb.event.fire(bb.event.DiskFull(dev, 'inode', freeInode, path), self.configuration) 253 bb.event.fire(bb.event.DiskFull(dev, 'inode', freeInode, path), self.configuration)
248 elif action == "ABORT" and not self.checked[k]: 254 elif action == "HALT" and not self.checked[k]:
249 logger.error("Immediately abort since the disk space monitor action is \"ABORT\"!") 255 logger.error("Immediately halt since the disk space monitor action is \"HALT\"!")
250 self.checked[k] = True 256 self.checked[k] = True
251 rq.finish_runqueue(True) 257 rq.finish_runqueue(True)
252 bb.event.fire(bb.event.DiskFull(dev, 'inode', freeInode, path), self.configuration) 258 bb.event.fire(bb.event.DiskFull(dev, 'inode', freeInode, path), self.configuration)
diff --git a/bitbake/lib/bb/msg.py b/bitbake/lib/bb/msg.py
index 291b38ff7f..3e18596faa 100644
--- a/bitbake/lib/bb/msg.py
+++ b/bitbake/lib/bb/msg.py
@@ -30,7 +30,9 @@ class BBLogFormatter(logging.Formatter):
30 PLAIN = logging.INFO + 1 30 PLAIN = logging.INFO + 1
31 VERBNOTE = logging.INFO + 2 31 VERBNOTE = logging.INFO + 2
32 ERROR = logging.ERROR 32 ERROR = logging.ERROR
33 ERRORONCE = logging.ERROR - 1
33 WARNING = logging.WARNING 34 WARNING = logging.WARNING
35 WARNONCE = logging.WARNING - 1
34 CRITICAL = logging.CRITICAL 36 CRITICAL = logging.CRITICAL
35 37
36 levelnames = { 38 levelnames = {
@@ -42,7 +44,9 @@ class BBLogFormatter(logging.Formatter):
42 PLAIN : '', 44 PLAIN : '',
43 VERBNOTE: 'NOTE', 45 VERBNOTE: 'NOTE',
44 WARNING : 'WARNING', 46 WARNING : 'WARNING',
47 WARNONCE : 'WARNING',
45 ERROR : 'ERROR', 48 ERROR : 'ERROR',
49 ERRORONCE : 'ERROR',
46 CRITICAL: 'ERROR', 50 CRITICAL: 'ERROR',
47 } 51 }
48 52
@@ -58,7 +62,9 @@ class BBLogFormatter(logging.Formatter):
58 PLAIN : BASECOLOR, 62 PLAIN : BASECOLOR,
59 VERBNOTE: BASECOLOR, 63 VERBNOTE: BASECOLOR,
60 WARNING : YELLOW, 64 WARNING : YELLOW,
65 WARNONCE : YELLOW,
61 ERROR : RED, 66 ERROR : RED,
67 ERRORONCE : RED,
62 CRITICAL: RED, 68 CRITICAL: RED,
63 } 69 }
64 70
@@ -121,6 +127,22 @@ class BBLogFilter(object):
121 return True 127 return True
122 return False 128 return False
123 129
130class LogFilterShowOnce(logging.Filter):
131 def __init__(self):
132 self.seen_warnings = set()
133 self.seen_errors = set()
134
135 def filter(self, record):
136 if record.levelno == bb.msg.BBLogFormatter.WARNONCE:
137 if record.msg in self.seen_warnings:
138 return False
139 self.seen_warnings.add(record.msg)
140 if record.levelno == bb.msg.BBLogFormatter.ERRORONCE:
141 if record.msg in self.seen_errors:
142 return False
143 self.seen_errors.add(record.msg)
144 return True
145
124class LogFilterGEQLevel(logging.Filter): 146class LogFilterGEQLevel(logging.Filter):
125 def __init__(self, level): 147 def __init__(self, level):
126 self.strlevel = str(level) 148 self.strlevel = str(level)
@@ -206,8 +228,9 @@ def logger_create(name, output=sys.stderr, level=logging.INFO, preserve_handlers
206 """Standalone logger creation function""" 228 """Standalone logger creation function"""
207 logger = logging.getLogger(name) 229 logger = logging.getLogger(name)
208 console = logging.StreamHandler(output) 230 console = logging.StreamHandler(output)
231 console.addFilter(bb.msg.LogFilterShowOnce())
209 format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s") 232 format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
210 if color == 'always' or (color == 'auto' and output.isatty()): 233 if color == 'always' or (color == 'auto' and output.isatty() and os.environ.get('NO_COLOR', '') == ''):
211 format.enable_color() 234 format.enable_color()
212 console.setFormatter(format) 235 console.setFormatter(format)
213 if preserve_handlers: 236 if preserve_handlers:
@@ -293,10 +316,17 @@ def setLoggingConfig(defaultconfig, userconfigfile=None):
293 316
294 # Convert all level parameters to integers in case users want to use the 317 # Convert all level parameters to integers in case users want to use the
295 # bitbake defined level names 318 # bitbake defined level names
296 for h in logconfig["handlers"].values(): 319 for name, h in logconfig["handlers"].items():
297 if "level" in h: 320 if "level" in h:
298 h["level"] = bb.msg.stringToLevel(h["level"]) 321 h["level"] = bb.msg.stringToLevel(h["level"])
299 322
323 # Every handler needs its own instance of the once filter.
324 once_filter_name = name + ".showonceFilter"
325 logconfig.setdefault("filters", {})[once_filter_name] = {
326 "()": "bb.msg.LogFilterShowOnce",
327 }
328 h.setdefault("filters", []).append(once_filter_name)
329
300 for l in logconfig["loggers"].values(): 330 for l in logconfig["loggers"].values():
301 if "level" in l: 331 if "level" in l:
302 l["level"] = bb.msg.stringToLevel(l["level"]) 332 l["level"] = bb.msg.stringToLevel(l["level"])
diff --git a/bitbake/lib/bb/parse/__init__.py b/bitbake/lib/bb/parse/__init__.py
index c01807ba87..a4358f1374 100644
--- a/bitbake/lib/bb/parse/__init__.py
+++ b/bitbake/lib/bb/parse/__init__.py
@@ -60,6 +60,14 @@ def cached_mtime_noerror(f):
60 return 0 60 return 0
61 return __mtime_cache[f] 61 return __mtime_cache[f]
62 62
63def check_mtime(f, mtime):
64 try:
65 current_mtime = os.stat(f)[stat.ST_MTIME]
66 __mtime_cache[f] = current_mtime
67 except OSError:
68 current_mtime = 0
69 return current_mtime == mtime
70
63def update_mtime(f): 71def update_mtime(f):
64 try: 72 try:
65 __mtime_cache[f] = os.stat(f)[stat.ST_MTIME] 73 __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
@@ -99,12 +107,12 @@ def supports(fn, data):
99 return 1 107 return 1
100 return 0 108 return 0
101 109
102def handle(fn, data, include = 0): 110def handle(fn, data, include=0, baseconfig=False):
103 """Call the handler that is appropriate for this file""" 111 """Call the handler that is appropriate for this file"""
104 for h in handlers: 112 for h in handlers:
105 if h['supports'](fn, data): 113 if h['supports'](fn, data):
106 with data.inchistory.include(fn): 114 with data.inchistory.include(fn):
107 return h['handle'](fn, data, include) 115 return h['handle'](fn, data, include, baseconfig)
108 raise ParseError("not a BitBake file", fn) 116 raise ParseError("not a BitBake file", fn)
109 117
110def init(fn, data): 118def init(fn, data):
@@ -113,6 +121,8 @@ def init(fn, data):
113 return h['init'](data) 121 return h['init'](data)
114 122
115def init_parser(d): 123def init_parser(d):
124 if hasattr(bb.parse, "siggen"):
125 bb.parse.siggen.exit()
116 bb.parse.siggen = bb.siggen.init(d) 126 bb.parse.siggen = bb.siggen.init(d)
117 127
118def resolve_file(fn, d): 128def resolve_file(fn, d):
diff --git a/bitbake/lib/bb/parse/ast.py b/bitbake/lib/bb/parse/ast.py
index 50a88f7da7..7581d003fd 100644
--- a/bitbake/lib/bb/parse/ast.py
+++ b/bitbake/lib/bb/parse/ast.py
@@ -9,6 +9,7 @@
9# SPDX-License-Identifier: GPL-2.0-only 9# SPDX-License-Identifier: GPL-2.0-only
10# 10#
11 11
12import sys
12import bb 13import bb
13from bb import methodpool 14from bb import methodpool
14from bb.parse import logger 15from bb.parse import logger
@@ -130,6 +131,10 @@ class DataNode(AstNode):
130 else: 131 else:
131 val = groupd["value"] 132 val = groupd["value"]
132 133
134 if ":append" in key or ":remove" in key or ":prepend" in key:
135 if op in ["append", "prepend", "postdot", "predot", "ques"]:
136 bb.warn(key + " " + groupd[op] + " is not a recommended operator combination, please replace it.")
137
133 flag = None 138 flag = None
134 if 'flag' in groupd and groupd['flag'] is not None: 139 if 'flag' in groupd and groupd['flag'] is not None:
135 flag = groupd['flag'] 140 flag = groupd['flag']
@@ -145,7 +150,7 @@ class DataNode(AstNode):
145 data.setVar(key, val, parsing=True, **loginfo) 150 data.setVar(key, val, parsing=True, **loginfo)
146 151
147class MethodNode(AstNode): 152class MethodNode(AstNode):
148 tr_tbl = str.maketrans('/.+-@%&', '_______') 153 tr_tbl = str.maketrans('/.+-@%&~', '________')
149 154
150 def __init__(self, filename, lineno, func_name, body, python, fakeroot): 155 def __init__(self, filename, lineno, func_name, body, python, fakeroot):
151 AstNode.__init__(self, filename, lineno) 156 AstNode.__init__(self, filename, lineno)
@@ -206,10 +211,12 @@ class ExportFuncsNode(AstNode):
206 211
207 def eval(self, data): 212 def eval(self, data):
208 213
214 sentinel = " # Export function set\n"
209 for func in self.n: 215 for func in self.n:
210 calledfunc = self.classname + "_" + func 216 calledfunc = self.classname + "_" + func
211 217
212 if data.getVar(func, False) and not data.getVarFlag(func, 'export_func', False): 218 basevar = data.getVar(func, False)
219 if basevar and sentinel not in basevar:
213 continue 220 continue
214 221
215 if data.getVar(func, False): 222 if data.getVar(func, False):
@@ -219,19 +226,18 @@ class ExportFuncsNode(AstNode):
219 for flag in [ "func", "python" ]: 226 for flag in [ "func", "python" ]:
220 if data.getVarFlag(calledfunc, flag, False): 227 if data.getVarFlag(calledfunc, flag, False):
221 data.setVarFlag(func, flag, data.getVarFlag(calledfunc, flag, False)) 228 data.setVarFlag(func, flag, data.getVarFlag(calledfunc, flag, False))
222 for flag in [ "dirs" ]: 229 for flag in ["dirs", "cleandirs", "fakeroot"]:
223 if data.getVarFlag(func, flag, False): 230 if data.getVarFlag(func, flag, False):
224 data.setVarFlag(calledfunc, flag, data.getVarFlag(func, flag, False)) 231 data.setVarFlag(calledfunc, flag, data.getVarFlag(func, flag, False))
225 data.setVarFlag(func, "filename", "autogenerated") 232 data.setVarFlag(func, "filename", "autogenerated")
226 data.setVarFlag(func, "lineno", 1) 233 data.setVarFlag(func, "lineno", 1)
227 234
228 if data.getVarFlag(calledfunc, "python", False): 235 if data.getVarFlag(calledfunc, "python", False):
229 data.setVar(func, " bb.build.exec_func('" + calledfunc + "', d)\n", parsing=True) 236 data.setVar(func, sentinel + " bb.build.exec_func('" + calledfunc + "', d)\n", parsing=True)
230 else: 237 else:
231 if "-" in self.classname: 238 if "-" in self.classname:
232 bb.fatal("The classname %s contains a dash character and is calling an sh function %s using EXPORT_FUNCTIONS. Since a dash is illegal in sh function names, this cannot work, please rename the class or don't use EXPORT_FUNCTIONS." % (self.classname, calledfunc)) 239 bb.fatal("The classname %s contains a dash character and is calling an sh function %s using EXPORT_FUNCTIONS. Since a dash is illegal in sh function names, this cannot work, please rename the class or don't use EXPORT_FUNCTIONS." % (self.classname, calledfunc))
233 data.setVar(func, " " + calledfunc + "\n", parsing=True) 240 data.setVar(func, sentinel + " " + calledfunc + "\n", parsing=True)
234 data.setVarFlag(func, 'export_func', '1')
235 241
236class AddTaskNode(AstNode): 242class AddTaskNode(AstNode):
237 def __init__(self, filename, lineno, func, before, after): 243 def __init__(self, filename, lineno, func, before, after):
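
The sentinel replaces the old 'export_func' flag: the generated wrapper now carries a marker comment as its first line, and a body is only regenerated while that marker is still present, so a hand-written override of the same function name survives re-parsing. The two generated body shapes, sketched with illustrative names:

    sentinel = " # Export function set\n"

    def exported_body(classname, func, is_python):
        # EXPORT_FUNCTIONS emits a thin wrapper calling classname_func
        calledfunc = classname + "_" + func
        if is_python:
            return sentinel + " bb.build.exec_func('" + calledfunc + "', d)\n"
        return sentinel + " " + calledfunc + "\n"

    body = exported_body("autotools", "do_configure", is_python=False)
    print(sentinel in body)  # True -> still autogenerated, safe to overwrite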
@@ -265,6 +271,41 @@ class BBHandlerNode(AstNode):
265 data.setVarFlag(h, "handler", 1) 271 data.setVarFlag(h, "handler", 1)
266 data.setVar('__BBHANDLERS', bbhands) 272 data.setVar('__BBHANDLERS', bbhands)
267 273
274class PyLibNode(AstNode):
275 def __init__(self, filename, lineno, libdir, namespace):
276 AstNode.__init__(self, filename, lineno)
277 self.libdir = libdir
278 self.namespace = namespace
279
280 def eval(self, data):
281 global_mods = (data.getVar("BB_GLOBAL_PYMODULES") or "").split()
282 for m in global_mods:
283 if m not in bb.utils._context:
284 bb.utils._context[m] = __import__(m)
285
286 libdir = data.expand(self.libdir)
287 if libdir not in sys.path:
288 sys.path.append(libdir)
289 try:
290 bb.utils._context[self.namespace] = __import__(self.namespace)
291 toimport = getattr(bb.utils._context[self.namespace], "BBIMPORTS", [])
292 for i in toimport:
293 bb.utils._context[self.namespace] = __import__(self.namespace + "." + i)
294 mod = getattr(bb.utils._context[self.namespace], i)
295 fn = getattr(mod, "__file__")
296 funcs = {}
297 for f in dir(mod):
298 if f.startswith("_"):
299 continue
300 fcall = getattr(mod, f)
301 if not callable(fcall):
302 continue
303 funcs[f] = fcall
304 bb.codeparser.add_module_functions(fn, funcs, "%s.%s" % (self.namespace, i))
305
306 except AttributeError as e:
307 bb.error("Error importing OE modules: %s" % str(e))
308
268class InheritNode(AstNode): 309class InheritNode(AstNode):
269 def __init__(self, filename, lineno, classes): 310 def __init__(self, filename, lineno, classes):
270 AstNode.__init__(self, filename, lineno) 311 AstNode.__init__(self, filename, lineno)
@@ -273,6 +314,16 @@ class InheritNode(AstNode):
273 def eval(self, data): 314 def eval(self, data):
274 bb.parse.BBHandler.inherit(self.classes, self.filename, self.lineno, data) 315 bb.parse.BBHandler.inherit(self.classes, self.filename, self.lineno, data)
275 316
317class InheritDeferredNode(AstNode):
318 def __init__(self, filename, lineno, classes):
319 AstNode.__init__(self, filename, lineno)
320 self.inherit = (classes, filename, lineno)
321
322 def eval(self, data):
323 inherits = data.getVar('__BBDEFINHERITS', False) or []
324 inherits.append(self.inherit)
325 data.setVar('__BBDEFINHERITS', inherits)
326
276def handleInclude(statements, filename, lineno, m, force): 327def handleInclude(statements, filename, lineno, m, force):
277 statements.append(IncludeNode(filename, lineno, m.group(1), force)) 328 statements.append(IncludeNode(filename, lineno, m.group(1), force))
278 329
@@ -316,10 +367,17 @@ def handleDelTask(statements, filename, lineno, m):
316def handleBBHandlers(statements, filename, lineno, m): 367def handleBBHandlers(statements, filename, lineno, m):
317 statements.append(BBHandlerNode(filename, lineno, m.group(1))) 368 statements.append(BBHandlerNode(filename, lineno, m.group(1)))
318 369
370def handlePyLib(statements, filename, lineno, m):
371 statements.append(PyLibNode(filename, lineno, m.group(1), m.group(2)))
372
319def handleInherit(statements, filename, lineno, m): 373def handleInherit(statements, filename, lineno, m):
320 classes = m.group(1) 374 classes = m.group(1)
321 statements.append(InheritNode(filename, lineno, classes)) 375 statements.append(InheritNode(filename, lineno, classes))
322 376
377def handleInheritDeferred(statements, filename, lineno, m):
378 classes = m.group(1)
379 statements.append(InheritDeferredNode(filename, lineno, classes))
380
323def runAnonFuncs(d): 381def runAnonFuncs(d):
324 code = [] 382 code = []
325 for funcname in d.getVar("__BBANONFUNCS", False) or []: 383 for funcname in d.getVar("__BBANONFUNCS", False) or []:
@@ -329,6 +387,10 @@ def runAnonFuncs(d):
329def finalize(fn, d, variant = None): 387def finalize(fn, d, variant = None):
330 saved_handlers = bb.event.get_handlers().copy() 388 saved_handlers = bb.event.get_handlers().copy()
331 try: 389 try:
390 # Found renamed variables. Exit immediately
391 if d.getVar("_FAILPARSINGERRORHANDLED", False) == True:
392 raise bb.BBHandledException()
393
332 for var in d.getVar('__BBHANDLERS', False) or []: 394 for var in d.getVar('__BBHANDLERS', False) or []:
333 # try to add the handler 395 # try to add the handler
334 handlerfn = d.getVarFlag(var, "filename", False) 396 handlerfn = d.getVarFlag(var, "filename", False)
@@ -353,6 +415,9 @@ def finalize(fn, d, variant = None):
353 415
354 d.setVar('BBINCLUDED', bb.parse.get_file_depends(d)) 416 d.setVar('BBINCLUDED', bb.parse.get_file_depends(d))
355 417
418 if d.getVar('__BBAUTOREV_SEEN') and d.getVar('__BBSRCREV_SEEN') and not d.getVar("__BBAUTOREV_ACTED_UPON"):
419 bb.fatal("AUTOREV/SRCPV set too late for the fetcher to work properly, please set the variables earlier in parsing. Erroring instead of later obtuse build failures.")
420
356 bb.event.fire(bb.event.RecipeParsed(fn), d) 421 bb.event.fire(bb.event.RecipeParsed(fn), d)
357 finally: 422 finally:
358 bb.event.set_handlers(saved_handlers) 423 bb.event.set_handlers(saved_handlers)
@@ -379,6 +444,14 @@ def multi_finalize(fn, d):
379 logger.debug("Appending .bbappend file %s to %s", append, fn) 444 logger.debug("Appending .bbappend file %s to %s", append, fn)
380 bb.parse.BBHandler.handle(append, d, True) 445 bb.parse.BBHandler.handle(append, d, True)
381 446
447 while True:
448 inherits = d.getVar('__BBDEFINHERITS', False) or []
449 if not inherits:
450 break
451 inherit, filename, lineno = inherits.pop(0)
452 d.setVar('__BBDEFINHERITS', inherits)
453 bb.parse.BBHandler.inherit(inherit, filename, lineno, d, deferred=True)
454
382 onlyfinalise = d.getVar("__ONLYFINALISE", False) 455 onlyfinalise = d.getVar("__ONLYFINALISE", False)
383 456
384 safe_d = d 457 safe_d = d
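The PyLibNode machinery added above implements the new addpylib directive: it appends a library directory to sys.path, imports the named namespace package, and registers every public callable from the submodules listed in BBIMPORTS with bb.codeparser for dependency tracking. A minimal sketch of a library laid out for this mechanism; the layer path, the mylib namespace and the function names are illustrative assumptions, not part of the patch:

# In a base configuration file (the directive is only honoured while
# parsing the base configuration, see the baseconfig guard added to
# ConfHandler.feeder() below):
#
#     addpylib ${LAYERDIR}/lib mylib

# lib/mylib/__init__.py
# Submodules named here are imported eagerly by PyLibNode.eval().
BBIMPORTS = ["utils"]

# lib/mylib/utils.py
def normalize_name(name):
    # A public callable: found by the dir(mod) scan and registered
    # with bb.codeparser.add_module_functions() under "mylib.utils".
    return name.strip().lower().replace("_", "-")

def _private_helper(value):
    # Skipped: names starting with "_" are ignored by the scan.
    return value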
diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py
index f8988b8631..c13e4b9755 100644
--- a/bitbake/lib/bb/parse/parse_py/BBHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/BBHandler.py
@@ -19,11 +19,9 @@ from . import ConfHandler
19from .. import resolve_file, ast, logger, ParseError 19from .. import resolve_file, ast, logger, ParseError
20from .ConfHandler import include, init 20from .ConfHandler import include, init
21 21
22# For compatibility 22__func_start_regexp__ = re.compile(r"(((?P<py>python(?=(\s|\()))|(?P<fr>fakeroot(?=\s)))\s*)*(?P<func>[\w\.\-\+\{\}\$:]+)?\s*\(\s*\)\s*{$" )
23bb.deprecate_import(__name__, "bb.parse", ["vars_from_file"])
24
25__func_start_regexp__ = re.compile(r"(((?P<py>python(?=(\s|\()))|(?P<fr>fakeroot(?=\s)))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$" )
26__inherit_regexp__ = re.compile(r"inherit\s+(.+)" ) 23__inherit_regexp__ = re.compile(r"inherit\s+(.+)" )
24__inherit_def_regexp__ = re.compile(r"inherit_defer\s+(.+)" )
27__export_func_regexp__ = re.compile(r"EXPORT_FUNCTIONS\s+(.+)" ) 25__export_func_regexp__ = re.compile(r"EXPORT_FUNCTIONS\s+(.+)" )
28__addtask_regexp__ = re.compile(r"addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*") 26__addtask_regexp__ = re.compile(r"addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*")
29__deltask_regexp__ = re.compile(r"deltask\s+(.+)") 27__deltask_regexp__ = re.compile(r"deltask\s+(.+)")
@@ -36,6 +34,7 @@ __infunc__ = []
36__inpython__ = False 34__inpython__ = False
37__body__ = [] 35__body__ = []
38__classname__ = "" 36__classname__ = ""
37__residue__ = []
39 38
40cached_statements = {} 39cached_statements = {}
41 40
@@ -43,31 +42,46 @@ def supports(fn, d):
43 """Return True if fn has a supported extension""" 42 """Return True if fn has a supported extension"""
44 return os.path.splitext(fn)[-1] in [".bb", ".bbclass", ".inc"] 43 return os.path.splitext(fn)[-1] in [".bb", ".bbclass", ".inc"]
45 44
46def inherit(files, fn, lineno, d): 45def inherit(files, fn, lineno, d, deferred=False):
47 __inherit_cache = d.getVar('__inherit_cache', False) or [] 46 __inherit_cache = d.getVar('__inherit_cache', False) or []
47 #if "${" in files and not deferred:
48 # bb.warn("%s:%s has non deferred conditional inherit" % (fn, lineno))
48 files = d.expand(files).split() 49 files = d.expand(files).split()
49 for file in files: 50 for file in files:
50 if not os.path.isabs(file) and not file.endswith(".bbclass"): 51 classtype = d.getVar("__bbclasstype", False)
51 file = os.path.join('classes', '%s.bbclass' % file) 52 origfile = file
52 53 for t in ["classes-" + classtype, "classes"]:
53 if not os.path.isabs(file): 54 file = origfile
54 bbpath = d.getVar("BBPATH") 55 if not os.path.isabs(file) and not file.endswith(".bbclass"):
55 abs_fn, attempts = bb.utils.which(bbpath, file, history=True) 56 file = os.path.join(t, '%s.bbclass' % file)
56 for af in attempts: 57
57 if af != abs_fn: 58 if not os.path.isabs(file):
58 bb.parse.mark_dependency(d, af) 59 bbpath = d.getVar("BBPATH")
59 if abs_fn: 60 abs_fn, attempts = bb.utils.which(bbpath, file, history=True)
60 file = abs_fn 61 for af in attempts:
62 if af != abs_fn:
63 bb.parse.mark_dependency(d, af)
64 if abs_fn:
65 file = abs_fn
66
67 if os.path.exists(file):
68 break
69
70 if not os.path.exists(file):
71 raise ParseError("Could not inherit file %s" % (file), fn, lineno)
61 72
62 if not file in __inherit_cache: 73 if not file in __inherit_cache:
63 logger.debug("Inheriting %s (from %s:%d)" % (file, fn, lineno)) 74 logger.debug("Inheriting %s (from %s:%d)" % (file, fn, lineno))
64 __inherit_cache.append( file ) 75 __inherit_cache.append( file )
65 d.setVar('__inherit_cache', __inherit_cache) 76 d.setVar('__inherit_cache', __inherit_cache)
66 include(fn, file, lineno, d, "inherit") 77 try:
78 bb.parse.handle(file, d, True)
79 except (IOError, OSError) as exc:
80 raise ParseError("Could not inherit file %s: %s" % (fn, exc.strerror), fn, lineno)
67 __inherit_cache = d.getVar('__inherit_cache', False) or [] 81 __inherit_cache = d.getVar('__inherit_cache', False) or []
68 82
69def get_statements(filename, absolute_filename, base_name): 83def get_statements(filename, absolute_filename, base_name):
70 global cached_statements 84 global cached_statements, __residue__, __body__
71 85
72 try: 86 try:
73 return cached_statements[absolute_filename] 87 return cached_statements[absolute_filename]
@@ -87,12 +101,17 @@ def get_statements(filename, absolute_filename, base_name):
87 # add a blank line to close out any python definition 101 # add a blank line to close out any python definition
88 feeder(lineno, "", filename, base_name, statements, eof=True) 102 feeder(lineno, "", filename, base_name, statements, eof=True)
89 103
104 if __residue__:
105 raise ParseError("Unparsed lines %s: %s" % (filename, str(__residue__)), filename, lineno)
106 if __body__:
107 raise ParseError("Unparsed lines from unclosed function %s: %s" % (filename, str(__body__)), filename, lineno)
108
90 if filename.endswith(".bbclass") or filename.endswith(".inc"): 109 if filename.endswith(".bbclass") or filename.endswith(".inc"):
91 cached_statements[absolute_filename] = statements 110 cached_statements[absolute_filename] = statements
92 return statements 111 return statements
93 112
94def handle(fn, d, include): 113def handle(fn, d, include, baseconfig=False):
95 global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __residue__, __classname__ 114 global __infunc__, __body__, __residue__, __classname__
96 __body__ = [] 115 __body__ = []
97 __infunc__ = [] 116 __infunc__ = []
98 __classname__ = "" 117 __classname__ = ""
@@ -144,7 +163,7 @@ def handle(fn, d, include):
144 return d 163 return d
145 164
146def feeder(lineno, s, fn, root, statements, eof=False): 165def feeder(lineno, s, fn, root, statements, eof=False):
147 global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __def_regexp__, __python_func_regexp__, __inpython__, __infunc__, __body__, bb, __residue__, __classname__ 166 global __inpython__, __infunc__, __body__, __residue__, __classname__
148 167
149 # Check tabs in python functions: 168 # Check tabs in python functions:
150 # - def py_funcname(): covered by __inpython__ 169 # - def py_funcname(): covered by __inpython__
@@ -181,10 +200,10 @@ def feeder(lineno, s, fn, root, statements, eof=False):
181 200
182 if s and s[0] == '#': 201 if s and s[0] == '#':
183 if len(__residue__) != 0 and __residue__[0][0] != "#": 202 if len(__residue__) != 0 and __residue__[0][0] != "#":
184 bb.fatal("There is a comment on line %s of file %s (%s) which is in the middle of a multiline expression.\nBitbake used to ignore these but no longer does so, please fix your metadata as errors are likely as a result of this change." % (lineno, fn, s)) 203 bb.fatal("There is a comment on line %s of file %s:\n'''\n%s\n'''\nwhich is in the middle of a multiline expression. This syntax is invalid, please correct it." % (lineno, fn, s))
185 204
186 if len(__residue__) != 0 and __residue__[0][0] == "#" and (not s or s[0] != "#"): 205 if len(__residue__) != 0 and __residue__[0][0] == "#" and (not s or s[0] != "#"):
 187 bb.fatal("There is a confusing multiline, partially commented expression on line %s of file %s (%s).\nPlease clarify whether this is all a comment or should be parsed." % (lineno, fn, s)) 206 bb.fatal("There is a confusing multiline, partially commented expression on line %s of file %s:\n%s\nPlease clarify whether this is all a comment or should be parsed." % (lineno - len(__residue__), fn, "\n".join(__residue__)))
188 207
189 if s and s[-1] == '\\': 208 if s and s[-1] == '\\':
190 __residue__.append(s[:-1]) 209 __residue__.append(s[:-1])
@@ -255,7 +274,12 @@ def feeder(lineno, s, fn, root, statements, eof=False):
255 ast.handleInherit(statements, fn, lineno, m) 274 ast.handleInherit(statements, fn, lineno, m)
256 return 275 return
257 276
258 return ConfHandler.feeder(lineno, s, fn, statements) 277 m = __inherit_def_regexp__.match(s)
278 if m:
279 ast.handleInheritDeferred(statements, fn, lineno, m)
280 return
281
282 return ConfHandler.feeder(lineno, s, fn, statements, conffile=False)
259 283
260# Add us to the handlers list 284# Add us to the handlers list
261from .. import handlers 285from .. import handlers
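Two behavioural points in the BBHandler.py hunk above are easy to miss: inherit() now searches a build-type-specific directory (classes-<__bbclasstype>/) before the generic classes/ directory, and failure to resolve a class is now a hard ParseError instead of a quiet include miss. A standalone sketch of the lookup order, assuming a plain list of BBPATH directories (simplified: the real code also marks the failed lookup attempts as parse dependencies):

import os

def find_bbclass(name, bbpath_dirs, classtype):
    # Mirrors the search loop in inherit(): for classes-<classtype>/
    # then classes/, resolve the candidate against BBPATH and stop at
    # the first file that exists.
    for subdir in ("classes-" + classtype, "classes"):
        candidate = name
        if not os.path.isabs(candidate) and not candidate.endswith(".bbclass"):
            candidate = os.path.join(subdir, "%s.bbclass" % candidate)
        if os.path.isabs(candidate):
            if os.path.exists(candidate):
                return candidate
            continue
        for d in bbpath_dirs:
            full = os.path.join(d, candidate)
            if os.path.exists(full):
                return full
    # Matches the new behaviour: an unresolved inherit is fatal.
    raise FileNotFoundError("Could not inherit file %s" % name)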
diff --git a/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
index f171c5c932..7826dee7d3 100644
--- a/bitbake/lib/bb/parse/parse_py/ConfHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
@@ -20,8 +20,8 @@ from bb.parse import ParseError, resolve_file, ast, logger, handle
20__config_regexp__ = re.compile( r""" 20__config_regexp__ = re.compile( r"""
21 ^ 21 ^
22 (?P<exp>export\s+)? 22 (?P<exp>export\s+)?
23 (?P<var>[a-zA-Z0-9\-_+.${}/~]+?) 23 (?P<var>[a-zA-Z0-9\-_+.${}/~:]+?)
24 (\[(?P<flag>[a-zA-Z0-9\-_+.]+)\])? 24 (\[(?P<flag>[a-zA-Z0-9\-_+.][a-zA-Z0-9\-_+.@]*)\])?
25 25
26 \s* ( 26 \s* (
27 (?P<colon>:=) | 27 (?P<colon>:=) |
@@ -45,13 +45,11 @@ __include_regexp__ = re.compile( r"include\s+(.+)" )
45__require_regexp__ = re.compile( r"require\s+(.+)" ) 45__require_regexp__ = re.compile( r"require\s+(.+)" )
46__export_regexp__ = re.compile( r"export\s+([a-zA-Z0-9\-_+.${}/~]+)$" ) 46__export_regexp__ = re.compile( r"export\s+([a-zA-Z0-9\-_+.${}/~]+)$" )
47__unset_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)$" ) 47__unset_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)$" )
48__unset_flag_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)\[([a-zA-Z0-9\-_+.]+)\]$" ) 48__unset_flag_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)\[([a-zA-Z0-9\-_+.][a-zA-Z0-9\-_+.@]+)\]$" )
49__addpylib_regexp__ = re.compile(r"addpylib\s+(.+)\s+(.+)" )
49 50
50def init(data): 51def init(data):
51 topdir = data.getVar('TOPDIR', False) 52 return
52 if not topdir:
53 data.setVar('TOPDIR', os.getcwd())
54
55 53
56def supports(fn, d): 54def supports(fn, d):
57 return fn[-5:] == ".conf" 55 return fn[-5:] == ".conf"
@@ -105,12 +103,12 @@ def include_single_file(parentfn, fn, lineno, data, error_out):
105# We have an issue where a UI might want to enforce particular settings such as 103# We have an issue where a UI might want to enforce particular settings such as
106# an empty DISTRO variable. If configuration files do something like assigning 104# an empty DISTRO variable. If configuration files do something like assigning
107# a weak default, it turns out to be very difficult to filter out these changes, 105# a weak default, it turns out to be very difficult to filter out these changes,
 108# particularly when the weak default might appear half way through parsing a chain 106# particularly when the weak default might appear half way through parsing a chain
109# of configuration files. We therefore let the UIs hook into configuration file 107# of configuration files. We therefore let the UIs hook into configuration file
110# parsing. This turns out to be a hard problem to solve any other way. 108# parsing. This turns out to be a hard problem to solve any other way.
111confFilters = [] 109confFilters = []
112 110
113def handle(fn, data, include): 111def handle(fn, data, include, baseconfig=False):
114 init(data) 112 init(data)
115 113
116 if include == 0: 114 if include == 0:
@@ -128,21 +126,26 @@ def handle(fn, data, include):
128 s = f.readline() 126 s = f.readline()
129 if not s: 127 if not s:
130 break 128 break
129 origlineno = lineno
130 origline = s
131 w = s.strip() 131 w = s.strip()
132 # skip empty lines 132 # skip empty lines
133 if not w: 133 if not w:
134 continue 134 continue
135 s = s.rstrip() 135 s = s.rstrip()
136 while s[-1] == '\\': 136 while s[-1] == '\\':
137 s2 = f.readline().rstrip() 137 line = f.readline()
138 origline += line
139 s2 = line.rstrip()
138 lineno = lineno + 1 140 lineno = lineno + 1
139 if (not s2 or s2 and s2[0] != "#") and s[0] == "#" : 141 if (not s2 or s2 and s2[0] != "#") and s[0] == "#" :
140 bb.fatal("There is a confusing multiline, partially commented expression on line %s of file %s (%s).\nPlease clarify whether this is all a comment or should be parsed." % (lineno, fn, s)) 142 bb.fatal("There is a confusing multiline, partially commented expression starting on line %s of file %s:\n%s\nPlease clarify whether this is all a comment or should be parsed." % (origlineno, fn, origline))
143
141 s = s[:-1] + s2 144 s = s[:-1] + s2
142 # skip comments 145 # skip comments
143 if s[0] == '#': 146 if s[0] == '#':
144 continue 147 continue
145 feeder(lineno, s, abs_fn, statements) 148 feeder(lineno, s, abs_fn, statements, baseconfig=baseconfig)
146 149
147 # DONE WITH PARSING... time to evaluate 150 # DONE WITH PARSING... time to evaluate
148 data.setVar('FILE', abs_fn) 151 data.setVar('FILE', abs_fn)
@@ -150,14 +153,14 @@ def handle(fn, data, include):
150 if oldfile: 153 if oldfile:
151 data.setVar('FILE', oldfile) 154 data.setVar('FILE', oldfile)
152 155
153 f.close()
154
155 for f in confFilters: 156 for f in confFilters:
156 f(fn, data) 157 f(fn, data)
157 158
158 return data 159 return data
159 160
160def feeder(lineno, s, fn, statements): 161# baseconfig is set for the bblayers/layer.conf cookerdata config parsing
 162# The function is also used by BBHandler, which passes conffile=False
163def feeder(lineno, s, fn, statements, baseconfig=False, conffile=True):
161 m = __config_regexp__.match(s) 164 m = __config_regexp__.match(s)
162 if m: 165 if m:
163 groupd = m.groupdict() 166 groupd = m.groupdict()
@@ -189,6 +192,11 @@ def feeder(lineno, s, fn, statements):
189 ast.handleUnsetFlag(statements, fn, lineno, m) 192 ast.handleUnsetFlag(statements, fn, lineno, m)
190 return 193 return
191 194
195 m = __addpylib_regexp__.match(s)
196 if baseconfig and conffile and m:
197 ast.handlePyLib(statements, fn, lineno, m)
198 return
199
192 raise ParseError("unparsed line: '%s'" % s, fn, lineno); 200 raise ParseError("unparsed line: '%s'" % s, fn, lineno);
193 201
194# Add us to the handlers list 202# Add us to the handlers list
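The character-class changes to __config_regexp__ and the flag patterns above are what allow the ':' override syntax in variable names and '@' inside varflag names to parse. A cut-down demonstration of just the variable/flag portion (a simplified regex for illustration, not the full assignment grammar):

import re

# ':' is now legal in variable names; '@' is legal in flag names,
# though not as the first character.
var_re = re.compile(
    r"^(?P<var>[a-zA-Z0-9\-_+.${}/~:]+?)"
    r"(\[(?P<flag>[a-zA-Z0-9\-_+.][a-zA-Z0-9\-_+.@]*)\])?"
    r"\s*=")

for line in ('SRC_URI:append = " file://fix.patch"',
             'do_fetch[network@wrapped] = "1"',
             'PV = "1.0"'):
    m = var_re.match(line)
    print(m.group("var"), m.group("flag"))
# Prints: SRC_URI:append None
#         do_fetch network@wrapped
#         PV None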
diff --git a/bitbake/lib/bb/persist_data.py b/bitbake/lib/bb/persist_data.py
index c6a209fb3f..bcca791edf 100644
--- a/bitbake/lib/bb/persist_data.py
+++ b/bitbake/lib/bb/persist_data.py
@@ -12,14 +12,14 @@ currently, providing a key/value store accessed by 'domain'.
12# 12#
13 13
14import collections 14import collections
15import collections.abc
15import contextlib 16import contextlib
16import functools 17import functools
17import logging 18import logging
18import os.path 19import os.path
19import sqlite3 20import sqlite3
20import sys 21import sys
21import warnings 22from collections.abc import Mapping
22from collections import Mapping
23 23
24sqlversion = sqlite3.sqlite_version_info 24sqlversion = sqlite3.sqlite_version_info
25if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3): 25if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
@@ -29,7 +29,7 @@ if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
29logger = logging.getLogger("BitBake.PersistData") 29logger = logging.getLogger("BitBake.PersistData")
30 30
31@functools.total_ordering 31@functools.total_ordering
32class SQLTable(collections.MutableMapping): 32class SQLTable(collections.abc.MutableMapping):
33 class _Decorators(object): 33 class _Decorators(object):
34 @staticmethod 34 @staticmethod
35 def retry(*, reconnect=True): 35 def retry(*, reconnect=True):
@@ -63,7 +63,7 @@ class SQLTable(collections.MutableMapping):
63 """ 63 """
64 Decorator that starts a database transaction and creates a database 64 Decorator that starts a database transaction and creates a database
65 cursor for performing queries. If no exception is thrown, the 65 cursor for performing queries. If no exception is thrown, the
66 database results are commited. If an exception occurs, the database 66 database results are committed. If an exception occurs, the database
67 is rolled back. In all cases, the cursor is closed after the 67 is rolled back. In all cases, the cursor is closed after the
68 function ends. 68 function ends.
69 69
@@ -208,7 +208,7 @@ class SQLTable(collections.MutableMapping):
208 208
209 def __lt__(self, other): 209 def __lt__(self, other):
210 if not isinstance(other, Mapping): 210 if not isinstance(other, Mapping):
211 raise NotImplemented 211 raise NotImplementedError()
212 212
213 return len(self) < len(other) 213 return len(self) < len(other)
214 214
@@ -238,55 +238,6 @@ class SQLTable(collections.MutableMapping):
238 def has_key(self, key): 238 def has_key(self, key):
239 return key in self 239 return key in self
240 240
241
242class PersistData(object):
243 """Deprecated representation of the bitbake persistent data store"""
244 def __init__(self, d):
245 warnings.warn("Use of PersistData is deprecated. Please use "
246 "persist(domain, d) instead.",
247 category=DeprecationWarning,
248 stacklevel=2)
249
250 self.data = persist(d)
251 logger.debug("Using '%s' as the persistent data cache",
252 self.data.filename)
253
254 def addDomain(self, domain):
255 """
256 Add a domain (pending deprecation)
257 """
258 return self.data[domain]
259
260 def delDomain(self, domain):
261 """
262 Removes a domain and all the data it contains
263 """
264 del self.data[domain]
265
266 def getKeyValues(self, domain):
267 """
268 Return a list of key + value pairs for a domain
269 """
270 return list(self.data[domain].items())
271
272 def getValue(self, domain, key):
273 """
274 Return the value of a key for a domain
275 """
276 return self.data[domain][key]
277
278 def setValue(self, domain, key, value):
279 """
280 Sets the value of a key for a domain
281 """
282 self.data[domain][key] = value
283
284 def delValue(self, domain, key):
285 """
286 Deletes a key/value pair
287 """
288 del self.data[domain][key]
289
290def persist(domain, d): 241def persist(domain, d):
291 """Convenience factory for SQLTable objects based upon metadata""" 242 """Convenience factory for SQLTable objects based upon metadata"""
292 import bb.utils 243 import bb.utils
@@ -298,4 +249,23 @@ def persist(domain, d):
298 249
299 bb.utils.mkdirhier(cachedir) 250 bb.utils.mkdirhier(cachedir)
300 cachefile = os.path.join(cachedir, "bb_persist_data.sqlite3") 251 cachefile = os.path.join(cachedir, "bb_persist_data.sqlite3")
301 return SQLTable(cachefile, domain) 252
253 try:
254 return SQLTable(cachefile, domain)
255 except sqlite3.OperationalError:
 256 # Sqlite fails to open the database when its path is too long.
 257 # Testing shows 504 characters is the longest path sqlite will
 258 # open.
259 # Note: This code is called before sanity.bbclass and its path length
260 # check
261 max_len = 504
262 if len(cachefile) > max_len:
263 logger.critical("The path of the cache file is too long "
264 "({0} chars > {1}) to be opened by sqlite! "
265 "Your cache file is \"{2}\"".format(
266 len(cachefile),
267 max_len,
268 cachefile))
269 sys.exit(1)
270 else:
271 raise
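For context, persist() remains the supported entry point to this store; the hunk above only wraps table creation so that an over-long cache path produces a clear diagnostic rather than a bare sqlite3.OperationalError. A rough usage sketch; the UPSTREAM_CHECKS domain name is an illustrative assumption, and d is a populated datastore providing PERSISTENT_DIR or CACHE:

import bb.persist_data

def remember(d, key, value):
    # persist() returns a dict-like SQLTable backed by
    # bb_persist_data.sqlite3; entries survive across bitbake runs.
    table = bb.persist_data.persist("UPSTREAM_CHECKS", d)
    table[key] = value

def recall(d, key, default=None):
    table = bb.persist_data.persist("UPSTREAM_CHECKS", d)
    return table.get(key, default)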
diff --git a/bitbake/lib/bb/process.py b/bitbake/lib/bb/process.py
index 7c3995cce5..4c7b6d39df 100644
--- a/bitbake/lib/bb/process.py
+++ b/bitbake/lib/bb/process.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright BitBake Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -60,7 +62,7 @@ class Popen(subprocess.Popen):
60 "close_fds": True, 62 "close_fds": True,
61 "preexec_fn": subprocess_setup, 63 "preexec_fn": subprocess_setup,
62 "stdout": subprocess.PIPE, 64 "stdout": subprocess.PIPE,
63 "stderr": subprocess.STDOUT, 65 "stderr": subprocess.PIPE,
64 "stdin": subprocess.PIPE, 66 "stdin": subprocess.PIPE,
65 "shell": False, 67 "shell": False,
66 } 68 }
@@ -142,7 +144,7 @@ def _logged_communicate(pipe, log, input, extrafiles):
142 while pipe.poll() is None: 144 while pipe.poll() is None:
143 read_all_pipes(log, rin, outdata, errdata) 145 read_all_pipes(log, rin, outdata, errdata)
144 146
145 # Pocess closed, drain all pipes... 147 # Process closed, drain all pipes...
146 read_all_pipes(log, rin, outdata, errdata) 148 read_all_pipes(log, rin, outdata, errdata)
147 finally: 149 finally:
148 log.flush() 150 log.flush()
@@ -181,5 +183,8 @@ def run(cmd, input=None, log=None, extrafiles=None, **options):
181 stderr = stderr.decode("utf-8") 183 stderr = stderr.decode("utf-8")
182 184
183 if pipe.returncode != 0: 185 if pipe.returncode != 0:
186 if log:
187 # Don't duplicate the output in the exception if logging it
188 raise ExecutionError(cmd, pipe.returncode, None, None)
184 raise ExecutionError(cmd, pipe.returncode, stdout, stderr) 189 raise ExecutionError(cmd, pipe.returncode, stdout, stderr)
185 return stdout, stderr 190 return stdout, stderr
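Two behavioural changes in bb.process above are worth noting together: stderr is now captured separately from stdout, and when a log is supplied the raised ExecutionError deliberately carries None for both streams because the output has already been written to the log. A short usage sketch (the git command is only an example):

import bb.process

try:
    stdout, stderr = bb.process.run("git describe --tags")
except bb.process.ExecutionError as e:
    # Without log=..., e.stdout and e.stderr hold the captured
    # streams; with log=..., both are None to avoid duplicating
    # output that is already in the log.
    print("command failed (rc=%s): %s" % (e.exitcode, e.stderr))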
diff --git a/bitbake/lib/bb/progress.py b/bitbake/lib/bb/progress.py
index d051ba0198..9518be77fb 100644
--- a/bitbake/lib/bb/progress.py
+++ b/bitbake/lib/bb/progress.py
@@ -94,12 +94,15 @@ class LineFilterProgressHandler(ProgressHandler):
94 while True: 94 while True:
95 breakpos = self._linebuffer.find('\n') + 1 95 breakpos = self._linebuffer.find('\n') + 1
96 if breakpos == 0: 96 if breakpos == 0:
97 break 97 # for the case when the line with progress ends with only '\r'
98 breakpos = self._linebuffer.find('\r') + 1
99 if breakpos == 0:
100 break
98 line = self._linebuffer[:breakpos] 101 line = self._linebuffer[:breakpos]
99 self._linebuffer = self._linebuffer[breakpos:] 102 self._linebuffer = self._linebuffer[breakpos:]
100 # Drop any line feeds and anything that precedes them 103 # Drop any line feeds and anything that precedes them
101 lbreakpos = line.rfind('\r') + 1 104 lbreakpos = line.rfind('\r') + 1
102 if lbreakpos: 105 if lbreakpos and lbreakpos != breakpos:
103 line = line[lbreakpos:] 106 line = line[lbreakpos:]
104 if self.writeline(filter_color(line)): 107 if self.writeline(filter_color(line)):
105 super().write(line) 108 super().write(line)
@@ -145,7 +148,7 @@ class MultiStageProgressReporter:
145 for tasks made up of python code spread across multiple 148 for tasks made up of python code spread across multiple
146 classes / functions - the progress reporter object can 149 classes / functions - the progress reporter object can
147 be passed around or stored at the object level and calls 150 be passed around or stored at the object level and calls
148 to next_stage() and update() made whereever needed. 151 to next_stage() and update() made wherever needed.
149 """ 152 """
150 def __init__(self, d, stage_weights, debug=False): 153 def __init__(self, d, stage_weights, debug=False):
151 """ 154 """
diff --git a/bitbake/lib/bb/providers.py b/bitbake/lib/bb/providers.py
index b5a6cd0090..e11a4637d1 100644
--- a/bitbake/lib/bb/providers.py
+++ b/bitbake/lib/bb/providers.py
@@ -38,16 +38,17 @@ def findProviders(cfgData, dataCache, pkg_pn = None):
38 localdata = data.createCopy(cfgData) 38 localdata = data.createCopy(cfgData)
39 bb.data.expandKeys(localdata) 39 bb.data.expandKeys(localdata)
40 40
41 required = {}
41 preferred_versions = {} 42 preferred_versions = {}
42 latest_versions = {} 43 latest_versions = {}
43 44
44 for pn in pkg_pn: 45 for pn in pkg_pn:
45 (last_ver, last_file, pref_ver, pref_file) = findBestProvider(pn, localdata, dataCache, pkg_pn) 46 (last_ver, last_file, pref_ver, pref_file, req) = findBestProvider(pn, localdata, dataCache, pkg_pn)
46 preferred_versions[pn] = (pref_ver, pref_file) 47 preferred_versions[pn] = (pref_ver, pref_file)
47 latest_versions[pn] = (last_ver, last_file) 48 latest_versions[pn] = (last_ver, last_file)
49 required[pn] = req
48 50
49 return (latest_versions, preferred_versions) 51 return (latest_versions, preferred_versions, required)
50
51 52
52def allProviders(dataCache): 53def allProviders(dataCache):
53 """ 54 """
@@ -59,7 +60,6 @@ def allProviders(dataCache):
59 all_providers[pn].append((ver, fn)) 60 all_providers[pn].append((ver, fn))
60 return all_providers 61 return all_providers
61 62
62
63def sortPriorities(pn, dataCache, pkg_pn = None): 63def sortPriorities(pn, dataCache, pkg_pn = None):
64 """ 64 """
65 Reorder pkg_pn by file priority and default preference 65 Reorder pkg_pn by file priority and default preference
@@ -87,6 +87,21 @@ def sortPriorities(pn, dataCache, pkg_pn = None):
87 87
88 return tmp_pn 88 return tmp_pn
89 89
90def versionVariableMatch(cfgData, keyword, pn):
91 """
92 Return the value of the <keyword>_VERSION variable if set.
93 """
94
95 # pn can contain '_', e.g. gcc-cross-x86_64 and an override cannot
96 # hence we do this manually rather than use OVERRIDES
97 ver = cfgData.getVar("%s_VERSION:pn-%s" % (keyword, pn))
98 if not ver:
99 ver = cfgData.getVar("%s_VERSION_%s" % (keyword, pn))
100 if not ver:
101 ver = cfgData.getVar("%s_VERSION" % keyword)
102
103 return ver
104
90def preferredVersionMatch(pe, pv, pr, preferred_e, preferred_v, preferred_r): 105def preferredVersionMatch(pe, pv, pr, preferred_e, preferred_v, preferred_r):
91 """ 106 """
92 Check if the version pe,pv,pr is the preferred one. 107 Check if the version pe,pv,pr is the preferred one.
@@ -102,19 +117,28 @@ def preferredVersionMatch(pe, pv, pr, preferred_e, preferred_v, preferred_r):
102 117
103def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None): 118def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
104 """ 119 """
105 Find the first provider in pkg_pn with a PREFERRED_VERSION set. 120 Find the first provider in pkg_pn with REQUIRED_VERSION or PREFERRED_VERSION set.
106 """ 121 """
107 122
108 preferred_file = None 123 preferred_file = None
109 preferred_ver = None 124 preferred_ver = None
125 required = False
110 126
111 # pn can contain '_', e.g. gcc-cross-x86_64 and an override cannot 127 required_v = versionVariableMatch(cfgData, "REQUIRED", pn)
112 # hence we do this manually rather than use OVERRIDES 128 preferred_v = versionVariableMatch(cfgData, "PREFERRED", pn)
113 preferred_v = cfgData.getVar("PREFERRED_VERSION_pn-%s" % pn) 129
114 if not preferred_v: 130 itemstr = ""
115 preferred_v = cfgData.getVar("PREFERRED_VERSION_%s" % pn) 131 if item:
116 if not preferred_v: 132 itemstr = " (for item %s)" % item
117 preferred_v = cfgData.getVar("PREFERRED_VERSION") 133
134 if required_v is not None:
135 if preferred_v is not None:
136 logger.warning("REQUIRED_VERSION and PREFERRED_VERSION for package %s%s are both set using REQUIRED_VERSION %s", pn, itemstr, required_v)
137 else:
138 logger.debug("REQUIRED_VERSION is set for package %s%s", pn, itemstr)
139 # REQUIRED_VERSION always takes precedence over PREFERRED_VERSION
140 preferred_v = required_v
141 required = True
118 142
119 if preferred_v: 143 if preferred_v:
120 m = re.match(r'(\d+:)*(.*)(_.*)*', preferred_v) 144 m = re.match(r'(\d+:)*(.*)(_.*)*', preferred_v)
@@ -147,11 +171,9 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
147 pv_str = preferred_v 171 pv_str = preferred_v
148 if not (preferred_e is None): 172 if not (preferred_e is None):
149 pv_str = '%s:%s' % (preferred_e, pv_str) 173 pv_str = '%s:%s' % (preferred_e, pv_str)
150 itemstr = ""
151 if item:
152 itemstr = " (for item %s)" % item
153 if preferred_file is None: 174 if preferred_file is None:
154 logger.warn("preferred version %s of %s not available%s", pv_str, pn, itemstr) 175 if not required:
176 logger.warning("preferred version %s of %s not available%s", pv_str, pn, itemstr)
155 available_vers = [] 177 available_vers = []
156 for file_set in pkg_pn: 178 for file_set in pkg_pn:
157 for f in file_set: 179 for f in file_set:
@@ -163,12 +185,16 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
163 available_vers.append(ver_str) 185 available_vers.append(ver_str)
164 if available_vers: 186 if available_vers:
165 available_vers.sort() 187 available_vers.sort()
166 logger.warn("versions of %s available: %s", pn, ' '.join(available_vers)) 188 logger.warning("versions of %s available: %s", pn, ' '.join(available_vers))
189 if required:
190 logger.error("required version %s of %s not available%s", pv_str, pn, itemstr)
167 else: 191 else:
168 logger.debug("selecting %s as PREFERRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr) 192 if required:
169 193 logger.debug("selecting %s as REQUIRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr)
170 return (preferred_ver, preferred_file) 194 else:
195 logger.debug("selecting %s as PREFERRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr)
171 196
197 return (preferred_ver, preferred_file, required)
172 198
173def findLatestProvider(pn, cfgData, dataCache, file_set): 199def findLatestProvider(pn, cfgData, dataCache, file_set):
174 """ 200 """
@@ -189,7 +215,6 @@ def findLatestProvider(pn, cfgData, dataCache, file_set):
189 215
190 return (latest, latest_f) 216 return (latest, latest_f)
191 217
192
193def findBestProvider(pn, cfgData, dataCache, pkg_pn = None, item = None): 218def findBestProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
194 """ 219 """
195 If there is a PREFERRED_VERSION, find the highest-priority bbfile 220 If there is a PREFERRED_VERSION, find the highest-priority bbfile
@@ -198,17 +223,16 @@ def findBestProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
198 """ 223 """
199 224
200 sortpkg_pn = sortPriorities(pn, dataCache, pkg_pn) 225 sortpkg_pn = sortPriorities(pn, dataCache, pkg_pn)
201 # Find the highest priority provider with a PREFERRED_VERSION set 226 # Find the highest priority provider with a REQUIRED_VERSION or PREFERRED_VERSION set
202 (preferred_ver, preferred_file) = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn, item) 227 (preferred_ver, preferred_file, required) = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn, item)
203 # Find the latest version of the highest priority provider 228 # Find the latest version of the highest priority provider
204 (latest, latest_f) = findLatestProvider(pn, cfgData, dataCache, sortpkg_pn[0]) 229 (latest, latest_f) = findLatestProvider(pn, cfgData, dataCache, sortpkg_pn[0])
205 230
206 if preferred_file is None: 231 if not required and preferred_file is None:
207 preferred_file = latest_f 232 preferred_file = latest_f
208 preferred_ver = latest 233 preferred_ver = latest
209 234
210 return (latest, latest_f, preferred_ver, preferred_file) 235 return (latest, latest_f, preferred_ver, preferred_file, required)
211
212 236
213def _filterProviders(providers, item, cfgData, dataCache): 237def _filterProviders(providers, item, cfgData, dataCache):
214 """ 238 """
@@ -234,10 +258,13 @@ def _filterProviders(providers, item, cfgData, dataCache):
234 258
235 logger.debug("providers for %s are: %s", item, list(sorted(pkg_pn.keys()))) 259 logger.debug("providers for %s are: %s", item, list(sorted(pkg_pn.keys())))
236 260
237 # First add PREFERRED_VERSIONS 261 # First add REQUIRED_VERSIONS or PREFERRED_VERSIONS
238 for pn in sorted(pkg_pn): 262 for pn in sorted(pkg_pn):
239 sortpkg_pn[pn] = sortPriorities(pn, dataCache, pkg_pn) 263 sortpkg_pn[pn] = sortPriorities(pn, dataCache, pkg_pn)
240 preferred_versions[pn] = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn[pn], item) 264 preferred_ver, preferred_file, required = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn[pn], item)
265 if required and preferred_file is None:
266 return eligible
267 preferred_versions[pn] = (preferred_ver, preferred_file)
241 if preferred_versions[pn][1]: 268 if preferred_versions[pn][1]:
242 eligible.append(preferred_versions[pn][1]) 269 eligible.append(preferred_versions[pn][1])
243 270
@@ -249,7 +276,6 @@ def _filterProviders(providers, item, cfgData, dataCache):
249 eligible.append(preferred_versions[pn][1]) 276 eligible.append(preferred_versions[pn][1])
250 277
251 if not eligible: 278 if not eligible:
252 logger.error("no eligible providers for %s", item)
253 return eligible 279 return eligible
254 280
255 # If pn == item, give it a slight default preference 281 # If pn == item, give it a slight default preference
@@ -266,7 +292,6 @@ def _filterProviders(providers, item, cfgData, dataCache):
266 292
267 return eligible 293 return eligible
268 294
269
270def filterProviders(providers, item, cfgData, dataCache): 295def filterProviders(providers, item, cfgData, dataCache):
271 """ 296 """
272 Take a list of providers and filter/reorder according to the 297 Take a list of providers and filter/reorder according to the
@@ -371,8 +396,8 @@ def getRuntimeProviders(dataCache, rdepend):
371 return rproviders 396 return rproviders
372 397
373 # Only search dynamic packages if we can't find anything in other variables 398 # Only search dynamic packages if we can't find anything in other variables
374 for pattern in dataCache.packages_dynamic: 399 for pat_key in dataCache.packages_dynamic:
375 pattern = pattern.replace(r'+', r"\+") 400 pattern = pat_key.replace(r'+', r"\+")
376 if pattern in regexp_cache: 401 if pattern in regexp_cache:
377 regexp = regexp_cache[pattern] 402 regexp = regexp_cache[pattern]
378 else: 403 else:
@@ -383,12 +408,11 @@ def getRuntimeProviders(dataCache, rdepend):
383 raise 408 raise
384 regexp_cache[pattern] = regexp 409 regexp_cache[pattern] = regexp
385 if regexp.match(rdepend): 410 if regexp.match(rdepend):
386 rproviders += dataCache.packages_dynamic[pattern] 411 rproviders += dataCache.packages_dynamic[pat_key]
387 logger.debug("Assuming %s is a dynamic package, but it may not exist" % rdepend) 412 logger.debug("Assuming %s is a dynamic package, but it may not exist" % rdepend)
388 413
389 return rproviders 414 return rproviders
390 415
391
392def buildWorldTargetList(dataCache, task=None): 416def buildWorldTargetList(dataCache, task=None):
393 """ 417 """
394 Build package list for "bitbake world" 418 Build package list for "bitbake world"
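The REQUIRED_VERSION support added above uses the same lookup order as PREFERRED_VERSION but turns a miss into a hard error instead of a silent fallback to the latest version. A minimal sketch of the precedence, assuming a getVar-style accessor; the ':pn-<pn>' form is checked explicitly because pn may contain '_' and so cannot go through OVERRIDES:

def version_variable(getVar, keyword, pn):
    # Mirrors versionVariableMatch(): the most specific form wins.
    return (getVar("%s_VERSION:pn-%s" % (keyword, pn))
            or getVar("%s_VERSION_%s" % (keyword, pn))
            or getVar("%s_VERSION" % keyword))

def effective_version(getVar, pn):
    required = version_variable(getVar, "REQUIRED", pn)
    preferred = version_variable(getVar, "PREFERRED", pn)
    if required:
        # REQUIRED_VERSION always takes precedence; if no provider
        # matches it, the caller must error out rather than fall
        # back to the latest available version.
        return required, True
    return preferred, False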
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
index 54ef245a63..bc7e18175d 100644
--- a/bitbake/lib/bb/runqueue.py
+++ b/bitbake/lib/bb/runqueue.py
@@ -24,6 +24,7 @@ import pickle
24from multiprocessing import Process 24from multiprocessing import Process
25import shlex 25import shlex
26import pprint 26import pprint
27import time
27 28
28bblogger = logging.getLogger("BitBake") 29bblogger = logging.getLogger("BitBake")
29logger = logging.getLogger("BitBake.RunQueue") 30logger = logging.getLogger("BitBake.RunQueue")
@@ -85,15 +86,19 @@ class RunQueueStats:
85 """ 86 """
86 Holds statistics on the tasks handled by the associated runQueue 87 Holds statistics on the tasks handled by the associated runQueue
87 """ 88 """
88 def __init__(self, total): 89 def __init__(self, total, setscene_total):
89 self.completed = 0 90 self.completed = 0
90 self.skipped = 0 91 self.skipped = 0
91 self.failed = 0 92 self.failed = 0
92 self.active = 0 93 self.active = 0
94 self.setscene_active = 0
95 self.setscene_covered = 0
96 self.setscene_notcovered = 0
97 self.setscene_total = setscene_total
93 self.total = total 98 self.total = total
94 99
95 def copy(self): 100 def copy(self):
96 obj = self.__class__(self.total) 101 obj = self.__class__(self.total, self.setscene_total)
97 obj.__dict__.update(self.__dict__) 102 obj.__dict__.update(self.__dict__)
98 return obj 103 return obj
99 104
@@ -112,6 +117,13 @@ class RunQueueStats:
112 def taskActive(self): 117 def taskActive(self):
113 self.active = self.active + 1 118 self.active = self.active + 1
114 119
120 def updateCovered(self, covered, notcovered):
121 self.setscene_covered = covered
122 self.setscene_notcovered = notcovered
123
124 def updateActiveSetscene(self, active):
125 self.setscene_active = active
126
115# These values indicate the next step due to be run in the 127# These values indicate the next step due to be run in the
116# runQueue state machine 128# runQueue state machine
117runQueuePrepare = 2 129runQueuePrepare = 2
@@ -143,11 +155,82 @@ class RunQueueScheduler(object):
143 self.stamps = {} 155 self.stamps = {}
144 for tid in self.rqdata.runtaskentries: 156 for tid in self.rqdata.runtaskentries:
145 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) 157 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
146 self.stamps[tid] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True) 158 self.stamps[tid] = bb.parse.siggen.stampfile_mcfn(taskname, taskfn, extrainfo=False)
147 if tid in self.rq.runq_buildable: 159 if tid in self.rq.runq_buildable:
148 self.buildable.append(tid) 160 self.buildable.add(tid)
149 161
150 self.rev_prio_map = None 162 self.rev_prio_map = None
163 self.is_pressure_usable()
164
165 def is_pressure_usable(self):
166 """
 167 If monitoring pressure, return True if the pressure files can be opened and read. For example,
 168 openSUSE's /proc/pressure/* files have readable permissions, but reading them returns the error
 169 EOPNOTSUPP (Operation not supported).
170 """
171 if self.rq.max_cpu_pressure or self.rq.max_io_pressure or self.rq.max_memory_pressure:
172 try:
173 with open("/proc/pressure/cpu") as cpu_pressure_fds, \
174 open("/proc/pressure/io") as io_pressure_fds, \
175 open("/proc/pressure/memory") as memory_pressure_fds:
176
177 self.prev_cpu_pressure = cpu_pressure_fds.readline().split()[4].split("=")[1]
178 self.prev_io_pressure = io_pressure_fds.readline().split()[4].split("=")[1]
179 self.prev_memory_pressure = memory_pressure_fds.readline().split()[4].split("=")[1]
180 self.prev_pressure_time = time.time()
181 self.check_pressure = True
182 except:
183 bb.note("The /proc/pressure files can't be read. Continuing build without monitoring pressure")
184 self.check_pressure = False
185 else:
186 self.check_pressure = False
187
188 def exceeds_max_pressure(self):
189 """
 190 Monitor the difference in total pressure at least once per second; if
191 BB_PRESSURE_MAX_{CPU|IO|MEMORY} are set, return True if above threshold.
192 """
193 if self.check_pressure:
194 with open("/proc/pressure/cpu") as cpu_pressure_fds, \
195 open("/proc/pressure/io") as io_pressure_fds, \
196 open("/proc/pressure/memory") as memory_pressure_fds:
197 # extract "total" from /proc/pressure/{cpu|io}
198 curr_cpu_pressure = cpu_pressure_fds.readline().split()[4].split("=")[1]
199 curr_io_pressure = io_pressure_fds.readline().split()[4].split("=")[1]
200 curr_memory_pressure = memory_pressure_fds.readline().split()[4].split("=")[1]
201 now = time.time()
202 tdiff = now - self.prev_pressure_time
203 psi_accumulation_interval = 1.0
204 cpu_pressure = (float(curr_cpu_pressure) - float(self.prev_cpu_pressure)) / tdiff
205 io_pressure = (float(curr_io_pressure) - float(self.prev_io_pressure)) / tdiff
206 memory_pressure = (float(curr_memory_pressure) - float(self.prev_memory_pressure)) / tdiff
207 exceeds_cpu_pressure = self.rq.max_cpu_pressure and cpu_pressure > self.rq.max_cpu_pressure
208 exceeds_io_pressure = self.rq.max_io_pressure and io_pressure > self.rq.max_io_pressure
209 exceeds_memory_pressure = self.rq.max_memory_pressure and memory_pressure > self.rq.max_memory_pressure
210
211 if tdiff > psi_accumulation_interval:
212 self.prev_cpu_pressure = curr_cpu_pressure
213 self.prev_io_pressure = curr_io_pressure
214 self.prev_memory_pressure = curr_memory_pressure
215 self.prev_pressure_time = now
216
217 pressure_state = (exceeds_cpu_pressure, exceeds_io_pressure, exceeds_memory_pressure)
218 pressure_values = (round(cpu_pressure,1), self.rq.max_cpu_pressure, round(io_pressure,1), self.rq.max_io_pressure, round(memory_pressure,1), self.rq.max_memory_pressure)
219 if hasattr(self, "pressure_state") and pressure_state != self.pressure_state:
220 bb.note("Pressure status changed to CPU: %s, IO: %s, Mem: %s (CPU: %s/%s, IO: %s/%s, Mem: %s/%s) - using %s/%s bitbake threads" % (pressure_state + pressure_values + (len(self.rq.runq_running.difference(self.rq.runq_complete)), self.rq.number_tasks)))
221 self.pressure_state = pressure_state
222 return (exceeds_cpu_pressure or exceeds_io_pressure or exceeds_memory_pressure)
223 elif self.rq.max_loadfactor:
224 limit = False
225 loadfactor = float(os.getloadavg()[0]) / os.cpu_count()
226 # bb.warn("Comparing %s to %s" % (loadfactor, self.rq.max_loadfactor))
227 if loadfactor > self.rq.max_loadfactor:
228 limit = True
229 if hasattr(self, "loadfactor_limit") and limit != self.loadfactor_limit:
230 bb.note("Load average limiting set to %s as load average: %s - using %s/%s bitbake threads" % (limit, loadfactor, len(self.rq.runq_running.difference(self.rq.runq_complete)), self.rq.number_tasks))
231 self.loadfactor_limit = limit
232 return limit
233 return False
151 234
152 def next_buildable_task(self): 235 def next_buildable_task(self):
153 """ 236 """
@@ -161,6 +244,12 @@ class RunQueueScheduler(object):
161 if not buildable: 244 if not buildable:
162 return None 245 return None
163 246
247 # Bitbake requires that at least one task be active. Only check for pressure if
248 # this is the case, otherwise the pressure limitation could result in no tasks
249 # being active and no new tasks started thereby, at times, breaking the scheduler.
250 if self.rq.stats.active and self.exceeds_max_pressure():
251 return None
252
164 # Filter out tasks that have a max number of threads that have been exceeded 253 # Filter out tasks that have a max number of threads that have been exceeded
165 skip_buildable = {} 254 skip_buildable = {}
166 for running in self.rq.runq_running.difference(self.rq.runq_complete): 255 for running in self.rq.runq_running.difference(self.rq.runq_complete):
@@ -191,11 +280,11 @@ class RunQueueScheduler(object):
191 best = None 280 best = None
192 bestprio = None 281 bestprio = None
193 for tid in buildable: 282 for tid in buildable:
194 taskname = taskname_from_tid(tid)
195 if taskname in skip_buildable and skip_buildable[taskname] >= int(self.skip_maxthread[taskname]):
196 continue
197 prio = self.rev_prio_map[tid] 283 prio = self.rev_prio_map[tid]
198 if bestprio is None or bestprio > prio: 284 if bestprio is None or bestprio > prio:
285 taskname = taskname_from_tid(tid)
286 if taskname in skip_buildable and skip_buildable[taskname] >= int(self.skip_maxthread[taskname]):
287 continue
199 stamp = self.stamps[tid] 288 stamp = self.stamps[tid]
200 if stamp in self.rq.build_stamps.values(): 289 if stamp in self.rq.build_stamps.values():
201 continue 290 continue
@@ -374,10 +463,9 @@ class RunQueueData:
374 self.rq = rq 463 self.rq = rq
375 self.warn_multi_bb = False 464 self.warn_multi_bb = False
376 465
377 self.stampwhitelist = cfgData.getVar("BB_STAMP_WHITELIST") or "" 466 self.multi_provider_allowed = (cfgData.getVar("BB_MULTI_PROVIDER_ALLOWED") or "").split()
378 self.multi_provider_whitelist = (cfgData.getVar("MULTI_PROVIDER_WHITELIST") or "").split() 467 self.setscene_ignore_tasks = get_setscene_enforce_ignore_tasks(cfgData, targets)
379 self.setscenewhitelist = get_setscene_enforce_whitelist(cfgData, targets) 468 self.setscene_ignore_tasks_checked = False
380 self.setscenewhitelist_checked = False
381 self.setscene_enforce = (cfgData.getVar('BB_SETSCENE_ENFORCE') == "1") 469 self.setscene_enforce = (cfgData.getVar('BB_SETSCENE_ENFORCE') == "1")
382 self.init_progress_reporter = bb.progress.DummyMultiStageProcessProgressReporter() 470 self.init_progress_reporter = bb.progress.DummyMultiStageProcessProgressReporter()
383 471
@@ -475,7 +563,7 @@ class RunQueueData:
475 msgs.append(" Task %s (dependent Tasks %s)\n" % (dep, self.runq_depends_names(self.runtaskentries[dep].depends))) 563 msgs.append(" Task %s (dependent Tasks %s)\n" % (dep, self.runq_depends_names(self.runtaskentries[dep].depends)))
476 msgs.append("\n") 564 msgs.append("\n")
477 if len(valid_chains) > 10: 565 if len(valid_chains) > 10:
478 msgs.append("Aborted dependency loops search after 10 matches.\n") 566 msgs.append("Halted dependency loops search after 10 matches.\n")
479 raise TooManyLoops 567 raise TooManyLoops
480 continue 568 continue
481 scan = False 569 scan = False
@@ -536,7 +624,7 @@ class RunQueueData:
536 next_points.append(revdep) 624 next_points.append(revdep)
537 task_done[revdep] = True 625 task_done[revdep] = True
538 endpoints = next_points 626 endpoints = next_points
539 if len(next_points) == 0: 627 if not next_points:
540 break 628 break
541 629
542 # Circular dependency sanity check 630 # Circular dependency sanity check
@@ -578,15 +666,18 @@ class RunQueueData:
578 666
579 found = False 667 found = False
580 for mc in self.taskData: 668 for mc in self.taskData:
581 if len(taskData[mc].taskentries) > 0: 669 if taskData[mc].taskentries:
582 found = True 670 found = True
583 break 671 break
584 if not found: 672 if not found:
585 # Nothing to do 673 # Nothing to do
586 return 0 674 return 0
587 675
676 bb.parse.siggen.setup_datacache(self.dataCaches)
677
588 self.init_progress_reporter.start() 678 self.init_progress_reporter.start()
589 self.init_progress_reporter.next_stage() 679 self.init_progress_reporter.next_stage()
680 bb.event.check_for_interrupts(self.cooker.data)
590 681
591 # Step A - Work out a list of tasks to run 682 # Step A - Work out a list of tasks to run
592 # 683 #
@@ -632,6 +723,8 @@ class RunQueueData:
632 frommc = mcdependency[1] 723 frommc = mcdependency[1]
633 mcdep = mcdependency[2] 724 mcdep = mcdependency[2]
634 deptask = mcdependency[4] 725 deptask = mcdependency[4]
726 if mcdep not in taskData:
727 bb.fatal("Multiconfig '%s' is referenced in multiconfig dependency '%s' but not enabled in BBMULTICONFIG?" % (mcdep, dep))
635 if mc == frommc: 728 if mc == frommc:
636 fn = taskData[mcdep].build_targets[pn][0] 729 fn = taskData[mcdep].build_targets[pn][0]
637 newdep = '%s:%s' % (fn,deptask) 730 newdep = '%s:%s' % (fn,deptask)
@@ -733,6 +826,7 @@ class RunQueueData:
733 #self.dump_data() 826 #self.dump_data()
734 827
735 self.init_progress_reporter.next_stage() 828 self.init_progress_reporter.next_stage()
829 bb.event.check_for_interrupts(self.cooker.data)
736 830
737 # Resolve recursive 'recrdeptask' dependencies (Part B) 831 # Resolve recursive 'recrdeptask' dependencies (Part B)
738 # 832 #
@@ -762,7 +856,7 @@ class RunQueueData:
762 # Find the dependency chain endpoints 856 # Find the dependency chain endpoints
763 endpoints = set() 857 endpoints = set()
764 for tid in self.runtaskentries: 858 for tid in self.runtaskentries:
765 if len(deps[tid]) == 0: 859 if not deps[tid]:
766 endpoints.add(tid) 860 endpoints.add(tid)
767 # Iterate the chains collating dependencies 861 # Iterate the chains collating dependencies
768 while endpoints: 862 while endpoints:
@@ -773,11 +867,11 @@ class RunQueueData:
773 cumulativedeps[dep].update(cumulativedeps[tid]) 867 cumulativedeps[dep].update(cumulativedeps[tid])
774 if tid in deps[dep]: 868 if tid in deps[dep]:
775 deps[dep].remove(tid) 869 deps[dep].remove(tid)
776 if len(deps[dep]) == 0: 870 if not deps[dep]:
777 next.add(dep) 871 next.add(dep)
778 endpoints = next 872 endpoints = next
779 #for tid in deps: 873 #for tid in deps:
780 # if len(deps[tid]) != 0: 874 # if deps[tid]:
781 # bb.warn("Sanity test failure, dependencies left for %s (%s)" % (tid, deps[tid])) 875 # bb.warn("Sanity test failure, dependencies left for %s (%s)" % (tid, deps[tid]))
782 876
783 # Loop here since recrdeptasks can depend upon other recrdeptasks and we have to 877 # Loop here since recrdeptasks can depend upon other recrdeptasks and we have to
@@ -829,6 +923,7 @@ class RunQueueData:
829 self.runtaskentries[tid].depends.difference_update(recursivetasksselfref) 923 self.runtaskentries[tid].depends.difference_update(recursivetasksselfref)
830 924
831 self.init_progress_reporter.next_stage() 925 self.init_progress_reporter.next_stage()
926 bb.event.check_for_interrupts(self.cooker.data)
832 927
833 #self.dump_data() 928 #self.dump_data()
834 929
@@ -867,7 +962,7 @@ class RunQueueData:
867 bb.debug(1, "Task %s is marked nostamp, cannot invalidate this task" % taskname) 962 bb.debug(1, "Task %s is marked nostamp, cannot invalidate this task" % taskname)
868 else: 963 else:
869 logger.verbose("Invalidate task %s, %s", taskname, fn) 964 logger.verbose("Invalidate task %s, %s", taskname, fn)
870 bb.parse.siggen.invalidate_task(taskname, self.dataCaches[mc], taskfn) 965 bb.parse.siggen.invalidate_task(taskname, taskfn)
871 966
872 self.target_tids = [] 967 self.target_tids = []
873 for (mc, target, task, fn) in self.targets: 968 for (mc, target, task, fn) in self.targets:
@@ -910,47 +1005,54 @@ class RunQueueData:
910 mark_active(tid, 1) 1005 mark_active(tid, 1)
911 1006
912 self.init_progress_reporter.next_stage() 1007 self.init_progress_reporter.next_stage()
1008 bb.event.check_for_interrupts(self.cooker.data)
913 1009
914 # Step C - Prune all inactive tasks 1010 # Step C - Prune all inactive tasks
915 # 1011 #
916 # Once all active tasks are marked, prune the ones we don't need. 1012 # Once all active tasks are marked, prune the ones we don't need.
917 1013
918 delcount = {}
919 for tid in list(self.runtaskentries.keys()):
920 if tid not in runq_build:
921 delcount[tid] = self.runtaskentries[tid]
922 del self.runtaskentries[tid]
923
924 # Handle --runall 1014 # Handle --runall
925 if self.cooker.configuration.runall: 1015 if self.cooker.configuration.runall:
926 # re-run the mark_active and then drop unused tasks from new list 1016 # re-run the mark_active and then drop unused tasks from new list
927 runq_build = {}
928 1017
929 for task in self.cooker.configuration.runall: 1018 runall_tids = set()
930 if not task.startswith("do_"): 1019 added = True
931 task = "do_{0}".format(task) 1020 while added:
932 runall_tids = set() 1021 reduced_tasklist = set(self.runtaskentries.keys())
933 for tid in list(self.runtaskentries): 1022 for tid in list(self.runtaskentries.keys()):
934 wanttid = "{0}:{1}".format(fn_from_tid(tid), task) 1023 if tid not in runq_build:
935 if wanttid in delcount: 1024 reduced_tasklist.remove(tid)
936 self.runtaskentries[wanttid] = delcount[wanttid] 1025 runq_build = {}
937 if wanttid in self.runtaskentries:
938 runall_tids.add(wanttid)
939
940 for tid in list(runall_tids):
941 mark_active(tid,1)
942 if self.cooker.configuration.force:
943 invalidate_task(tid, False)
944 1026
945 for tid in list(self.runtaskentries.keys()): 1027 orig = runall_tids
946 if tid not in runq_build: 1028 runall_tids = set()
947 delcount[tid] = self.runtaskentries[tid] 1029 for task in self.cooker.configuration.runall:
948 del self.runtaskentries[tid] 1030 if not task.startswith("do_"):
1031 task = "do_{0}".format(task)
1032 for tid in reduced_tasklist:
1033 wanttid = "{0}:{1}".format(fn_from_tid(tid), task)
1034 if wanttid in self.runtaskentries:
1035 runall_tids.add(wanttid)
1036
1037 for tid in list(runall_tids):
1038 mark_active(tid, 1)
1039 self.target_tids.append(tid)
1040 if self.cooker.configuration.force:
1041 invalidate_task(tid, False)
1042 added = runall_tids - orig
1043
1044 delcount = set()
1045 for tid in list(self.runtaskentries.keys()):
1046 if tid not in runq_build:
1047 delcount.add(tid)
1048 del self.runtaskentries[tid]
949 1049
950 if len(self.runtaskentries) == 0: 1050 if self.cooker.configuration.runall:
1051 if not self.runtaskentries:
951 bb.msg.fatal("RunQueue", "Could not find any tasks with the tasknames %s to run within the recipes of the taskgraphs of the targets %s" % (str(self.cooker.configuration.runall), str(self.targets))) 1052 bb.msg.fatal("RunQueue", "Could not find any tasks with the tasknames %s to run within the recipes of the taskgraphs of the targets %s" % (str(self.cooker.configuration.runall), str(self.targets)))
952 1053
953 self.init_progress_reporter.next_stage() 1054 self.init_progress_reporter.next_stage()
1055 bb.event.check_for_interrupts(self.cooker.data)
954 1056
955 # Handle runonly 1057 # Handle runonly
956 if self.cooker.configuration.runonly: 1058 if self.cooker.configuration.runonly:
@@ -960,19 +1062,19 @@ class RunQueueData:
960 for task in self.cooker.configuration.runonly: 1062 for task in self.cooker.configuration.runonly:
961 if not task.startswith("do_"): 1063 if not task.startswith("do_"):
962 task = "do_{0}".format(task) 1064 task = "do_{0}".format(task)
963 runonly_tids = { k: v for k, v in self.runtaskentries.items() if taskname_from_tid(k) == task } 1065 runonly_tids = [k for k in self.runtaskentries.keys() if taskname_from_tid(k) == task]
964 1066
965 for tid in list(runonly_tids): 1067 for tid in runonly_tids:
966 mark_active(tid,1) 1068 mark_active(tid, 1)
967 if self.cooker.configuration.force: 1069 if self.cooker.configuration.force:
968 invalidate_task(tid, False) 1070 invalidate_task(tid, False)
969 1071
970 for tid in list(self.runtaskentries.keys()): 1072 for tid in list(self.runtaskentries.keys()):
971 if tid not in runq_build: 1073 if tid not in runq_build:
972 delcount[tid] = self.runtaskentries[tid] 1074 delcount.add(tid)
973 del self.runtaskentries[tid] 1075 del self.runtaskentries[tid]
974 1076
975 if len(self.runtaskentries) == 0: 1077 if not self.runtaskentries:
976 bb.msg.fatal("RunQueue", "Could not find any tasks with the tasknames %s to run within the taskgraphs of the targets %s" % (str(self.cooker.configuration.runonly), str(self.targets))) 1078 bb.msg.fatal("RunQueue", "Could not find any tasks with the tasknames %s to run within the taskgraphs of the targets %s" % (str(self.cooker.configuration.runonly), str(self.targets)))
977 1079
978 # 1080 #
@@ -980,8 +1082,8 @@ class RunQueueData:
         #

         # Check to make sure we still have tasks to run
-        if len(self.runtaskentries) == 0:
-            if not taskData[''].abort:
+        if not self.runtaskentries:
+            if not taskData[''].halt:
                 bb.msg.fatal("RunQueue", "All buildable tasks have been run but the build is incomplete (--continue mode). Errors for the tasks that failed will have been printed above.")
             else:
                 bb.msg.fatal("RunQueue", "No active tasks and not in --continue mode?! Please report this bug.")
@@ -991,6 +1093,7 @@ class RunQueueData:
         logger.verbose("Assign Weightings")

         self.init_progress_reporter.next_stage()
+        bb.event.check_for_interrupts(self.cooker.data)

         # Generate a list of reverse dependencies to ease future calculations
         for tid in self.runtaskentries:
@@ -998,13 +1101,14 @@ class RunQueueData:
                 self.runtaskentries[dep].revdeps.add(tid)

         self.init_progress_reporter.next_stage()
+        bb.event.check_for_interrupts(self.cooker.data)

         # Identify tasks at the end of dependency chains
         # Error on circular dependency loops (length two)
         endpoints = []
         for tid in self.runtaskentries:
             revdeps = self.runtaskentries[tid].revdeps
-            if len(revdeps) == 0:
+            if not revdeps:
                 endpoints.append(tid)
             for dep in revdeps:
                 if dep in self.runtaskentries[tid].depends:
@@ -1014,12 +1118,14 @@ class RunQueueData:
         logger.verbose("Compute totals (have %s endpoint(s))", len(endpoints))

         self.init_progress_reporter.next_stage()
+        bb.event.check_for_interrupts(self.cooker.data)

         # Calculate task weights
         # Check of higher length circular dependencies
         self.runq_weight = self.calculate_task_weights(endpoints)

         self.init_progress_reporter.next_stage()
+        bb.event.check_for_interrupts(self.cooker.data)

         # Sanity Check - Check for multiple tasks building the same provider
         for mc in self.dataCaches:
@@ -1040,7 +1146,7 @@ class RunQueueData:
             for prov in prov_list:
                 if len(prov_list[prov]) < 2:
                     continue
-                if prov in self.multi_provider_whitelist:
+                if prov in self.multi_provider_allowed:
                     continue
                 seen_pn = []
                 # If two versions of the same PN are being built its fatal, we don't support it.
@@ -1050,12 +1156,12 @@ class RunQueueData:
                         seen_pn.append(pn)
                     else:
                         bb.fatal("Multiple versions of %s are due to be built (%s). Only one version of a given PN should be built in any given build. You likely need to set PREFERRED_VERSION_%s to select the correct version or don't depend on multiple versions." % (pn, " ".join(prov_list[prov]), pn))
-                msg = "Multiple .bb files are due to be built which each provide %s:\n %s" % (prov, "\n ".join(prov_list[prov]))
+                msgs = ["Multiple .bb files are due to be built which each provide %s:\n %s" % (prov, "\n ".join(prov_list[prov]))]
                 #
                 # Construct a list of things which uniquely depend on each provider
                 # since this may help the user figure out which dependency is triggering this warning
                 #
-                msg += "\nA list of tasks depending on these providers is shown and may help explain where the dependency comes from."
+                msgs.append("\nA list of tasks depending on these providers is shown and may help explain where the dependency comes from.")
                 deplist = {}
                 commondeps = None
                 for provfn in prov_list[prov]:
@@ -1075,12 +1181,12 @@ class RunQueueData:
                         commondeps &= deps
                     deplist[provfn] = deps
                 for provfn in deplist:
-                    msg += "\n%s has unique dependees:\n %s" % (provfn, "\n ".join(deplist[provfn] - commondeps))
+                    msgs.append("\n%s has unique dependees:\n %s" % (provfn, "\n ".join(deplist[provfn] - commondeps)))
                 #
                 # Construct a list of provides and runtime providers for each recipe
                 # (rprovides has to cover RPROVIDES, PACKAGES, PACKAGES_DYNAMIC)
                 #
-                msg += "\nIt could be that one recipe provides something the other doesn't and should. The following provider and runtime provider differences may be helpful."
+                msgs.append("\nIt could be that one recipe provides something the other doesn't and should. The following provider and runtime provider differences may be helpful.")
                 provide_results = {}
                 rprovide_results = {}
                 commonprovs = None
@@ -1107,30 +1213,20 @@ class RunQueueData:
                     else:
                         commonrprovs &= rprovides
                     rprovide_results[provfn] = rprovides
-                #msg += "\nCommon provides:\n %s" % ("\n ".join(commonprovs))
-                #msg += "\nCommon rprovides:\n %s" % ("\n ".join(commonrprovs))
+                #msgs.append("\nCommon provides:\n %s" % ("\n ".join(commonprovs)))
+                #msgs.append("\nCommon rprovides:\n %s" % ("\n ".join(commonrprovs)))
                 for provfn in prov_list[prov]:
-                    msg += "\n%s has unique provides:\n %s" % (provfn, "\n ".join(provide_results[provfn] - commonprovs))
-                    msg += "\n%s has unique rprovides:\n %s" % (provfn, "\n ".join(rprovide_results[provfn] - commonrprovs))
+                    msgs.append("\n%s has unique provides:\n %s" % (provfn, "\n ".join(provide_results[provfn] - commonprovs)))
+                    msgs.append("\n%s has unique rprovides:\n %s" % (provfn, "\n ".join(rprovide_results[provfn] - commonrprovs)))

                 if self.warn_multi_bb:
-                    logger.verbnote(msg)
+                    logger.verbnote("".join(msgs))
                 else:
-                    logger.error(msg)
+                    logger.error("".join(msgs))

         self.init_progress_reporter.next_stage()
-
-        # Create a whitelist usable by the stamp checks
-        self.stampfnwhitelist = {}
-        for mc in self.taskData:
-            self.stampfnwhitelist[mc] = []
-            for entry in self.stampwhitelist.split():
-                if entry not in self.taskData[mc].build_targets:
-                    continue
-                fn = self.taskData.build_targets[entry][0]
-                self.stampfnwhitelist[mc].append(fn)
-
         self.init_progress_reporter.next_stage()
+        bb.event.check_for_interrupts(self.cooker.data)

         # Iterate over the task list looking for tasks with a 'setscene' function
         self.runq_setscene_tids = set()
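The provider-difference report assembled above is plain set algebra: intersecting the provide sets of all conflicting recipes gives the common part, and each recipe's unique part is its own set minus that intersection. A small illustration with made-up recipe and provider names:

    # Hypothetical provide sets for two recipes claiming the same provider.
    provide_results = {
        "recipe-a.bb": {"virtual/kernel", "linux-yocto", "kernel-base"},
        "recipe-b.bb": {"virtual/kernel", "linux-custom", "kernel-base"},
    }

    commonprovs = None
    for provfn, provides in provide_results.items():
        if commonprovs is None:
            commonprovs = set(provides)
        else:
            commonprovs &= provides  # keep only what every recipe provides

    for provfn, provides in provide_results.items():
        print("%s has unique provides: %s" % (provfn, ", ".join(sorted(provides - commonprovs))))
    # recipe-a.bb has unique provides: linux-yocto
    # recipe-b.bb has unique provides: linux-custom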
@@ -1143,6 +1239,7 @@ class RunQueueData:
                 self.runq_setscene_tids.add(tid)

         self.init_progress_reporter.next_stage()
+        bb.event.check_for_interrupts(self.cooker.data)

         # Invalidate task if force mode active
         if self.cooker.configuration.force:
@@ -1159,6 +1256,7 @@ class RunQueueData:
                 invalidate_task(fn + ":" + st, True)

         self.init_progress_reporter.next_stage()
+        bb.event.check_for_interrupts(self.cooker.data)

         # Create and print to the logs a virtual/xxxx -> PN (fn) table
         for mc in taskData:
@@ -1171,18 +1269,20 @@ class RunQueueData:
             bb.parse.siggen.tasks_resolved(virtmap, virtpnmap, self.dataCaches[mc])

         self.init_progress_reporter.next_stage()
+        bb.event.check_for_interrupts(self.cooker.data)

         bb.parse.siggen.set_setscene_tasks(self.runq_setscene_tids)

         # Iterate over the task list and call into the siggen code
         dealtwith = set()
         todeal = set(self.runtaskentries)
-        while len(todeal) > 0:
+        while todeal:
             for tid in todeal.copy():
-                if len(self.runtaskentries[tid].depends - dealtwith) == 0:
+                if not (self.runtaskentries[tid].depends - dealtwith):
                     dealtwith.add(tid)
                     todeal.remove(tid)
                     self.prepare_task_hash(tid)
+            bb.event.check_for_interrupts(self.cooker.data)

         bb.parse.siggen.writeout_file_checksum_cache()
@@ -1190,9 +1290,8 @@ class RunQueueData:
         return len(self.runtaskentries)

     def prepare_task_hash(self, tid):
-        dc = bb.parse.siggen.get_data_caches(self.dataCaches, mc_from_tid(tid))
-        bb.parse.siggen.prep_taskhash(tid, self.runtaskentries[tid].depends, dc)
-        self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(tid, self.runtaskentries[tid].depends, dc)
+        bb.parse.siggen.prep_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches)
+        self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches)
         self.runtaskentries[tid].unihash = bb.parse.siggen.get_unihash(tid)

     def dump_data(self):
@@ -1218,7 +1317,6 @@ class RunQueue:
         self.cfgData = cfgData
         self.rqdata = RunQueueData(self, cooker, cfgData, dataCaches, taskData, targets)

-        self.stamppolicy = cfgData.getVar("BB_STAMP_POLICY") or "perfile"
         self.hashvalidate = cfgData.getVar("BB_HASHCHECK_FUNCTION") or None
         self.depvalidate = cfgData.getVar("BB_SETSCENE_DEPVALID") or None

@@ -1237,30 +1335,40 @@ class RunQueue:
         self.worker = {}
         self.fakeworker = {}

+    @staticmethod
+    def send_pickled_data(worker, data, name):
+        msg = bytearray()
+        msg.extend(b"<" + name.encode() + b">")
+        pickled_data = pickle.dumps(data)
+        msg.extend(len(pickled_data).to_bytes(4, 'big'))
+        msg.extend(pickled_data)
+        msg.extend(b"</" + name.encode() + b">")
+        worker.stdin.write(msg)
+
     def _start_worker(self, mc, fakeroot = False, rqexec = None):
         logger.debug("Starting bitbake-worker")
         magic = "decafbad"
         if self.cooker.configuration.profile:
             magic = "decafbadbad"
+        fakerootlogs = None
+
+        workerscript = os.path.realpath(os.path.dirname(__file__) + "/../../bin/bitbake-worker")
         if fakeroot:
             magic = magic + "beef"
             mcdata = self.cooker.databuilder.mcdata[mc]
             fakerootcmd = shlex.split(mcdata.getVar("FAKEROOTCMD"))
             fakerootenv = (mcdata.getVar("FAKEROOTBASEENV") or "").split()
             env = os.environ.copy()
-            for key, value in (var.split('=') for var in fakerootenv):
+            for key, value in (var.split('=',1) for var in fakerootenv):
                 env[key] = value
-            worker = subprocess.Popen(fakerootcmd + ["bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE, env=env)
+            worker = subprocess.Popen(fakerootcmd + [sys.executable, workerscript, magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE, env=env)
+            fakerootlogs = self.rqdata.dataCaches[mc].fakerootlogs
         else:
-            worker = subprocess.Popen(["bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
+            worker = subprocess.Popen([sys.executable, workerscript, magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
         bb.utils.nonblockingfd(worker.stdout)
-        workerpipe = runQueuePipe(worker.stdout, None, self.cfgData, self, rqexec)
+        workerpipe = runQueuePipe(worker.stdout, None, self.cfgData, self, rqexec, fakerootlogs=fakerootlogs)

         workerdata = {
-            "taskdeps" : self.rqdata.dataCaches[mc].task_deps,
-            "fakerootenv" : self.rqdata.dataCaches[mc].fakerootenv,
-            "fakerootdirs" : self.rqdata.dataCaches[mc].fakerootdirs,
-            "fakerootnoenv" : self.rqdata.dataCaches[mc].fakerootnoenv,
             "sigdata" : bb.parse.siggen.get_taskdata(),
             "logdefaultlevel" : bb.msg.loggerDefaultLogLevel,
             "build_verbose_shell" : self.cooker.configuration.build_verbose_shell,
@@ -1274,9 +1382,9 @@ class RunQueue:
1274 "umask" : self.cfgData.getVar("BB_DEFAULT_UMASK"), 1382 "umask" : self.cfgData.getVar("BB_DEFAULT_UMASK"),
1275 } 1383 }
1276 1384
1277 worker.stdin.write(b"<cookerconfig>" + pickle.dumps(self.cooker.configuration) + b"</cookerconfig>") 1385 RunQueue.send_pickled_data(worker, self.cooker.configuration, "cookerconfig")
1278 worker.stdin.write(b"<extraconfigdata>" + pickle.dumps(self.cooker.extraconfigdata) + b"</extraconfigdata>") 1386 RunQueue.send_pickled_data(worker, self.cooker.extraconfigdata, "extraconfigdata")
1279 worker.stdin.write(b"<workerdata>" + pickle.dumps(workerdata) + b"</workerdata>") 1387 RunQueue.send_pickled_data(worker, workerdata, "workerdata")
1280 worker.stdin.flush() 1388 worker.stdin.flush()
1281 1389
1282 return RunQueueWorker(worker, workerpipe) 1390 return RunQueueWorker(worker, workerpipe)
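send_pickled_data, introduced above, frames every message to a worker as <name>, a 4-byte big-endian payload length, the pickled payload, then </name>. A round-trip sketch of that framing with a hypothetical reader (the real bitbake-worker accumulates pipe data incrementally; this assumes the whole message is already buffered):

    import pickle

    def read_framed(buf, name):
        # Parse one <name>...</name> framed message out of a bytes buffer.
        start = b"<" + name.encode() + b">"
        end = b"</" + name.encode() + b">"
        pos = buf.index(start) + len(start)
        size = int.from_bytes(buf[pos:pos + 4], "big")
        payload = buf[pos + 4:pos + 4 + size]
        assert buf[pos + 4 + size:pos + 4 + size + len(end)] == end
        return pickle.loads(payload)

    # Build a message exactly the way send_pickled_data does, then read it back
    data = {"taskname": "do_compile"}
    pickled = pickle.dumps(data)
    msg = b"<runtask>" + len(pickled).to_bytes(4, "big") + pickled + b"</runtask>"
    print(read_framed(msg, "runtask"))  # {'taskname': 'do_compile'}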
@@ -1286,7 +1394,7 @@ class RunQueue:
             return
         logger.debug("Teardown for bitbake-worker")
         try:
-            worker.process.stdin.write(b"<quit></quit>")
+            RunQueue.send_pickled_data(worker.process, b"", "quit")
             worker.process.stdin.flush()
             worker.process.stdin.close()
         except IOError:
@@ -1298,12 +1406,12 @@ class RunQueue:
                 continue
             worker.pipe.close()

-    def start_worker(self):
+    def start_worker(self, rqexec):
         if self.worker:
             self.teardown_workers()
         self.teardown = False
         for mc in self.rqdata.dataCaches:
-            self.worker[mc] = self._start_worker(mc)
+            self.worker[mc] = self._start_worker(mc, False, rqexec)

     def start_fakeworker(self, rqexec, mc):
         if not mc in self.fakeworker:
@@ -1345,15 +1453,7 @@ class RunQueue:
         if taskname is None:
             taskname = tn

-        if self.stamppolicy == "perfile":
-            fulldeptree = False
-        else:
-            fulldeptree = True
-        stampwhitelist = []
-        if self.stamppolicy == "whitelist":
-            stampwhitelist = self.rqdata.stampfnwhitelist[mc]
-
-        stampfile = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn)
+        stampfile = bb.parse.siggen.stampfile_mcfn(taskname, taskfn)

         # If the stamp is missing, it's not current
         if not os.access(stampfile, os.F_OK):
@@ -1365,7 +1465,7 @@ class RunQueue:
             logger.debug2("%s.%s is nostamp\n", fn, taskname)
             return False

-        if taskname != "do_setscene" and taskname.endswith("_setscene"):
+        if taskname.endswith("_setscene"):
             return True

         if cache is None:
@@ -1376,15 +1476,15 @@ class RunQueue:
         for dep in self.rqdata.runtaskentries[tid].depends:
             if iscurrent:
                 (mc2, fn2, taskname2, taskfn2) = split_tid_mcfn(dep)
-                stampfile2 = bb.build.stampfile(taskname2, self.rqdata.dataCaches[mc2], taskfn2)
-                stampfile3 = bb.build.stampfile(taskname2 + "_setscene", self.rqdata.dataCaches[mc2], taskfn2)
+                stampfile2 = bb.parse.siggen.stampfile_mcfn(taskname2, taskfn2)
+                stampfile3 = bb.parse.siggen.stampfile_mcfn(taskname2 + "_setscene", taskfn2)
                 t2 = get_timestamp(stampfile2)
                 t3 = get_timestamp(stampfile3)
                 if t3 and not t2:
                     continue
                 if t3 and t3 > t2:
                     continue
-                if fn == fn2 or (fulldeptree and fn2 not in stampwhitelist):
+                if fn == fn2:
                     if not t2:
                         logger.debug2('Stampfile %s does not exist', stampfile2)
                         iscurrent = False
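The stamp walk above is a pure mtime comparison: a dependency invalidates a task when the dependency's stamp is missing or newer than the task's own stamp, unless a newer _setscene stamp shows the dependency was satisfied from cache. A reduced sketch of that rule (the function and stamp paths are illustrative, not BitBake API):

    import os

    def get_timestamp(f):
        # Missing stamps report as None, mirroring the helper used above
        try:
            return os.stat(f).st_mtime
        except OSError:
            return None

    def dep_is_current(t1, stampfile2, stampfile3):
        # t1: mtime of this task's stamp; stampfile2: the dependency's stamp;
        # stampfile3: the dependency's _setscene stamp.
        t2 = get_timestamp(stampfile2)
        t3 = get_timestamp(stampfile3)
        if t3 and not t2:
            return True   # dep only ever ran from setscene, nothing to compare
        if t3 and t3 > t2:
            return True   # setscene stamp supersedes the real task's stamp
        if not t2:
            return False  # dep stamp missing entirely
        if t1 < t2:
            return False  # dep ran more recently than this task
        return True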
@@ -1434,10 +1534,11 @@ class RunQueue:
1434 """ 1534 """
1435 Run the tasks in a queue prepared by rqdata.prepare() 1535 Run the tasks in a queue prepared by rqdata.prepare()
1436 Upon failure, optionally try to recover the build using any alternate providers 1536 Upon failure, optionally try to recover the build using any alternate providers
1437 (if the abort on failure configuration option isn't set) 1537 (if the halt on failure configuration option isn't set)
1438 """ 1538 """
1439 1539
1440 retval = True 1540 retval = True
1541 bb.event.check_for_interrupts(self.cooker.data)
1441 1542
1442 if self.state is runQueuePrepare: 1543 if self.state is runQueuePrepare:
1443 # NOTE: if you add, remove or significantly refactor the stages of this 1544 # NOTE: if you add, remove or significantly refactor the stages of this
@@ -1466,10 +1567,13 @@ class RunQueue:

         if not self.dm_event_handler_registered:
             res = bb.event.register(self.dm_event_handler_name,
-                                    lambda x: self.dm.check(self) if self.state in [runQueueRunning, runQueueCleanUp] else False,
+                                    lambda x, y: self.dm.check(self) if self.state in [runQueueRunning, runQueueCleanUp] else False,
                                     ('bb.event.HeartbeatEvent',), data=self.cfgData)
             self.dm_event_handler_registered = True

+        self.rqdata.init_progress_reporter.next_stage()
+        self.rqexe = RunQueueExecute(self)
+
         dump = self.cooker.configuration.dump_signatures
         if dump:
             self.rqdata.init_progress_reporter.finish()
@@ -1481,16 +1585,14 @@ class RunQueue:
             self.state = runQueueComplete

         if self.state is runQueueSceneInit:
-            self.rqdata.init_progress_reporter.next_stage()
-            self.start_worker()
-            self.rqdata.init_progress_reporter.next_stage()
-            self.rqexe = RunQueueExecute(self)
+            self.start_worker(self.rqexe)
+            self.rqdata.init_progress_reporter.finish()

             # If we don't have any setscene functions, skip execution
-            if len(self.rqdata.runq_setscene_tids) == 0:
+            if not self.rqdata.runq_setscene_tids:
                 logger.info('No setscene tasks')
                 for tid in self.rqdata.runtaskentries:
-                    if len(self.rqdata.runtaskentries[tid].depends) == 0:
+                    if not self.rqdata.runtaskentries[tid].depends:
                         self.rqexe.setbuildable(tid)
                     self.rqexe.tasks_notcovered.add(tid)
                 self.rqexe.sqdone = True
@@ -1563,29 +1665,28 @@ class RunQueue:
         else:
             self.rqexe.finish()

-    def rq_dump_sigfn(self, fn, options):
-        bb_cache = bb.cache.NoCache(self.cooker.databuilder)
-        mc = bb.runqueue.mc_from_tid(fn)
-        the_data = bb_cache.loadDataFull(fn, self.cooker.collections[mc].get_file_appends(fn))
-        siggen = bb.parse.siggen
-        dataCaches = self.rqdata.dataCaches
-        siggen.dump_sigfn(fn, dataCaches, options)
+    def _rq_dump_sigtid(self, tids):
+        for tid in tids:
+            (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
+            dataCaches = self.rqdata.dataCaches
+            bb.parse.siggen.dump_sigtask(taskfn, taskname, dataCaches[mc].stamp[taskfn], True)

     def dump_signatures(self, options):
-        fns = set()
-        bb.note("Reparsing files to collect dependency data")
+        if bb.cooker.CookerFeatures.RECIPE_SIGGEN_INFO not in self.cooker.featureset:
+            bb.fatal("The dump signatures functionality needs the RECIPE_SIGGEN_INFO feature enabled")

-        for tid in self.rqdata.runtaskentries:
-            fn = fn_from_tid(tid)
-            fns.add(fn)
+        bb.note("Writing task signature files")

         max_process = int(self.cfgData.getVar("BB_NUMBER_PARSE_THREADS") or os.cpu_count() or 1)
+        def chunkify(l, n):
+            return [l[i::n] for i in range(n)]
+        tids = chunkify(list(self.rqdata.runtaskentries), max_process)
         # We cannot use the real multiprocessing.Pool easily due to some local data
         # that can't be pickled. This is a cheap multi-process solution.
         launched = []
-        while fns:
+        while tids:
             if len(launched) < max_process:
-                p = Process(target=self.rq_dump_sigfn, args=(fns.pop(), options))
+                p = Process(target=self._rq_dump_sigtid, args=(tids.pop(), ))
                 p.start()
                 launched.append(p)
             for q in launched:
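chunkify above splits the task list into n round-robin slices via extended slicing, one slice per worker process; slice lengths differ by at most one. For example:

    def chunkify(l, n):
        # l[i::n] takes every n-th element starting at offset i
        return [l[i::n] for i in range(n)]

    print(chunkify(list(range(10)), 3))
    # [[0, 3, 6, 9], [1, 4, 7], [2, 5, 8]]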
@@ -1600,6 +1701,17 @@ class RunQueue:
             return

     def print_diffscenetasks(self):
+        def get_root_invalid_tasks(task, taskdepends, valid, noexec, visited_invalid):
+            invalidtasks = []
+            for t in taskdepends[task].depends:
+                if t not in valid and t not in visited_invalid:
+                    invalidtasks.extend(get_root_invalid_tasks(t, taskdepends, valid, noexec, visited_invalid))
+                visited_invalid.add(t)
+
+            direct_invalid = [t for t in taskdepends[task].depends if t not in valid]
+            if not direct_invalid and task not in noexec:
+                invalidtasks = [task]
+            return invalidtasks

         noexec = []
         tocheck = set()
@@ -1633,46 +1745,49 @@ class RunQueue:
                 valid_new.add(dep)

         invalidtasks = set()
-        for tid in self.rqdata.runtaskentries:
-            if tid not in valid_new and tid not in noexec:
-                invalidtasks.add(tid)

-        found = set()
-        processed = set()
-        for tid in invalidtasks:
+        toptasks = set(["{}:{}".format(t[3], t[2]) for t in self.rqdata.targets])
+        for tid in toptasks:
             toprocess = set([tid])
             while toprocess:
                 next = set()
+                visited_invalid = set()
                 for t in toprocess:
-                    for dep in self.rqdata.runtaskentries[t].depends:
-                        if dep in invalidtasks:
-                            found.add(tid)
-                        if dep not in processed:
-                            processed.add(dep)
-                            next.add(dep)
+                    if t not in valid_new and t not in noexec:
+                        invalidtasks.update(get_root_invalid_tasks(t, self.rqdata.runtaskentries, valid_new, noexec, visited_invalid))
+                        continue
+                    if t in self.rqdata.runq_setscene_tids:
+                        for dep in self.rqexe.sqdata.sq_deps[t]:
+                            next.add(dep)
+                        continue
+
+                    for dep in self.rqdata.runtaskentries[t].depends:
+                        next.add(dep)
+
                 toprocess = next
-                if tid in found:
-                    toprocess = set()

         tasklist = []
-        for tid in invalidtasks.difference(found):
+        for tid in invalidtasks:
             tasklist.append(tid)

         if tasklist:
             bb.plain("The differences between the current build and any cached tasks start at the following tasks:\n" + "\n".join(tasklist))

-        return invalidtasks.difference(found)
+        return invalidtasks

     def write_diffscenetasks(self, invalidtasks):
+        bb.siggen.check_siggen_version(bb.siggen)

         # Define recursion callback
         def recursecb(key, hash1, hash2):
             hashes = [hash1, hash2]
+            bb.debug(1, "Recursively looking for recipe {} hashes {}".format(key, hashes))
             hashfiles = bb.siggen.find_siginfo(key, None, hashes, self.cfgData)
+            bb.debug(1, "Found hashfiles:\n{}".format(hashfiles))

             recout = []
             if len(hashfiles) == 2:
-                out2 = bb.siggen.compare_sigfiles(hashfiles[hash1], hashfiles[hash2], recursecb)
+                out2 = bb.siggen.compare_sigfiles(hashfiles[hash1]['path'], hashfiles[hash2]['path'], recursecb)
                 recout.extend(list(' ' + l for l in out2))
             else:
                 recout.append("Unable to find matching sigdata for %s with hashes %s or %s" % (key, hash1, hash2))
@@ -1683,20 +1798,25 @@ class RunQueue:
         for tid in invalidtasks:
             (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
             pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
-            h = self.rqdata.runtaskentries[tid].hash
-            matches = bb.siggen.find_siginfo(pn, taskname, [], self.cfgData)
+            h = self.rqdata.runtaskentries[tid].unihash
+            bb.debug(1, "Looking for recipe {} task {}".format(pn, taskname))
+            matches = bb.siggen.find_siginfo(pn, taskname, [], self.cooker.databuilder.mcdata[mc])
+            bb.debug(1, "Found hashfiles:\n{}".format(matches))
             match = None
-            for m in matches:
-                if h in m:
-                    match = m
+            for m in matches.values():
+                if h in m['path']:
+                    match = m['path']
             if match is None:
-                bb.fatal("Can't find a task we're supposed to have written out? (hash: %s)?" % h)
+                bb.fatal("Can't find a task we're supposed to have written out? (hash: %s tid: %s)?" % (h, tid))
             matches = {k : v for k, v in iter(matches.items()) if h not in k}
+            matches_local = {k : v for k, v in iter(matches.items()) if h not in k and not v['sstate']}
+            if matches_local:
+                matches = matches_local
             if matches:
-                latestmatch = sorted(matches.keys(), key=lambda f: matches[f])[-1]
+                latestmatch = matches[sorted(matches.keys(), key=lambda h: matches[h]['time'])[-1]]['path']
                 prevh = __find_sha256__.search(latestmatch).group(0)
                 output = bb.siggen.compare_sigfiles(latestmatch, match, recursecb)
-                bb.plain("\nTask %s:%s couldn't be used from the cache because:\n We need hash %s, closest matching task was %s\n " % (pn, taskname, h, prevh) + '\n '.join(output))
+                bb.plain("\nTask %s:%s couldn't be used from the cache because:\n We need hash %s, most recent matching task was %s\n " % (pn, taskname, h, prevh) + '\n '.join(output))


 class RunQueueExecute:
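After this change find_siginfo returns a dict whose values carry 'path', 'time' and 'sstate' fields, and the code above prefers the most recent local (non-sstate) signature file. The selection logic in isolation, over made-up entries:

    # Stand-in for the find_siginfo result shape used above.
    matches = {
        "k1": {"path": "stamps/foo.do_compile.sigdata.aaaa", "time": 100, "sstate": True},
        "k2": {"path": "stamps/foo.do_compile.sigdata.bbbb", "time": 90, "sstate": False},
        "k3": {"path": "stamps/foo.do_compile.sigdata.cccc", "time": 80, "sstate": False},
    }

    # Prefer locally written signature files whenever any exist
    matches_local = {k: v for k, v in matches.items() if not v["sstate"]}
    if matches_local:
        matches = matches_local

    # Then take the path of the newest remaining entry
    latestmatch = matches[sorted(matches.keys(), key=lambda k: matches[k]["time"])[-1]]["path"]
    print(latestmatch)  # stamps/foo.do_compile.sigdata.bbbb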
@@ -1709,6 +1829,10 @@ class RunQueueExecute:

         self.number_tasks = int(self.cfgData.getVar("BB_NUMBER_THREADS") or 1)
         self.scheduler = self.cfgData.getVar("BB_SCHEDULER") or "speed"
+        self.max_cpu_pressure = self.cfgData.getVar("BB_PRESSURE_MAX_CPU")
+        self.max_io_pressure = self.cfgData.getVar("BB_PRESSURE_MAX_IO")
+        self.max_memory_pressure = self.cfgData.getVar("BB_PRESSURE_MAX_MEMORY")
+        self.max_loadfactor = self.cfgData.getVar("BB_LOADFACTOR_MAX")

         self.sq_buildable = set()
         self.sq_running = set()
@@ -1726,6 +1850,8 @@ class RunQueueExecute:
         self.build_stamps2 = []
         self.failed_tids = []
         self.sq_deferred = {}
+        self.sq_needed_harddeps = set()
+        self.sq_harddep_deferred = set()

         self.stampcache = {}

@@ -1733,17 +1859,39 @@ class RunQueueExecute:
         self.holdoff_need_update = True
         self.sqdone = False

-        self.stats = RunQueueStats(len(self.rqdata.runtaskentries))
-        self.sq_stats = RunQueueStats(len(self.rqdata.runq_setscene_tids))
-
-        for mc in rq.worker:
-            rq.worker[mc].pipe.setrunqueueexec(self)
-        for mc in rq.fakeworker:
-            rq.fakeworker[mc].pipe.setrunqueueexec(self)
+        self.stats = RunQueueStats(len(self.rqdata.runtaskentries), len(self.rqdata.runq_setscene_tids))

         if self.number_tasks <= 0:
             bb.fatal("Invalid BB_NUMBER_THREADS %s" % self.number_tasks)

+        lower_limit = 1.0
+        upper_limit = 1000000.0
+        if self.max_cpu_pressure:
+            self.max_cpu_pressure = float(self.max_cpu_pressure)
+            if self.max_cpu_pressure < lower_limit:
+                bb.fatal("Invalid BB_PRESSURE_MAX_CPU %s, minimum value is %s." % (self.max_cpu_pressure, lower_limit))
+            if self.max_cpu_pressure > upper_limit:
+                bb.warn("Your build will be largely unregulated since BB_PRESSURE_MAX_CPU is set to %s. It is very unlikely that such high pressure will be experienced." % (self.max_cpu_pressure))
+
+        if self.max_io_pressure:
+            self.max_io_pressure = float(self.max_io_pressure)
+            if self.max_io_pressure < lower_limit:
+                bb.fatal("Invalid BB_PRESSURE_MAX_IO %s, minimum value is %s." % (self.max_io_pressure, lower_limit))
+            if self.max_io_pressure > upper_limit:
+                bb.warn("Your build will be largely unregulated since BB_PRESSURE_MAX_IO is set to %s. It is very unlikely that such high pressure will be experienced." % (self.max_io_pressure))
+
+        if self.max_memory_pressure:
+            self.max_memory_pressure = float(self.max_memory_pressure)
+            if self.max_memory_pressure < lower_limit:
+                bb.fatal("Invalid BB_PRESSURE_MAX_MEMORY %s, minimum value is %s." % (self.max_memory_pressure, lower_limit))
+            if self.max_memory_pressure > upper_limit:
+                bb.warn("Your build will be largely unregulated since BB_PRESSURE_MAX_MEMORY is set to %s. It is very unlikely that such high pressure will be experienced." % (self.max_memory_pressure))
+
+        if self.max_loadfactor:
+            self.max_loadfactor = float(self.max_loadfactor)
+            if self.max_loadfactor <= 0:
+                bb.fatal("Invalid BB_LOADFACTOR_MAX %s, needs to be greater than zero." % (self.max_loadfactor))
+
         # List of setscene tasks which we've covered
         self.scenequeue_covered = set()
         # List of tasks which are covered (including setscene ones)
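The BB_PRESSURE_MAX_* limits validated above relate to Linux PSI counters, whose 'total' fields count cumulative stall time in microseconds; that is presumably why the accepted range runs from 1.0 to 1000000.0, since a task can stall at most one million microseconds per second. A minimal sketch of sampling such a counter (this reads /proc/pressure/cpu directly; the runqueue's actual throttling bookkeeping lives elsewhere in this class):

    import time

    def read_psi_some_total(path):
        # Return the cumulative "some" stall time in microseconds from a
        # /proc/pressure file (Linux 4.20+), e.g. /proc/pressure/cpu.
        with open(path) as f:
            for line in f:
                if line.startswith("some"):
                    return int(line.split("total=")[1])
        raise RuntimeError("no 'some' line in %s" % path)

    # Sample twice; the counter delta per second of wall time is the stall
    # rate a limit such as BB_PRESSURE_MAX_CPU would be compared against.
    t0, s0 = time.time(), read_psi_some_total("/proc/pressure/cpu")
    time.sleep(1)
    t1, s1 = time.time(), read_psi_some_total("/proc/pressure/cpu")
    print("cpu some-stall: %.1f us per second" % ((s1 - s0) / (t1 - t0)))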
@@ -1753,11 +1901,6 @@ class RunQueueExecute:
         self.tasks_notcovered = set()
         self.scenequeue_notneeded = set()

-        # We can't skip specified target tasks which aren't setscene tasks
-        self.cantskip = set(self.rqdata.target_tids)
-        self.cantskip.difference_update(self.rqdata.runq_setscene_tids)
-        self.cantskip.intersection_update(self.rqdata.runtaskentries)
-
         schedulers = self.get_schedulers()
         for scheduler in schedulers:
             if self.scheduler == scheduler.name:
@@ -1768,11 +1911,29 @@ class RunQueueExecute:
             bb.fatal("Invalid scheduler '%s'. Available schedulers: %s" %
                      (self.scheduler, ", ".join(obj.name for obj in schedulers)))

-        #if len(self.rqdata.runq_setscene_tids) > 0:
+        #if self.rqdata.runq_setscene_tids:
         self.sqdata = SQData()
-        build_scenequeue_data(self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self)
+        build_scenequeue_data(self.sqdata, self.rqdata, self)
+
+        update_scenequeue_data(self.sqdata.sq_revdeps, self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=True)
+
+        # Compute a list of 'stale' sstate tasks where the current hash does not match the one
+        # in any stamp files. Pass the list out to metadata as an event.
+        found = {}
+        for tid in self.rqdata.runq_setscene_tids:
+            (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
+            stamps = bb.build.find_stale_stamps(taskname, taskfn)
+            if stamps:
+                if mc not in found:
+                    found[mc] = {}
+                found[mc][tid] = stamps
+        for mc in found:
+            event = bb.event.StaleSetSceneTasks(found[mc])
+            bb.event.fire(event, self.cooker.databuilder.mcdata[mc])
+
+        self.build_taskdepdata_cache()

-    def runqueue_process_waitpid(self, task, status):
+    def runqueue_process_waitpid(self, task, status, fakerootlog=None):

         # self.build_stamps[pid] may not exist when use shared work directory.
         if task in self.build_stamps:
@@ -1785,9 +1946,10 @@ class RunQueueExecute:
             else:
                 self.sq_task_complete(task)
             self.sq_live.remove(task)
+            self.stats.updateActiveSetscene(len(self.sq_live))
         else:
             if status != 0:
-                self.task_fail(task, status)
+                self.task_fail(task, status, fakerootlog=fakerootlog)
             else:
                 self.task_complete(task)
         return True
@@ -1795,20 +1957,20 @@ class RunQueueExecute:
     def finish_now(self):
         for mc in self.rq.worker:
             try:
-                self.rq.worker[mc].process.stdin.write(b"<finishnow></finishnow>")
+                RunQueue.send_pickled_data(self.rq.worker[mc].process, b"", "finishnow")
                 self.rq.worker[mc].process.stdin.flush()
             except IOError:
                 # worker must have died?
                 pass
         for mc in self.rq.fakeworker:
             try:
-                self.rq.fakeworker[mc].process.stdin.write(b"<finishnow></finishnow>")
+                RunQueue.send_pickled_data(self.rq.fakeworker[mc].process, b"", "finishnow")
                 self.rq.fakeworker[mc].process.stdin.flush()
             except IOError:
                 # worker must have died?
                 pass

-        if len(self.failed_tids) != 0:
+        if self.failed_tids:
             self.rq.state = runQueueFailed
             return

@@ -1818,13 +1980,13 @@ class RunQueueExecute:
     def finish(self):
         self.rq.state = runQueueCleanUp

-        active = self.stats.active + self.sq_stats.active
+        active = self.stats.active + len(self.sq_live)
         if active > 0:
             bb.event.fire(runQueueExitWait(active), self.cfgData)
             self.rq.read_workers()
             return self.rq.active_fds()

-        if len(self.failed_tids) != 0:
+        if self.failed_tids:
             self.rq.state = runQueueFailed
             return True

@@ -1851,7 +2013,7 @@ class RunQueueExecute:
         return valid

     def can_start_task(self):
-        active = self.stats.active + self.sq_stats.active
+        active = self.stats.active + len(self.sq_live)
         can_start = active < self.number_tasks
         return can_start

@@ -1871,8 +2033,7 @@ class RunQueueExecute:
             try:
                 module = __import__(modname, fromlist=(name,))
             except ImportError as exc:
-                logger.critical("Unable to import scheduler '%s' from '%s': %s" % (name, modname, exc))
-                raise SystemExit(1)
+                bb.fatal("Unable to import scheduler '%s' from '%s': %s" % (name, modname, exc))
             else:
                 schedulers.add(getattr(module, name))
         return schedulers
@@ -1902,21 +2063,52 @@ class RunQueueExecute:
                 self.setbuildable(revdep)
                 logger.debug("Marking task %s as buildable", revdep)

+        found = None
+        for t in sorted(self.sq_deferred.copy()):
+            if self.sq_deferred[t] == task:
+                # Allow the next deferred task to run. Any other deferred tasks should be deferred after that task.
+                # We shouldn't allow all to run at once as it is prone to races.
+                if not found:
+                    bb.debug(1, "Deferred task %s now buildable" % t)
+                    del self.sq_deferred[t]
+                    update_scenequeue_data([t], self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=False)
+                    found = t
+                else:
+                    bb.debug(1, "Deferring %s after %s" % (t, found))
+                    self.sq_deferred[t] = found
+
     def task_complete(self, task):
         self.stats.taskCompleted()
         bb.event.fire(runQueueTaskCompleted(task, self.stats, self.rq), self.cfgData)
         self.task_completeoutright(task)
         self.runq_tasksrun.add(task)

-    def task_fail(self, task, exitcode):
+    def task_fail(self, task, exitcode, fakerootlog=None):
         """
         Called when a task has failed
         Updates the state engine with the failure
         """
         self.stats.taskFailed()
         self.failed_tids.append(task)
-        bb.event.fire(runQueueTaskFailed(task, self.stats, exitcode, self.rq), self.cfgData)
-        if self.rqdata.taskData[''].abort:
+
+        fakeroot_log = []
+        if fakerootlog and os.path.exists(fakerootlog):
+            with open(fakerootlog) as fakeroot_log_file:
+                fakeroot_failed = False
+                for line in reversed(fakeroot_log_file.readlines()):
+                    for fakeroot_error in ['mismatch', 'error', 'fatal']:
+                        if fakeroot_error in line.lower():
+                            fakeroot_failed = True
+                    if 'doing new pid setup and server start' in line:
+                        break
+                    fakeroot_log.append(line)
+
+            if not fakeroot_failed:
+                fakeroot_log = []
+
+        bb.event.fire(runQueueTaskFailed(task, self.stats, exitcode, self.rq, fakeroot_log=("".join(fakeroot_log) or None)), self.cfgData)
+
+        if self.rqdata.taskData[''].halt:
             self.rq.state = runQueueCleanUp

@@ -1931,7 +2123,7 @@ class RunQueueExecute:
         err = False
         if not self.sqdone:
             logger.debug('We could skip tasks %s', "\n".join(sorted(self.scenequeue_covered)))
-            completeevent = sceneQueueComplete(self.sq_stats, self.rq)
+            completeevent = sceneQueueComplete(self.stats, self.rq)
             bb.event.fire(completeevent, self.cfgData)
         if self.sq_deferred:
             logger.error("Scenequeue had deferred entries: %s" % pprint.pformat(self.sq_deferred))
1943 logger.error("Scenequeue had holdoff tasks: %s" % pprint.pformat(self.holdoff_tasks)) 2135 logger.error("Scenequeue had holdoff tasks: %s" % pprint.pformat(self.holdoff_tasks))
1944 err = True 2136 err = True
1945 2137
2138 for tid in self.scenequeue_covered.intersection(self.scenequeue_notcovered):
2139 # No task should end up in both covered and uncovered, that is a bug.
2140 logger.error("Setscene task %s in both covered and notcovered." % tid)
2141
1946 for tid in self.rqdata.runq_setscene_tids: 2142 for tid in self.rqdata.runq_setscene_tids:
1947 if tid not in self.scenequeue_covered and tid not in self.scenequeue_notcovered: 2143 if tid not in self.scenequeue_covered and tid not in self.scenequeue_notcovered:
1948 err = True 2144 err = True
@@ -1961,7 +2157,7 @@ class RunQueueExecute:
             if x not in self.tasks_scenequeue_done:
                 logger.error("Task %s was never processed by the setscene code" % x)
                 err = True
-            if len(self.rqdata.runtaskentries[x].depends) == 0 and x not in self.runq_buildable:
+            if not self.rqdata.runtaskentries[x].depends and x not in self.runq_buildable:
                 logger.error("Task %s was never marked as buildable by the setscene code" % x)
                 err = True
         return err
@@ -1984,8 +2180,11 @@ class RunQueueExecute:
         if not self.sqdone and self.can_start_task():
             # Find the next setscene to run
             for nexttask in self.sorted_setscene_tids:
-                if nexttask in self.sq_buildable and nexttask not in self.sq_running and self.sqdata.stamps[nexttask] not in self.build_stamps.values():
-                    if nexttask not in self.sqdata.unskippable and len(self.sqdata.sq_revdeps[nexttask]) > 0 and self.sqdata.sq_revdeps[nexttask].issubset(self.scenequeue_covered) and self.check_dependencies(nexttask, self.sqdata.sq_revdeps[nexttask]):
+                if nexttask in self.sq_buildable and nexttask not in self.sq_running and self.sqdata.stamps[nexttask] not in self.build_stamps.values() and nexttask not in self.sq_harddep_deferred:
+                    if nexttask not in self.sqdata.unskippable and self.sqdata.sq_revdeps[nexttask] and \
+                            nexttask not in self.sq_needed_harddeps and \
+                            self.sqdata.sq_revdeps[nexttask].issubset(self.scenequeue_covered) and \
+                            self.check_dependencies(nexttask, self.sqdata.sq_revdeps[nexttask]):
                         if nexttask not in self.rqdata.target_tids:
                             logger.debug2("Skipping setscene for task %s" % nexttask)
                             self.sq_task_skip(nexttask)
@@ -1993,6 +2192,19 @@ class RunQueueExecute:
                             if nexttask in self.sq_deferred:
                                 del self.sq_deferred[nexttask]
                             return True
+                    if nexttask in self.sqdata.sq_harddeps_rev and not self.sqdata.sq_harddeps_rev[nexttask].issubset(self.scenequeue_covered | self.scenequeue_notcovered):
+                        logger.debug2("Deferring %s due to hard dependencies" % nexttask)
+                        updated = False
+                        for dep in self.sqdata.sq_harddeps_rev[nexttask]:
+                            if dep not in self.sq_needed_harddeps:
+                                logger.debug2("Enabling task %s as it is a hard dependency" % dep)
+                                self.sq_buildable.add(dep)
+                                self.sq_needed_harddeps.add(dep)
+                                updated = True
+                        self.sq_harddep_deferred.add(nexttask)
+                        if updated:
+                            return True
+                        continue
                     # If covered tasks are running, need to wait for them to complete
                     for t in self.sqdata.sq_covered_tasks[nexttask]:
                         if t in self.runq_running and t not in self.runq_complete:
@@ -2007,8 +2219,6 @@ class RunQueueExecute:
                             logger.debug("%s didn't become valid, skipping setscene" % nexttask)
                             self.sq_task_failoutright(nexttask)
                             return True
-                        else:
-                            self.sqdata.outrightfail.remove(nexttask)
                     if nexttask in self.sqdata.outrightfail:
                         logger.debug2('No package found, so skipping setscene task %s', nexttask)
                         self.sq_task_failoutright(nexttask)
@@ -2040,28 +2250,42 @@ class RunQueueExecute:
             self.sq_task_failoutright(task)
             return True

-        startevent = sceneQueueTaskStarted(task, self.sq_stats, self.rq)
+        startevent = sceneQueueTaskStarted(task, self.stats, self.rq)
         bb.event.fire(startevent, self.cfgData)

-        taskdepdata = self.sq_build_taskdepdata(task)
-
         taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn]
-        taskhash = self.rqdata.get_task_hash(task)
-        unihash = self.rqdata.get_task_unihash(task)
+        realfn = bb.cache.virtualfn2realfn(taskfn)[0]
+        runtask = {
+            'fn' : taskfn,
+            'task' : task,
+            'taskname' : taskname,
+            'taskhash' : self.rqdata.get_task_hash(task),
+            'unihash' : self.rqdata.get_task_unihash(task),
+            'quieterrors' : True,
+            'appends' : self.cooker.collections[mc].get_file_appends(taskfn),
+            'layername' : self.cooker.collections[mc].calc_bbfile_priority(realfn)[2],
+            'taskdepdata' : self.sq_build_taskdepdata(task),
+            'dry_run' : False,
+            'taskdep': taskdep,
+            'fakerootenv' : self.rqdata.dataCaches[mc].fakerootenv[taskfn],
+            'fakerootdirs' : self.rqdata.dataCaches[mc].fakerootdirs[taskfn],
+            'fakerootnoenv' : self.rqdata.dataCaches[mc].fakerootnoenv[taskfn]
+        }
+
         if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not self.cooker.configuration.dry_run:
             if not mc in self.rq.fakeworker:
                 self.rq.start_fakeworker(self, mc)
-            self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, True, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, False)) + b"</runtask>")
+            RunQueue.send_pickled_data(self.rq.fakeworker[mc].process, runtask, "runtask")
             self.rq.fakeworker[mc].process.stdin.flush()
         else:
-            self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, True, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, False)) + b"</runtask>")
+            RunQueue.send_pickled_data(self.rq.worker[mc].process, runtask, "runtask")
             self.rq.worker[mc].process.stdin.flush()

-        self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True)
+        self.build_stamps[task] = bb.parse.siggen.stampfile_mcfn(taskname, taskfn, extrainfo=False)
         self.build_stamps2.append(self.build_stamps[task])
         self.sq_running.add(task)
         self.sq_live.add(task)
-        self.sq_stats.taskActive()
+        self.stats.updateActiveSetscene(len(self.sq_live))
         if self.can_start_task():
             return True

@@ -2092,9 +2316,9 @@ class RunQueueExecute:
         if task is not None:
             (mc, fn, taskname, taskfn) = split_tid_mcfn(task)

-            if self.rqdata.setscenewhitelist is not None:
-                if self.check_setscenewhitelist(task):
-                    self.task_fail(task, "setscene whitelist")
+            if self.rqdata.setscene_ignore_tasks is not None:
+                if self.check_setscene_ignore_tasks(task):
+                    self.task_fail(task, "setscene ignore_tasks")
                     return True

             if task in self.tasks_covered:
@@ -2117,18 +2341,32 @@ class RunQueueExecute:
                 self.runq_running.add(task)
                 self.stats.taskActive()
                 if not (self.cooker.configuration.dry_run or self.rqdata.setscene_enforce):
-                    bb.build.make_stamp(taskname, self.rqdata.dataCaches[mc], taskfn)
+                    bb.build.make_stamp_mcfn(taskname, taskfn)
                 self.task_complete(task)
                 return True
             else:
                 startevent = runQueueTaskStarted(task, self.stats, self.rq)
                 bb.event.fire(startevent, self.cfgData)

-            taskdepdata = self.build_taskdepdata(task)
-
             taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn]
-            taskhash = self.rqdata.get_task_hash(task)
-            unihash = self.rqdata.get_task_unihash(task)
+            realfn = bb.cache.virtualfn2realfn(taskfn)[0]
+            runtask = {
+                'fn' : taskfn,
+                'task' : task,
+                'taskname' : taskname,
+                'taskhash' : self.rqdata.get_task_hash(task),
+                'unihash' : self.rqdata.get_task_unihash(task),
+                'quieterrors' : False,
+                'appends' : self.cooker.collections[mc].get_file_appends(taskfn),
+                'layername' : self.cooker.collections[mc].calc_bbfile_priority(realfn)[2],
+                'taskdepdata' : self.build_taskdepdata(task),
+                'dry_run' : self.rqdata.setscene_enforce,
+                'taskdep': taskdep,
+                'fakerootenv' : self.rqdata.dataCaches[mc].fakerootenv[taskfn],
+                'fakerootdirs' : self.rqdata.dataCaches[mc].fakerootdirs[taskfn],
+                'fakerootnoenv' : self.rqdata.dataCaches[mc].fakerootnoenv[taskfn]
+            }
+
             if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not (self.cooker.configuration.dry_run or self.rqdata.setscene_enforce):
                 if not mc in self.rq.fakeworker:
                     try:
@@ -2138,31 +2376,31 @@ class RunQueueExecute:
2138 self.rq.state = runQueueFailed 2376 self.rq.state = runQueueFailed
2139 self.stats.taskFailed() 2377 self.stats.taskFailed()
2140 return True 2378 return True
2141 self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, False, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, self.rqdata.setscene_enforce)) + b"</runtask>") 2379 RunQueue.send_pickled_data(self.rq.fakeworker[mc].process, runtask, "runtask")
2142 self.rq.fakeworker[mc].process.stdin.flush() 2380 self.rq.fakeworker[mc].process.stdin.flush()
2143 else: 2381 else:
2144 self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, False, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, self.rqdata.setscene_enforce)) + b"</runtask>") 2382 RunQueue.send_pickled_data(self.rq.worker[mc].process, runtask, "runtask")
2145 self.rq.worker[mc].process.stdin.flush() 2383 self.rq.worker[mc].process.stdin.flush()
2146 2384
2147 self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True) 2385 self.build_stamps[task] = bb.parse.siggen.stampfile_mcfn(taskname, taskfn, extrainfo=False)
2148 self.build_stamps2.append(self.build_stamps[task]) 2386 self.build_stamps2.append(self.build_stamps[task])
2149 self.runq_running.add(task) 2387 self.runq_running.add(task)
2150 self.stats.taskActive() 2388 self.stats.taskActive()
2151 if self.can_start_task(): 2389 if self.can_start_task():
2152 return True 2390 return True
2153 2391
2154 if self.stats.active > 0 or self.sq_stats.active > 0: 2392 if self.stats.active > 0 or self.sq_live:
2155 self.rq.read_workers() 2393 self.rq.read_workers()
2156 return self.rq.active_fds() 2394 return self.rq.active_fds()
2157 2395
2158 # No more tasks can be run. If we have deferred setscene tasks we should run them. 2396 # No more tasks can be run. If we have deferred setscene tasks we should run them.
2159 if self.sq_deferred: 2397 if self.sq_deferred:
2160 tid = self.sq_deferred.pop(list(self.sq_deferred.keys())[0]) 2398 deferred_tid = list(self.sq_deferred.keys())[0]
2161 logger.warning("Runqeueue deadlocked on deferred tasks, forcing task %s" % tid) 2399 blocking_tid = self.sq_deferred.pop(deferred_tid)
2162 self.sq_task_failoutright(tid) 2400 logger.warning("Runqueue deadlocked on deferred tasks, forcing task %s blocked by %s" % (deferred_tid, blocking_tid))
2163 return True 2401 return True
2164 2402
2165 if len(self.failed_tids) != 0: 2403 if self.failed_tids:
2166 self.rq.state = runQueueFailed 2404 self.rq.state = runQueueFailed
2167 return True 2405 return True
2168 2406
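
Both worker and fakeworker writes now go through RunQueue.send_pickled_data instead of hand-rolling the <tag>...</tag> framing at every call site. A sketch of the assumed helper shape (the real method lives on RunQueue and takes the worker process; this standalone version is for illustration only):

    import io
    import pickle

    def send_pickled_data(stream, data, name):
        # Frame the pickled payload in <name>...</name> markers so the reader
        # can locate message boundaries in the byte stream.
        tag = name.encode("utf-8")
        stream.write(b"<" + tag + b">" + pickle.dumps(data) + b"</" + tag + b">")

    buf = io.BytesIO()
    send_pickled_data(buf, {"task": "do_fetch"}, "runtask")
    assert buf.getvalue().startswith(b"<runtask>")
    assert buf.getvalue().endswith(b"</runtask>")
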
@@ -2195,6 +2433,22 @@ class RunQueueExecute:
2195 ret.add(dep) 2433 ret.add(dep)
2196 return ret 2434 return ret
2197 2435
2436 # Build the individual cache entries in advance once to save time
2437 def build_taskdepdata_cache(self):
2438 taskdepdata_cache = {}
2439 for task in self.rqdata.runtaskentries:
2440 (mc, fn, taskname, taskfn) = split_tid_mcfn(task)
2441 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
2442 deps = self.rqdata.runtaskentries[task].depends
2443 provides = self.rqdata.dataCaches[mc].fn_provides[taskfn]
2444 taskhash = self.rqdata.runtaskentries[task].hash
2445 unihash = self.rqdata.runtaskentries[task].unihash
2446 deps = self.filtermcdeps(task, mc, deps)
2447 hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn]
2448 taskdepdata_cache[task] = [pn, taskname, fn, deps, provides, taskhash, unihash, hashfn]
2449
2450 self.taskdepdata_cache = taskdepdata_cache
2451
2198 # We filter out multiconfig dependencies from taskdepdata we pass to the tasks 2452 # We filter out multiconfig dependencies from taskdepdata we pass to the tasks
2199 # as most code can't handle them 2453 # as most code can't handle them
2200 def build_taskdepdata(self, task): 2454 def build_taskdepdata(self, task):
@@ -2206,15 +2460,9 @@ class RunQueueExecute:
2206 while next: 2460 while next:
2207 additional = [] 2461 additional = []
2208 for revdep in next: 2462 for revdep in next:
2209 (mc, fn, taskname, taskfn) = split_tid_mcfn(revdep) 2463 self.taskdepdata_cache[revdep][6] = self.rqdata.runtaskentries[revdep].unihash
2210 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn] 2464 taskdepdata[revdep] = self.taskdepdata_cache[revdep]
2211 deps = self.rqdata.runtaskentries[revdep].depends 2465 for revdep2 in self.taskdepdata_cache[revdep][3]:
2212 provides = self.rqdata.dataCaches[mc].fn_provides[taskfn]
2213 taskhash = self.rqdata.runtaskentries[revdep].hash
2214 unihash = self.rqdata.runtaskentries[revdep].unihash
2215 deps = self.filtermcdeps(task, mc, deps)
2216 taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash, unihash]
2217 for revdep2 in deps:
2218 if revdep2 not in taskdepdata: 2466 if revdep2 not in taskdepdata:
2219 additional.append(revdep2) 2467 additional.append(revdep2)
2220 next = additional 2468 next = additional
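
build_taskdepdata_cache above precomputes one row per task ([pn, taskname, fn, deps, provides, taskhash, unihash, hashfn]), so build_taskdepdata only has to walk the dependency closure of one task, refreshing the unihash column (index 6) on the way. A toy model of that walk, with hypothetical task ids and hashes:

    # Toy model (hypothetical data): precompute rows once, then walk the
    # dependency closure, refreshing only the unihash slot (index 6).
    cache = {
        "a:do_x": ["pn-a", "do_x", "a.bb", {"b:do_y"}, ["a"], "hash-a", "uni-a", "fn-a"],
        "b:do_y": ["pn-b", "do_y", "b.bb", set(), ["b"], "hash-b", "uni-b", "fn-b"],
    }
    latest_unihash = {"a:do_x": "uni-a2", "b:do_y": "uni-b"}

    def build_taskdepdata(task):
        taskdepdata = {}
        queue = {task}
        while queue:
            additional = set()
            for tid in queue:
                cache[tid][6] = latest_unihash[tid]
                taskdepdata[tid] = cache[tid]
                additional |= {d for d in cache[tid][3] if d not in taskdepdata}
            queue = additional
        return taskdepdata

    assert build_taskdepdata("a:do_x")["a:do_x"][6] == "uni-a2"
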
@@ -2228,7 +2476,7 @@ class RunQueueExecute:
2228 return 2476 return
2229 2477
2230 notcovered = set(self.scenequeue_notcovered) 2478 notcovered = set(self.scenequeue_notcovered)
2231 notcovered |= self.cantskip 2479 notcovered |= self.sqdata.cantskip
2232 for tid in self.scenequeue_notcovered: 2480 for tid in self.scenequeue_notcovered:
2233 notcovered |= self.sqdata.sq_covered_tasks[tid] 2481 notcovered |= self.sqdata.sq_covered_tasks[tid]
2234 notcovered |= self.sqdata.unskippable.difference(self.rqdata.runq_setscene_tids) 2482 notcovered |= self.sqdata.unskippable.difference(self.rqdata.runq_setscene_tids)
@@ -2241,7 +2489,7 @@ class RunQueueExecute:
2241 covered.intersection_update(self.tasks_scenequeue_done) 2489 covered.intersection_update(self.tasks_scenequeue_done)
2242 2490
2243 for tid in notcovered | covered: 2491 for tid in notcovered | covered:
2244 if len(self.rqdata.runtaskentries[tid].depends) == 0: 2492 if not self.rqdata.runtaskentries[tid].depends:
2245 self.setbuildable(tid) 2493 self.setbuildable(tid)
2246 elif self.rqdata.runtaskentries[tid].depends.issubset(self.runq_complete): 2494 elif self.rqdata.runtaskentries[tid].depends.issubset(self.runq_complete):
2247 self.setbuildable(tid) 2495 self.setbuildable(tid)
@@ -2273,10 +2521,19 @@ class RunQueueExecute:
2273 self.updated_taskhash_queue.remove((tid, unihash)) 2521 self.updated_taskhash_queue.remove((tid, unihash))
2274 2522
2275 if unihash != self.rqdata.runtaskentries[tid].unihash: 2523 if unihash != self.rqdata.runtaskentries[tid].unihash:
2276 hashequiv_logger.verbose("Task %s unihash changed to %s" % (tid, unihash)) 2524 # Make sure we rehash any other tasks with the same task hash that we're deferred against.
2277 self.rqdata.runtaskentries[tid].unihash = unihash 2525 torehash = [tid]
2278 bb.parse.siggen.set_unihash(tid, unihash) 2526 for deftid in self.sq_deferred:
2279 toprocess.add(tid) 2527 if self.sq_deferred[deftid] == tid:
2528 torehash.append(deftid)
2529 for hashtid in torehash:
2530 hashequiv_logger.verbose("Task %s unihash changed to %s" % (hashtid, unihash))
2531 self.rqdata.runtaskentries[hashtid].unihash = unihash
2532 bb.parse.siggen.set_unihash(hashtid, unihash)
2533 toprocess.add(hashtid)
2534 if torehash:
2535 # Need to save after set_unihash above
2536 bb.parse.siggen.save_unitaskhashes()
2280 2537
2281 # Work out all tasks which depend upon these 2538 # Work out all tasks which depend upon these
2282 total = set() 2539 total = set()
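
The rehash hunk above handles a subtlety of the deferred setscene map: tids deferred behind a task share its taskhash, so a unihash change for the blocking tid must be applied to every deferred tid too before the dependent tasks are reprocessed. A toy sketch of the propagation, with made-up tids:

    # Toy sketch: propagate a unihash change to every task deferred behind it.
    sq_deferred = {"c:do_z_setscene": "a:do_z_setscene"}   # deferred -> blocking tid
    unihashes = {"a:do_z_setscene": "old", "c:do_z_setscene": "old"}

    def update_unihash(tid, unihash):
        torehash = [tid] + [d for d, blocker in sq_deferred.items() if blocker == tid]
        for hashtid in torehash:
            unihashes[hashtid] = unihash
        return torehash

    changed = update_unihash("a:do_z_setscene", "new")
    assert unihashes["c:do_z_setscene"] == "new" and len(changed) == 2
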
@@ -2294,7 +2551,7 @@ class RunQueueExecute:
2294 # Now iterate those tasks in dependency order to regenerate their taskhash/unihash 2551 # Now iterate those tasks in dependency order to regenerate their taskhash/unihash
2295 next = set() 2552 next = set()
2296 for p in total: 2553 for p in total:
2297 if len(self.rqdata.runtaskentries[p].depends) == 0: 2554 if not self.rqdata.runtaskentries[p].depends:
2298 next.add(p) 2555 next.add(p)
2299 elif self.rqdata.runtaskentries[p].depends.isdisjoint(total): 2556 elif self.rqdata.runtaskentries[p].depends.isdisjoint(total):
2300 next.add(p) 2557 next.add(p)
@@ -2304,11 +2561,10 @@ class RunQueueExecute:
2304 current = next.copy() 2561 current = next.copy()
2305 next = set() 2562 next = set()
2306 for tid in current: 2563 for tid in current:
2307 if len(self.rqdata.runtaskentries[p].depends) and not self.rqdata.runtaskentries[tid].depends.isdisjoint(total): 2564 if self.rqdata.runtaskentries[p].depends and not self.rqdata.runtaskentries[tid].depends.isdisjoint(total):
2308 continue 2565 continue
2309 orighash = self.rqdata.runtaskentries[tid].hash 2566 orighash = self.rqdata.runtaskentries[tid].hash
2310 dc = bb.parse.siggen.get_data_caches(self.rqdata.dataCaches, mc_from_tid(tid)) 2567 newhash = bb.parse.siggen.get_taskhash(tid, self.rqdata.runtaskentries[tid].depends, self.rqdata.dataCaches)
2311 newhash = bb.parse.siggen.get_taskhash(tid, self.rqdata.runtaskentries[tid].depends, dc)
2312 origuni = self.rqdata.runtaskentries[tid].unihash 2568 origuni = self.rqdata.runtaskentries[tid].unihash
2313 newuni = bb.parse.siggen.get_unihash(tid) 2569 newuni = bb.parse.siggen.get_unihash(tid)
2314 # FIXME, need to check it can come from sstate at all for determinism? 2570 # FIXME, need to check it can come from sstate at all for determinism?
@@ -2334,9 +2590,9 @@ class RunQueueExecute:
2334 2590
2335 if changed: 2591 if changed:
2336 for mc in self.rq.worker: 2592 for mc in self.rq.worker:
2337 self.rq.worker[mc].process.stdin.write(b"<newtaskhashes>" + pickle.dumps(bb.parse.siggen.get_taskhashes()) + b"</newtaskhashes>") 2593 RunQueue.send_pickled_data(self.rq.worker[mc].process, bb.parse.siggen.get_taskhashes(), "newtaskhashes")
2338 for mc in self.rq.fakeworker: 2594 for mc in self.rq.fakeworker:
2339 self.rq.fakeworker[mc].process.stdin.write(b"<newtaskhashes>" + pickle.dumps(bb.parse.siggen.get_taskhashes()) + b"</newtaskhashes>") 2595 RunQueue.send_pickled_data(self.rq.fakeworker[mc].process, bb.parse.siggen.get_taskhashes(), "newtaskhashes")
2340 2596
2341 hashequiv_logger.debug(pprint.pformat("Tasks changed:\n%s" % (changed))) 2597 hashequiv_logger.debug(pprint.pformat("Tasks changed:\n%s" % (changed)))
2342 2598
@@ -2370,7 +2626,7 @@ class RunQueueExecute:
2370 self.tasks_scenequeue_done.remove(tid) 2626 self.tasks_scenequeue_done.remove(tid)
2371 for dep in self.sqdata.sq_covered_tasks[tid]: 2627 for dep in self.sqdata.sq_covered_tasks[tid]:
2372 if dep in self.runq_complete and dep not in self.runq_tasksrun: 2628 if dep in self.runq_complete and dep not in self.runq_tasksrun:
2373 bb.error("Task %s marked as completed but now needing to rerun? Aborting build." % dep) 2629 bb.error("Task %s marked as completed but now needing to rerun? Halting build." % dep)
2374 self.failed_tids.append(tid) 2630 self.failed_tids.append(tid)
2375 self.rq.state = runQueueCleanUp 2631 self.rq.state = runQueueCleanUp
2376 return 2632 return
@@ -2383,17 +2639,6 @@ class RunQueueExecute:
2383 self.sq_buildable.remove(tid) 2639 self.sq_buildable.remove(tid)
2384 if tid in self.sq_running: 2640 if tid in self.sq_running:
2385 self.sq_running.remove(tid) 2641 self.sq_running.remove(tid)
2386 harddepfail = False
2387 for t in self.sqdata.sq_harddeps:
2388 if tid in self.sqdata.sq_harddeps[t] and t in self.scenequeue_notcovered:
2389 harddepfail = True
2390 break
2391 if not harddepfail and self.sqdata.sq_revdeps[tid].issubset(self.scenequeue_covered | self.scenequeue_notcovered):
2392 if tid not in self.sq_buildable:
2393 self.sq_buildable.add(tid)
2394 if len(self.sqdata.sq_revdeps[tid]) == 0:
2395 self.sq_buildable.add(tid)
2396
2397 if tid in self.sqdata.outrightfail: 2642 if tid in self.sqdata.outrightfail:
2398 self.sqdata.outrightfail.remove(tid) 2643 self.sqdata.outrightfail.remove(tid)
2399 if tid in self.scenequeue_notcovered: 2644 if tid in self.scenequeue_notcovered:
@@ -2404,7 +2649,7 @@ class RunQueueExecute:
2404 self.scenequeue_notneeded.remove(tid) 2649 self.scenequeue_notneeded.remove(tid)
2405 2650
2406 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) 2651 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
2407 self.sqdata.stamps[tid] = bb.build.stampfile(taskname + "_setscene", self.rqdata.dataCaches[mc], taskfn, noextra=True) 2652 self.sqdata.stamps[tid] = bb.parse.siggen.stampfile_mcfn(taskname, taskfn, extrainfo=False)
2408 2653
2409 if tid in self.stampcache: 2654 if tid in self.stampcache:
2410 del self.stampcache[tid] 2655 del self.stampcache[tid]
@@ -2412,28 +2657,62 @@ class RunQueueExecute:
2412 if tid in self.build_stamps: 2657 if tid in self.build_stamps:
2413 del self.build_stamps[tid] 2658 del self.build_stamps[tid]
2414 2659
2415 update_tasks.append((tid, harddepfail, tid in self.sqdata.valid)) 2660 update_tasks.append(tid)
2661
2662 update_tasks2 = []
2663 for tid in update_tasks:
2664 harddepfail = False
2665 for t in self.sqdata.sq_harddeps_rev[tid]:
2666 if t in self.scenequeue_notcovered:
2667 harddepfail = True
2668 break
2669 if not harddepfail and self.sqdata.sq_revdeps[tid].issubset(self.scenequeue_covered | self.scenequeue_notcovered):
2670 if tid not in self.sq_buildable:
2671 self.sq_buildable.add(tid)
2672 if not self.sqdata.sq_revdeps[tid]:
2673 self.sq_buildable.add(tid)
2416 2674
2417 if update_tasks: 2675 update_tasks2.append((tid, harddepfail, tid in self.sqdata.valid))
2676
2677 if update_tasks2:
2418 self.sqdone = False 2678 self.sqdone = False
2419 update_scenequeue_data([t[0] for t in update_tasks], self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=False) 2679 for mc in sorted(self.sqdata.multiconfigs):
2680 for tid in sorted([t[0] for t in update_tasks2]):
2681 if mc_from_tid(tid) != mc:
2682 continue
2683 h = pending_hash_index(tid, self.rqdata)
2684 if h in self.sqdata.hashes and tid != self.sqdata.hashes[h]:
2685 self.sq_deferred[tid] = self.sqdata.hashes[h]
2686 bb.note("Deferring %s after %s" % (tid, self.sqdata.hashes[h]))
2687 update_scenequeue_data([t[0] for t in update_tasks2], self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=False)
2420 2688
2421 for (tid, harddepfail, origvalid) in update_tasks: 2689 for (tid, harddepfail, origvalid) in update_tasks2:
2422 if tid in self.sqdata.valid and not origvalid: 2690 if tid in self.sqdata.valid and not origvalid:
2423 hashequiv_logger.verbose("Setscene task %s became valid" % tid) 2691 hashequiv_logger.verbose("Setscene task %s became valid" % tid)
2424 if harddepfail: 2692 if harddepfail:
2693 logger.debug2("%s has an unavailable hard dependency so skipping" % (tid))
2425 self.sq_task_failoutright(tid) 2694 self.sq_task_failoutright(tid)
2426 2695
2427 if changed: 2696 if changed:
2697 self.stats.updateCovered(len(self.scenequeue_covered), len(self.scenequeue_notcovered))
2698 self.sq_needed_harddeps = set()
2699 self.sq_harddep_deferred = set()
2428 self.holdoff_need_update = True 2700 self.holdoff_need_update = True
2429 2701
2430 def scenequeue_updatecounters(self, task, fail=False): 2702 def scenequeue_updatecounters(self, task, fail=False):
2431 2703
2432 for dep in sorted(self.sqdata.sq_deps[task]): 2704 if fail and task in self.sqdata.sq_harddeps:
2433 if fail and task in self.sqdata.sq_harddeps and dep in self.sqdata.sq_harddeps[task]: 2705 for dep in sorted(self.sqdata.sq_harddeps[task]):
2706 if dep in self.scenequeue_covered or dep in self.scenequeue_notcovered:
2707 # dependency could be already processed, e.g. noexec setscene task
2708 continue
2709 noexec, stamppresent = check_setscene_stamps(dep, self.rqdata, self.rq, self.stampcache)
2710 if noexec or stamppresent:
2711 continue
2434 logger.debug2("%s was unavailable and is a hard dependency of %s so skipping" % (task, dep)) 2712 logger.debug2("%s was unavailable and is a hard dependency of %s so skipping" % (task, dep))
2435 self.sq_task_failoutright(dep) 2713 self.sq_task_failoutright(dep)
2436 continue 2714 continue
2715 for dep in sorted(self.sqdata.sq_deps[task]):
2437 if self.sqdata.sq_revdeps[dep].issubset(self.scenequeue_covered | self.scenequeue_notcovered): 2716 if self.sqdata.sq_revdeps[dep].issubset(self.scenequeue_covered | self.scenequeue_notcovered):
2438 if dep not in self.sq_buildable: 2717 if dep not in self.sq_buildable:
2439 self.sq_buildable.add(dep) 2718 self.sq_buildable.add(dep)
@@ -2452,6 +2731,14 @@ class RunQueueExecute:
2452 new.add(dep) 2731 new.add(dep)
2453 next = new 2732 next = new
2454 2733
2734 # If this task was one which other setscene tasks have a hard dependency upon, we need
2735 # to walk through the hard dependencies and allow execution of those which have completed dependencies.
2736 if task in self.sqdata.sq_harddeps:
2737 for dep in self.sq_harddep_deferred.copy():
2738 if self.sqdata.sq_harddeps_rev[dep].issubset(self.scenequeue_covered | self.scenequeue_notcovered):
2739 self.sq_harddep_deferred.remove(dep)
2740
2741 self.stats.updateCovered(len(self.scenequeue_covered), len(self.scenequeue_notcovered))
2455 self.holdoff_need_update = True 2742 self.holdoff_need_update = True
2456 2743
2457 def sq_task_completeoutright(self, task): 2744 def sq_task_completeoutright(self, task):
@@ -2466,22 +2753,20 @@ class RunQueueExecute:
2466 self.scenequeue_updatecounters(task) 2753 self.scenequeue_updatecounters(task)
2467 2754
2468 def sq_check_taskfail(self, task): 2755 def sq_check_taskfail(self, task):
2469 if self.rqdata.setscenewhitelist is not None: 2756 if self.rqdata.setscene_ignore_tasks is not None:
2470 realtask = task.split('_setscene')[0] 2757 realtask = task.split('_setscene')[0]
2471 (mc, fn, taskname, taskfn) = split_tid_mcfn(realtask) 2758 (mc, fn, taskname, taskfn) = split_tid_mcfn(realtask)
2472 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn] 2759 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
2473 if not check_setscene_enforce_whitelist(pn, taskname, self.rqdata.setscenewhitelist): 2760 if not check_setscene_enforce_ignore_tasks(pn, taskname, self.rqdata.setscene_ignore_tasks):
2474 logger.error('Task %s.%s failed' % (pn, taskname + "_setscene")) 2761 logger.error('Task %s.%s failed' % (pn, taskname + "_setscene"))
2475 self.rq.state = runQueueCleanUp 2762 self.rq.state = runQueueCleanUp
2476 2763
2477 def sq_task_complete(self, task): 2764 def sq_task_complete(self, task):
2478 self.sq_stats.taskCompleted() 2765 bb.event.fire(sceneQueueTaskCompleted(task, self.stats, self.rq), self.cfgData)
2479 bb.event.fire(sceneQueueTaskCompleted(task, self.sq_stats, self.rq), self.cfgData)
2480 self.sq_task_completeoutright(task) 2766 self.sq_task_completeoutright(task)
2481 2767
2482 def sq_task_fail(self, task, result): 2768 def sq_task_fail(self, task, result):
2483 self.sq_stats.taskFailed() 2769 bb.event.fire(sceneQueueTaskFailed(task, self.stats, result, self), self.cfgData)
2484 bb.event.fire(sceneQueueTaskFailed(task, self.sq_stats, result, self), self.cfgData)
2485 self.scenequeue_notcovered.add(task) 2770 self.scenequeue_notcovered.add(task)
2486 self.scenequeue_updatecounters(task, True) 2771 self.scenequeue_updatecounters(task, True)
2487 self.sq_check_taskfail(task) 2772 self.sq_check_taskfail(task)
@@ -2489,8 +2774,6 @@ class RunQueueExecute:
2489 def sq_task_failoutright(self, task): 2774 def sq_task_failoutright(self, task):
2490 self.sq_running.add(task) 2775 self.sq_running.add(task)
2491 self.sq_buildable.add(task) 2776 self.sq_buildable.add(task)
2492 self.sq_stats.taskSkipped()
2493 self.sq_stats.taskCompleted()
2494 self.scenequeue_notcovered.add(task) 2777 self.scenequeue_notcovered.add(task)
2495 self.scenequeue_updatecounters(task, True) 2778 self.scenequeue_updatecounters(task, True)
2496 2779
@@ -2498,8 +2781,6 @@ class RunQueueExecute:
2498 self.sq_running.add(task) 2781 self.sq_running.add(task)
2499 self.sq_buildable.add(task) 2782 self.sq_buildable.add(task)
2500 self.sq_task_completeoutright(task) 2783 self.sq_task_completeoutright(task)
2501 self.sq_stats.taskSkipped()
2502 self.sq_stats.taskCompleted()
2503 2784
2504 def sq_build_taskdepdata(self, task): 2785 def sq_build_taskdepdata(self, task):
2505 def getsetscenedeps(tid): 2786 def getsetscenedeps(tid):
@@ -2530,7 +2811,8 @@ class RunQueueExecute:
2530 provides = self.rqdata.dataCaches[mc].fn_provides[taskfn] 2811 provides = self.rqdata.dataCaches[mc].fn_provides[taskfn]
2531 taskhash = self.rqdata.runtaskentries[revdep].hash 2812 taskhash = self.rqdata.runtaskentries[revdep].hash
2532 unihash = self.rqdata.runtaskentries[revdep].unihash 2813 unihash = self.rqdata.runtaskentries[revdep].unihash
2533 taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash, unihash] 2814 hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn]
2815 taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash, unihash, hashfn]
2534 for revdep2 in deps: 2816 for revdep2 in deps:
2535 if revdep2 not in taskdepdata: 2817 if revdep2 not in taskdepdata:
2536 additional.append(revdep2) 2818 additional.append(revdep2)
@@ -2539,8 +2821,8 @@ class RunQueueExecute:
2539 #bb.note("Task %s: " % task + str(taskdepdata).replace("], ", "],\n")) 2821 #bb.note("Task %s: " % task + str(taskdepdata).replace("], ", "],\n"))
2540 return taskdepdata 2822 return taskdepdata
2541 2823
2542 def check_setscenewhitelist(self, tid): 2824 def check_setscene_ignore_tasks(self, tid):
2543 # Check task that is going to run against the whitelist 2825 # Check task that is going to run against the ignore tasks list
2544 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) 2826 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
2545 # Ignore covered tasks 2827 # Ignore covered tasks
2546 if tid in self.tasks_covered: 2828 if tid in self.tasks_covered:
@@ -2554,14 +2836,15 @@ class RunQueueExecute:
2554 return False 2836 return False
2555 2837
2556 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn] 2838 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
2557 if not check_setscene_enforce_whitelist(pn, taskname, self.rqdata.setscenewhitelist): 2839 if not check_setscene_enforce_ignore_tasks(pn, taskname, self.rqdata.setscene_ignore_tasks):
2558 if tid in self.rqdata.runq_setscene_tids: 2840 if tid in self.rqdata.runq_setscene_tids:
2559 msg = 'Task %s.%s attempted to execute unexpectedly and should have been setscened' % (pn, taskname) 2841 msg = ['Task %s.%s attempted to execute unexpectedly and should have been setscened' % (pn, taskname)]
2560 else: 2842 else:
2561 msg = 'Task %s.%s attempted to execute unexpectedly' % (pn, taskname) 2843 msg = ['Task %s.%s attempted to execute unexpectedly' % (pn, taskname)]
2562 for t in self.scenequeue_notcovered: 2844 for t in self.scenequeue_notcovered:
2563 msg = msg + "\nTask %s, unihash %s, taskhash %s" % (t, self.rqdata.runtaskentries[t].unihash, self.rqdata.runtaskentries[t].hash) 2845 msg.append("\nTask %s, unihash %s, taskhash %s" % (t, self.rqdata.runtaskentries[t].unihash, self.rqdata.runtaskentries[t].hash))
2564 logger.error(msg + '\nThis is usually due to missing setscene tasks. Those missing in this build were: %s' % pprint.pformat(self.scenequeue_notcovered)) 2846 msg.append('\nThis is usually due to missing setscene tasks. Those missing in this build were: %s' % pprint.pformat(self.scenequeue_notcovered))
2847 logger.error("".join(msg))
2565 return True 2848 return True
2566 return False 2849 return False
2567 2850
@@ -2573,6 +2856,7 @@ class SQData(object):
2573 self.sq_revdeps = {} 2856 self.sq_revdeps = {}
2574 # Injected inter-setscene task dependencies 2857 # Injected inter-setscene task dependencies
2575 self.sq_harddeps = {} 2858 self.sq_harddeps = {}
2859 self.sq_harddeps_rev = {}
2576 # Cache of stamp files so duplicates can't run in parallel 2860 # Cache of stamp files so duplicates can't run in parallel
2577 self.stamps = {} 2861 self.stamps = {}
2578 # Setscene tasks directly depended upon by the build 2862 # Setscene tasks directly depended upon by the build
@@ -2582,12 +2866,17 @@ class SQData(object):
2582 # A list of normal tasks a setscene task covers 2866 # A list of normal tasks a setscene task covers
2583 self.sq_covered_tasks = {} 2867 self.sq_covered_tasks = {}
2584 2868
2585def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq): 2869def build_scenequeue_data(sqdata, rqdata, sqrq):
2586 2870
2587 sq_revdeps = {} 2871 sq_revdeps = {}
2588 sq_revdeps_squash = {} 2872 sq_revdeps_squash = {}
2589 sq_collated_deps = {} 2873 sq_collated_deps = {}
2590 2874
2875 # We can't skip specified target tasks which aren't setscene tasks
2876 sqdata.cantskip = set(rqdata.target_tids)
2877 sqdata.cantskip.difference_update(rqdata.runq_setscene_tids)
2878 sqdata.cantskip.intersection_update(rqdata.runtaskentries)
2879
2591 # We need to construct a dependency graph for the setscene functions. Intermediate 2880 # We need to construct a dependency graph for the setscene functions. Intermediate
2592 # dependencies between the setscene tasks only complicate the code. This code 2881 # dependencies between the setscene tasks only complicate the code. This code
2593 # therefore aims to collapse the huge runqueue dependency tree into a smaller one 2882 # therefore aims to collapse the huge runqueue dependency tree into a smaller one
@@ -2600,7 +2889,7 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
2600 for tid in rqdata.runtaskentries: 2889 for tid in rqdata.runtaskentries:
2601 sq_revdeps[tid] = copy.copy(rqdata.runtaskentries[tid].revdeps) 2890 sq_revdeps[tid] = copy.copy(rqdata.runtaskentries[tid].revdeps)
2602 sq_revdeps_squash[tid] = set() 2891 sq_revdeps_squash[tid] = set()
2603 if (len(sq_revdeps[tid]) == 0) and tid not in rqdata.runq_setscene_tids: 2892 if not sq_revdeps[tid] and tid not in rqdata.runq_setscene_tids:
2604 #bb.warn("Added endpoint %s" % (tid)) 2893 #bb.warn("Added endpoint %s" % (tid))
2605 endpoints[tid] = set() 2894 endpoints[tid] = set()
2606 2895
@@ -2634,16 +2923,15 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
2634 sq_revdeps_squash[point] = set() 2923 sq_revdeps_squash[point] = set()
2635 if point in rqdata.runq_setscene_tids: 2924 if point in rqdata.runq_setscene_tids:
2636 sq_revdeps_squash[point] = tasks 2925 sq_revdeps_squash[point] = tasks
2637 tasks = set()
2638 continue 2926 continue
2639 for dep in rqdata.runtaskentries[point].depends: 2927 for dep in rqdata.runtaskentries[point].depends:
2640 if point in sq_revdeps[dep]: 2928 if point in sq_revdeps[dep]:
2641 sq_revdeps[dep].remove(point) 2929 sq_revdeps[dep].remove(point)
2642 if tasks: 2930 if tasks:
2643 sq_revdeps_squash[dep] |= tasks 2931 sq_revdeps_squash[dep] |= tasks
2644 if len(sq_revdeps[dep]) == 0 and dep not in rqdata.runq_setscene_tids: 2932 if not sq_revdeps[dep] and dep not in rqdata.runq_setscene_tids:
2645 newendpoints[dep] = task 2933 newendpoints[dep] = task
2646 if len(newendpoints) != 0: 2934 if newendpoints:
2647 process_endpoints(newendpoints) 2935 process_endpoints(newendpoints)
2648 2936
2649 process_endpoints(endpoints) 2937 process_endpoints(endpoints)
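
A toy model of the process_endpoints collapse may help: tasks with no remaining reverse dependencies are peeled off, their accumulated set of covered tasks is pushed down into their dependencies, and a setscene task absorbs the set instead of propagating it further. This simplified version (three hypothetical tasks, one of them setscene) captures the shape of the walk, not the full bookkeeping:

    depends = {"t3": {"t2"}, "t2": {"t1"}, "t1": set()}
    revdeps = {"t1": {"t2"}, "t2": {"t3"}, "t3": set()}
    setscene = {"t1"}
    covered = {tid: set() for tid in depends}

    endpoints = {tid for tid in depends if not revdeps[tid] and tid not in setscene}
    while endpoints:
        newendpoints = set()
        for point in endpoints:
            tasks = covered[point] | {point}
            for dep in depends[point]:
                revdeps[dep].discard(point)
                covered[dep] |= tasks
                if not revdeps[dep] and dep not in setscene:
                    newendpoints.add(dep)
        endpoints = newendpoints

    # The setscene task t1 ends up covering the plain tasks above it.
    assert covered["t1"] == {"t2", "t3"}
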
@@ -2655,16 +2943,16 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
2655 # Take the build endpoints (no revdeps) and find the sstate tasks they depend upon 2943 # Take the build endpoints (no revdeps) and find the sstate tasks they depend upon
2656 new = True 2944 new = True
2657 for tid in rqdata.runtaskentries: 2945 for tid in rqdata.runtaskentries:
2658 if len(rqdata.runtaskentries[tid].revdeps) == 0: 2946 if not rqdata.runtaskentries[tid].revdeps:
2659 sqdata.unskippable.add(tid) 2947 sqdata.unskippable.add(tid)
2660 sqdata.unskippable |= sqrq.cantskip 2948 sqdata.unskippable |= sqdata.cantskip
2661 while new: 2949 while new:
2662 new = False 2950 new = False
2663 orig = sqdata.unskippable.copy() 2951 orig = sqdata.unskippable.copy()
2664 for tid in sorted(orig, reverse=True): 2952 for tid in sorted(orig, reverse=True):
2665 if tid in rqdata.runq_setscene_tids: 2953 if tid in rqdata.runq_setscene_tids:
2666 continue 2954 continue
2667 if len(rqdata.runtaskentries[tid].depends) == 0: 2955 if not rqdata.runtaskentries[tid].depends:
2668 # These are tasks which have no setscene tasks in their chain, need to mark as directly buildable 2956 # These are tasks which have no setscene tasks in their chain, need to mark as directly buildable
2669 sqrq.setbuildable(tid) 2957 sqrq.setbuildable(tid)
2670 sqdata.unskippable |= rqdata.runtaskentries[tid].depends 2958 sqdata.unskippable |= rqdata.runtaskentries[tid].depends
@@ -2679,8 +2967,8 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
2679 for taskcounter, tid in enumerate(rqdata.runtaskentries): 2967 for taskcounter, tid in enumerate(rqdata.runtaskentries):
2680 if tid in rqdata.runq_setscene_tids: 2968 if tid in rqdata.runq_setscene_tids:
2681 pass 2969 pass
2682 elif len(sq_revdeps_squash[tid]) != 0: 2970 elif sq_revdeps_squash[tid]:
2683 bb.msg.fatal("RunQueue", "Something went badly wrong during scenequeue generation, aborting. Please report this problem.") 2971 bb.msg.fatal("RunQueue", "Something went badly wrong during scenequeue generation, halting. Please report this problem.")
2684 else: 2972 else:
2685 del sq_revdeps_squash[tid] 2973 del sq_revdeps_squash[tid]
2686 rqdata.init_progress_reporter.update(taskcounter) 2974 rqdata.init_progress_reporter.update(taskcounter)
@@ -2694,7 +2982,9 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
2694 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) 2982 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
2695 realtid = tid + "_setscene" 2983 realtid = tid + "_setscene"
2696 idepends = rqdata.taskData[mc].taskentries[realtid].idepends 2984 idepends = rqdata.taskData[mc].taskentries[realtid].idepends
2697 sqdata.stamps[tid] = bb.build.stampfile(taskname + "_setscene", rqdata.dataCaches[mc], taskfn, noextra=True) 2985 sqdata.stamps[tid] = bb.parse.siggen.stampfile_mcfn(taskname, taskfn, extrainfo=False)
2986
2987 sqdata.sq_harddeps_rev[tid] = set()
2698 for (depname, idependtask) in idepends: 2988 for (depname, idependtask) in idepends:
2699 2989
2700 if depname not in rqdata.taskData[mc].build_targets: 2990 if depname not in rqdata.taskData[mc].build_targets:
@@ -2707,20 +2997,15 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
2707 if deptid not in rqdata.runtaskentries: 2997 if deptid not in rqdata.runtaskentries:
2708 bb.msg.fatal("RunQueue", "Task %s depends upon non-existent task %s:%s" % (realtid, depfn, idependtask)) 2998 bb.msg.fatal("RunQueue", "Task %s depends upon non-existent task %s:%s" % (realtid, depfn, idependtask))
2709 2999
3000 logger.debug2("Adding hard setscene dependency %s for %s" % (deptid, tid))
3001
2710 if not deptid in sqdata.sq_harddeps: 3002 if not deptid in sqdata.sq_harddeps:
2711 sqdata.sq_harddeps[deptid] = set() 3003 sqdata.sq_harddeps[deptid] = set()
2712 sqdata.sq_harddeps[deptid].add(tid) 3004 sqdata.sq_harddeps[deptid].add(tid)
2713 3005 sqdata.sq_harddeps_rev[tid].add(deptid)
2714 sq_revdeps_squash[tid].add(deptid)
2715 # Have to zero this to avoid circular dependencies
2716 sq_revdeps_squash[deptid] = set()
2717 3006
2718 rqdata.init_progress_reporter.next_stage() 3007 rqdata.init_progress_reporter.next_stage()
2719 3008
2720 for task in sqdata.sq_harddeps:
2721 for dep in sqdata.sq_harddeps[task]:
2722 sq_revdeps_squash[dep].add(task)
2723
2724 rqdata.init_progress_reporter.next_stage() 3009 rqdata.init_progress_reporter.next_stage()
2725 3010
2726 #for tid in sq_revdeps_squash: 3011 #for tid in sq_revdeps_squash:
@@ -2744,16 +3029,47 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
2744 sqdata.multiconfigs = set() 3029 sqdata.multiconfigs = set()
2745 for tid in sqdata.sq_revdeps: 3030 for tid in sqdata.sq_revdeps:
2746 sqdata.multiconfigs.add(mc_from_tid(tid)) 3031 sqdata.multiconfigs.add(mc_from_tid(tid))
2747 if len(sqdata.sq_revdeps[tid]) == 0: 3032 if not sqdata.sq_revdeps[tid]:
2748 sqrq.sq_buildable.add(tid) 3033 sqrq.sq_buildable.add(tid)
2749 3034
2750 rqdata.init_progress_reporter.finish() 3035 rqdata.init_progress_reporter.next_stage()
2751 3036
2752 sqdata.noexec = set() 3037 sqdata.noexec = set()
2753 sqdata.stamppresent = set() 3038 sqdata.stamppresent = set()
2754 sqdata.valid = set() 3039 sqdata.valid = set()
2755 3040
2756 update_scenequeue_data(sqdata.sq_revdeps, sqdata, rqdata, rq, cooker, stampcache, sqrq, summary=True) 3041 sqdata.hashes = {}
3042 sqrq.sq_deferred = {}
3043 for mc in sorted(sqdata.multiconfigs):
3044 for tid in sorted(sqdata.sq_revdeps):
3045 if mc_from_tid(tid) != mc:
3046 continue
3047 h = pending_hash_index(tid, rqdata)
3048 if h not in sqdata.hashes:
3049 sqdata.hashes[h] = tid
3050 else:
3051 sqrq.sq_deferred[tid] = sqdata.hashes[h]
3052 bb.debug(1, "Deferring %s after %s" % (tid, sqdata.hashes[h]))
3053
3054def check_setscene_stamps(tid, rqdata, rq, stampcache, noexecstamp=False):
3055
3056 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
3057
3058 taskdep = rqdata.dataCaches[mc].task_deps[taskfn]
3059
3060 if 'noexec' in taskdep and taskname in taskdep['noexec']:
3061 bb.build.make_stamp_mcfn(taskname + "_setscene", taskfn)
3062 return True, False
3063
3064 if rq.check_stamp_task(tid, taskname + "_setscene", cache=stampcache):
3065 logger.debug2('Setscene stamp current for task %s', tid)
3066 return False, True
3067
3068 if rq.check_stamp_task(tid, taskname, recurse = True, cache=stampcache):
3069 logger.debug2('Normal stamp current for task %s', tid)
3070 return False, True
3071
3072 return False, False
2757 3073
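
check_setscene_stamps above reports a (noexec, stamppresent) pair, and the deferral loop in build_scenequeue_data keys on pending_hash_index so that only the first tid seen for a given hash actually runs its setscene task; later tids with the same hash wait behind the owner. A toy sketch of the per-hash deferral (pending_hash_index is stubbed out here; the real helper derives the index from task metadata):

    def pending_hash_index(tid):
        # Stand-in for the real helper: pretend the taskhash is the suffix.
        return tid.split(":")[-1]

    tids = ["mc1:recipe-a:h1", "mc1:recipe-b:h1", "mc1:recipe-c:h2"]
    hashes, deferred = {}, {}
    for tid in sorted(tids):
        h = pending_hash_index(tid)
        if h not in hashes:
            hashes[h] = tid          # first tid per hash owns the work
        else:
            deferred[tid] = hashes[h]

    assert deferred == {"mc1:recipe-b:h1": "mc1:recipe-a:h1"}
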
2758def update_scenequeue_data(tids, sqdata, rqdata, rq, cooker, stampcache, sqrq, summary=True): 3074def update_scenequeue_data(tids, sqdata, rqdata, rq, cooker, stampcache, sqrq, summary=True):
2759 3075
@@ -2764,55 +3080,42 @@ def update_scenequeue_data(tids, sqdata, rqdata, rq, cooker, stampcache, sqrq, s
2764 sqdata.stamppresent.remove(tid) 3080 sqdata.stamppresent.remove(tid)
2765 if tid in sqdata.valid: 3081 if tid in sqdata.valid:
2766 sqdata.valid.remove(tid) 3082 sqdata.valid.remove(tid)
3083 if tid in sqdata.outrightfail:
3084 sqdata.outrightfail.remove(tid)
2767 3085
2768 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) 3086 noexec, stamppresent = check_setscene_stamps(tid, rqdata, rq, stampcache, noexecstamp=True)
2769
2770 taskdep = rqdata.dataCaches[mc].task_deps[taskfn]
2771 3087
2772 if 'noexec' in taskdep and taskname in taskdep['noexec']: 3088 if noexec:
2773 sqdata.noexec.add(tid) 3089 sqdata.noexec.add(tid)
2774 sqrq.sq_task_skip(tid) 3090 sqrq.sq_task_skip(tid)
2775 bb.build.make_stamp(taskname + "_setscene", rqdata.dataCaches[mc], taskfn) 3091 logger.debug2("%s is noexec so skipping setscene" % (tid))
2776 continue
2777
2778 if rq.check_stamp_task(tid, taskname + "_setscene", cache=stampcache):
2779 logger.debug2('Setscene stamp current for task %s', tid)
2780 sqdata.stamppresent.add(tid)
2781 sqrq.sq_task_skip(tid)
2782 continue 3092 continue
2783 3093
2784 if rq.check_stamp_task(tid, taskname, recurse = True, cache=stampcache): 3094 if stamppresent:
2785 logger.debug2('Normal stamp current for task %s', tid)
2786 sqdata.stamppresent.add(tid) 3095 sqdata.stamppresent.add(tid)
2787 sqrq.sq_task_skip(tid) 3096 sqrq.sq_task_skip(tid)
3097 logger.debug2("%s has a valid stamp, skipping" % (tid))
2788 continue 3098 continue
2789 3099
2790 tocheck.add(tid) 3100 tocheck.add(tid)
2791 3101
2792 sqdata.valid |= rq.validate_hashes(tocheck, cooker.data, len(sqdata.stamppresent), False, summary=summary) 3102 sqdata.valid |= rq.validate_hashes(tocheck, cooker.data, len(sqdata.stamppresent), False, summary=summary)
2793 3103
2794 sqdata.hashes = {} 3104 for tid in tids:
2795 for mc in sorted(sqdata.multiconfigs): 3105 if tid in sqdata.stamppresent:
2796 for tid in sorted(sqdata.sq_revdeps): 3106 continue
2797 if mc_from_tid(tid) != mc: 3107 if tid in sqdata.valid:
2798 continue 3108 continue
2799 if tid in sqdata.stamppresent: 3109 if tid in sqdata.noexec:
2800 continue 3110 continue
2801 if tid in sqdata.valid: 3111 if tid in sqrq.scenequeue_covered:
2802 continue 3112 continue
2803 if tid in sqdata.noexec: 3113 if tid in sqrq.scenequeue_notcovered:
2804 continue 3114 continue
2805 if tid in sqrq.scenequeue_notcovered: 3115 if tid in sqrq.sq_deferred:
2806 continue 3116 continue
2807 sqdata.outrightfail.add(tid) 3117 sqdata.outrightfail.add(tid)
2808 3118 logger.debug2("%s already handled (fallthrough), skipping" % (tid))
2809 h = pending_hash_index(tid, rqdata)
2810 if h not in sqdata.hashes:
2811 sqdata.hashes[h] = tid
2812 else:
2813 sqrq.sq_deferred[tid] = sqdata.hashes[h]
2814 bb.note("Deferring %s after %s" % (tid, sqdata.hashes[h]))
2815
2816 3119
2817class TaskFailure(Exception): 3120class TaskFailure(Exception):
2818 """ 3121 """
@@ -2876,12 +3179,16 @@ class runQueueTaskFailed(runQueueEvent):
2876 """ 3179 """
2877 Event notifying a task failed 3180 Event notifying a task failed
2878 """ 3181 """
2879 def __init__(self, task, stats, exitcode, rq): 3182 def __init__(self, task, stats, exitcode, rq, fakeroot_log=None):
2880 runQueueEvent.__init__(self, task, stats, rq) 3183 runQueueEvent.__init__(self, task, stats, rq)
2881 self.exitcode = exitcode 3184 self.exitcode = exitcode
3185 self.fakeroot_log = fakeroot_log
2882 3186
2883 def __str__(self): 3187 def __str__(self):
2884 return "Task (%s) failed with exit code '%s'" % (self.taskstring, self.exitcode) 3188 if self.fakeroot_log:
3189 return "Task (%s) failed with exit code '%s' \nPseudo log:\n%s" % (self.taskstring, self.exitcode, self.fakeroot_log)
3190 else:
3191 return "Task (%s) failed with exit code '%s'" % (self.taskstring, self.exitcode)
2885 3192
2886class sceneQueueTaskFailed(sceneQueueEvent): 3193class sceneQueueTaskFailed(sceneQueueEvent):
2887 """ 3194 """
@@ -2933,18 +3240,16 @@ class runQueuePipe():
2933 """ 3240 """
2934 Abstraction for a pipe between a worker thread and the server 3241 Abstraction for a pipe between a worker thread and the server
2935 """ 3242 """
2936 def __init__(self, pipein, pipeout, d, rq, rqexec): 3243 def __init__(self, pipein, pipeout, d, rq, rqexec, fakerootlogs=None):
2937 self.input = pipein 3244 self.input = pipein
2938 if pipeout: 3245 if pipeout:
2939 pipeout.close() 3246 pipeout.close()
2940 bb.utils.nonblockingfd(self.input) 3247 bb.utils.nonblockingfd(self.input)
2941 self.queue = b"" 3248 self.queue = bytearray()
2942 self.d = d 3249 self.d = d
2943 self.rq = rq 3250 self.rq = rq
2944 self.rqexec = rqexec 3251 self.rqexec = rqexec
2945 3252 self.fakerootlogs = fakerootlogs
2946 def setrunqueueexec(self, rqexec):
2947 self.rqexec = rqexec
2948 3253
2949 def read(self): 3254 def read(self):
2950 for workers, name in [(self.rq.worker, "Worker"), (self.rq.fakeworker, "Fakeroot")]: 3255 for workers, name in [(self.rq.worker, "Worker"), (self.rq.fakeworker, "Fakeroot")]:
@@ -2956,13 +3261,13 @@ class runQueuePipe():
2956 3261
2957 start = len(self.queue) 3262 start = len(self.queue)
2958 try: 3263 try:
2959 self.queue = self.queue + (self.input.read(102400) or b"") 3264 self.queue.extend(self.input.read(102400) or b"")
2960 except (OSError, IOError) as e: 3265 except (OSError, IOError) as e:
2961 if e.errno != errno.EAGAIN: 3266 if e.errno != errno.EAGAIN:
2962 raise 3267 raise
2963 end = len(self.queue) 3268 end = len(self.queue)
2964 found = True 3269 found = True
2965 while found and len(self.queue): 3270 while found and self.queue:
2966 found = False 3271 found = False
2967 index = self.queue.find(b"</event>") 3272 index = self.queue.find(b"</event>")
2968 while index != -1 and self.queue.startswith(b"<event>"): 3273 while index != -1 and self.queue.startswith(b"<event>"):
@@ -2987,7 +3292,11 @@ class runQueuePipe():
2987 task, status = pickle.loads(self.queue[10:index]) 3292 task, status = pickle.loads(self.queue[10:index])
2988 except (ValueError, pickle.UnpicklingError, AttributeError, IndexError) as e: 3293 except (ValueError, pickle.UnpicklingError, AttributeError, IndexError) as e:
2989 bb.msg.fatal("RunQueue", "failed load pickle '%s': '%s'" % (e, self.queue[10:index])) 3294 bb.msg.fatal("RunQueue", "failed load pickle '%s': '%s'" % (e, self.queue[10:index]))
2990 self.rqexec.runqueue_process_waitpid(task, status) 3295 (_, _, _, taskfn) = split_tid_mcfn(task)
3296 fakerootlog = None
3297 if self.fakerootlogs and taskfn and taskfn in self.fakerootlogs:
3298 fakerootlog = self.fakerootlogs[taskfn]
3299 self.rqexec.runqueue_process_waitpid(task, status, fakerootlog=fakerootlog)
2991 found = True 3300 found = True
2992 self.queue = self.queue[index+11:] 3301 self.queue = self.queue[index+11:]
2993 index = self.queue.find(b"</exitcode>") 3302 index = self.queue.find(b"</exitcode>")
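
The read loop above accumulates pipe data into a bytearray and repeatedly slices complete frames off the front. A minimal sketch of the same <event>...</event> framing parse over an in-memory buffer:

    import pickle

    # Accumulate bytes, then repeatedly strip complete <event>...</event> frames.
    queue = bytearray()
    queue.extend(b"<event>" + pickle.dumps("hello") + b"</event>")
    queue.extend(b"<event>" + pickle.dumps("world") + b"</event>")

    events = []
    index = queue.find(b"</event>")
    while index != -1 and queue.startswith(b"<event>"):
        events.append(pickle.loads(queue[7:index]))       # 7 == len(b"<event>")
        queue = queue[index + len(b"</event>"):]          # drop the parsed frame
        index = queue.find(b"</event>")

    assert events == ["hello", "world"]
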
@@ -2996,16 +3305,16 @@ class runQueuePipe():
2996 def close(self): 3305 def close(self):
2997 while self.read(): 3306 while self.read():
2998 continue 3307 continue
2999 if len(self.queue) > 0: 3308 if self.queue:
3000 print("Warning, worker left partial message: %s" % self.queue) 3309 print("Warning, worker left partial message: %s" % self.queue)
3001 self.input.close() 3310 self.input.close()
3002 3311
3003def get_setscene_enforce_whitelist(d, targets): 3312def get_setscene_enforce_ignore_tasks(d, targets):
3004 if d.getVar('BB_SETSCENE_ENFORCE') != '1': 3313 if d.getVar('BB_SETSCENE_ENFORCE') != '1':
3005 return None 3314 return None
3006 whitelist = (d.getVar("BB_SETSCENE_ENFORCE_WHITELIST") or "").split() 3315 ignore_tasks = (d.getVar("BB_SETSCENE_ENFORCE_IGNORE_TASKS") or "").split()
3007 outlist = [] 3316 outlist = []
3008 for item in whitelist[:]: 3317 for item in ignore_tasks[:]:
3009 if item.startswith('%:'): 3318 if item.startswith('%:'):
3010 for (mc, target, task, fn) in targets: 3319 for (mc, target, task, fn) in targets:
3011 outlist.append(target + ':' + item.split(':')[1]) 3320 outlist.append(target + ':' + item.split(':')[1])
@@ -3013,12 +3322,12 @@ def get_setscene_enforce_whitelist(d, targets):
3013 outlist.append(item) 3322 outlist.append(item)
3014 return outlist 3323 return outlist
3015 3324
3016def check_setscene_enforce_whitelist(pn, taskname, whitelist): 3325def check_setscene_enforce_ignore_tasks(pn, taskname, ignore_tasks):
3017 import fnmatch 3326 import fnmatch
3018 if whitelist is not None: 3327 if ignore_tasks is not None:
3019 item = '%s:%s' % (pn, taskname) 3328 item = '%s:%s' % (pn, taskname)
3020 for whitelist_item in whitelist: 3329 for ignore_task in ignore_tasks:
3021 if fnmatch.fnmatch(item, whitelist_item): 3330 if fnmatch.fnmatch(item, ignore_task):
3022 return True 3331 return True
3023 return False 3332 return False
3024 return True 3333 return True
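
The matching logic here is a straightforward fnmatch of 'pn:taskname' against each pattern in the list; a compact standalone equivalent for illustration (pn and task values are hypothetical):

    import fnmatch

    def check_ignore(pn, taskname, ignore_tasks):
        # None means no enforcement list; treat everything as allowed.
        if ignore_tasks is None:
            return True
        item = '%s:%s' % (pn, taskname)
        return any(fnmatch.fnmatch(item, pattern) for pattern in ignore_tasks)

    assert check_ignore("busybox", "do_fetch", ["busybox:do_*"]) is True
    assert check_ignore("busybox", "do_fetch", ["glibc:*"]) is False
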
diff --git a/bitbake/lib/bb/server/process.py b/bitbake/lib/bb/server/process.py
index b27b4aefe0..76b189291d 100644
--- a/bitbake/lib/bb/server/process.py
+++ b/bitbake/lib/bb/server/process.py
@@ -26,6 +26,9 @@ import errno
26import re 26import re
27import datetime 27import datetime
28import pickle 28import pickle
29import traceback
30import gc
31import stat
29import bb.server.xmlrpcserver 32import bb.server.xmlrpcserver
30from bb import daemonize 33from bb import daemonize
31from multiprocessing import queues 34from multiprocessing import queues
@@ -35,9 +38,46 @@ logger = logging.getLogger('BitBake')
35class ProcessTimeout(SystemExit): 38class ProcessTimeout(SystemExit):
36 pass 39 pass
37 40
41def currenttime():
42 return datetime.datetime.now().strftime('%H:%M:%S.%f')
43
38def serverlog(msg): 44def serverlog(msg):
39 print(str(os.getpid()) + " " + datetime.datetime.now().strftime('%H:%M:%S.%f') + " " + msg) 45 print(str(os.getpid()) + " " + currenttime() + " " + msg)
40 sys.stdout.flush() 46 # Seems a flush here triggers filesystem sync-like behaviour and long hangs in the server
47 #sys.stdout.flush()
48
49#
50# When we have lockfile issues, try and find information about which process is
51# using the lockfile
52#
53def get_lockfile_process_msg(lockfile):
54 # Some systems may not have lsof available
55 procs = None
56 try:
57 procs = subprocess.check_output(["lsof", '-w', lockfile], stderr=subprocess.STDOUT)
58 except subprocess.CalledProcessError:
59 # File was deleted?
60 pass
61 except OSError as e:
62 if e.errno != errno.ENOENT:
63 raise
64 if procs is None:
65 # Fall back to fuser if lsof is unavailable
66 try:
67 procs = subprocess.check_output(["fuser", '-v', lockfile], stderr=subprocess.STDOUT)
68 except subprocess.CalledProcessError:
69 # File was deleted?
70 pass
71 except OSError as e:
72 if e.errno != errno.ENOENT:
73 raise
74 if procs:
75 return procs.decode("utf-8")
76 return None
77
78class idleFinish():
79 def __init__(self, msg):
80 self.msg = msg
41 81
42class ProcessServer(): 82class ProcessServer():
43 profile_filename = "profile.log" 83 profile_filename = "profile.log"
@@ -56,12 +96,19 @@ class ProcessServer():
56 self.maxuiwait = 30 96 self.maxuiwait = 30
57 self.xmlrpc = False 97 self.xmlrpc = False
58 98
99 self.idle = None
100 # Need a lock for _idlefuns changes
59 self._idlefuns = {} 101 self._idlefuns = {}
102 self._idlefuncsLock = threading.Lock()
103 self.idle_cond = threading.Condition(self._idlefuncsLock)
60 104
61 self.bitbake_lock = lock 105 self.bitbake_lock = lock
62 self.bitbake_lock_name = lockname 106 self.bitbake_lock_name = lockname
63 self.sock = sock 107 self.sock = sock
64 self.sockname = sockname 108 self.sockname = sockname
109 # It is possible the directory may be renamed. Cache the inode of the socket file
110 # so we can tell if things changed.
111 self.sockinode = os.stat(self.sockname)[stat.ST_INO]
65 112
66 self.server_timeout = server_timeout 113 self.server_timeout = server_timeout
67 self.timeout = self.server_timeout 114 self.timeout = self.server_timeout
@@ -70,7 +117,9 @@ class ProcessServer():
70 def register_idle_function(self, function, data): 117 def register_idle_function(self, function, data):
71 """Register a function to be called while the server is idle""" 118 """Register a function to be called while the server is idle"""
72 assert hasattr(function, '__call__') 119 assert hasattr(function, '__call__')
73 self._idlefuns[function] = data 120 with bb.utils.lock_timeout(self._idlefuncsLock):
121 self._idlefuns[function] = data
122 serverlog("Registering idle function %s" % str(function))
74 123
75 def run(self): 124 def run(self):
76 125
@@ -109,6 +158,31 @@ class ProcessServer():
109 158
110 return ret 159 return ret
111 160
161 def _idle_check(self):
162 return len(self._idlefuns) == 0 and self.cooker.command.currentAsyncCommand is None
163
164 def wait_for_idle(self, timeout=30):
165 # Wait for the idle loop to have cleared
166 with bb.utils.lock_timeout(self._idlefuncsLock):
167 return self.idle_cond.wait_for(self._idle_check, timeout) is not False
168
169 def set_async_cmd(self, cmd):
170 with bb.utils.lock_timeout(self._idlefuncsLock):
171 ret = self.idle_cond.wait_for(self._idle_check, 30)
172 if ret is False:
173 return False
174 self.cooker.command.currentAsyncCommand = cmd
175 return True
176
177 def clear_async_cmd(self):
178 with bb.utils.lock_timeout(self._idlefuncsLock):
179 self.cooker.command.currentAsyncCommand = None
180 self.idle_cond.notify_all()
181
182 def get_async_cmd(self):
183 with bb.utils.lock_timeout(self._idlefuncsLock):
184 return self.cooker.command.currentAsyncCommand
185
112 def main(self): 186 def main(self):
113 self.cooker.pre_serve() 187 self.cooker.pre_serve()
114 188
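
The idle-function registry is now guarded by a lock with an associated Condition, so wait_for_idle and set_async_cmd can block until the registry drains. A generic sketch of that pattern (plain `with lock` stands in for bitbake's lock_timeout wrapper):

    import threading

    # Mutate the registry under the lock, notify the condition, and let
    # waiters block on an "is it empty?" predicate.
    lock = threading.Lock()
    cond = threading.Condition(lock)
    idlefuns = {}

    def register(func, data):
        with lock:
            idlefuns[func] = data

    def remove(func):
        with lock:
            del idlefuns[func]
            cond.notify_all()

    def wait_for_idle(timeout=30):
        with lock:
            return cond.wait_for(lambda: not idlefuns, timeout)

    register(print, None)
    threading.Timer(0.1, remove, args=(print,)).start()
    assert wait_for_idle(5) is True
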
@@ -123,14 +197,19 @@ class ProcessServer():
123 fds.append(self.xmlrpc) 197 fds.append(self.xmlrpc)
124 seendata = False 198 seendata = False
125 serverlog("Entering server connection loop") 199 serverlog("Entering server connection loop")
200 serverlog("Lockfile is: %s\nSocket is %s (%s)" % (self.bitbake_lock_name, self.sockname, os.path.exists(self.sockname)))
126 201
127 def disconnect_client(self, fds): 202 def disconnect_client(self, fds):
128 serverlog("Disconnecting Client") 203 serverlog("Disconnecting Client (socket: %s)" % os.path.exists(self.sockname))
129 if self.controllersock: 204 if self.controllersock:
130 fds.remove(self.controllersock) 205 fds.remove(self.controllersock)
131 self.controllersock.close() 206 self.controllersock.close()
132 self.controllersock = False 207 self.controllersock = False
133 if self.haveui: 208 if self.haveui:
209 # Wait for the idle loop to have cleared (30s max)
210 if not self.wait_for_idle(30):
211 serverlog("Idle loop didn't finish queued commands after 30s, exiting.")
212 self.quit = True
134 fds.remove(self.command_channel) 213 fds.remove(self.command_channel)
135 bb.event.unregister_UIHhandler(self.event_handle, True) 214 bb.event.unregister_UIHhandler(self.event_handle, True)
136 self.command_channel_reply.writer.close() 215 self.command_channel_reply.writer.close()
@@ -142,12 +221,12 @@ class ProcessServer():
142 self.cooker.clientComplete() 221 self.cooker.clientComplete()
143 self.haveui = False 222 self.haveui = False
144 ready = select.select(fds,[],[],0)[0] 223 ready = select.select(fds,[],[],0)[0]
145 if newconnections: 224 if newconnections and not self.quit:
146 serverlog("Starting new client") 225 serverlog("Starting new client")
147 conn = newconnections.pop(-1) 226 conn = newconnections.pop(-1)
148 fds.append(conn) 227 fds.append(conn)
149 self.controllersock = conn 228 self.controllersock = conn
150 elif self.timeout is None and not ready: 229 elif not self.timeout and not ready:
151 serverlog("No timeout, exiting.") 230 serverlog("No timeout, exiting.")
152 self.quit = True 231 self.quit = True
153 232
@@ -214,11 +293,14 @@ class ProcessServer():
214 continue 293 continue
215 try: 294 try:
216 serverlog("Running command %s" % command) 295 serverlog("Running command %s" % command)
217 self.command_channel_reply.send(self.cooker.command.runCommand(command)) 296 reply = self.cooker.command.runCommand(command, self)
218 serverlog("Command Completed") 297 serverlog("Sending reply %s" % repr(reply))
298 self.command_channel_reply.send(reply)
299 serverlog("Command Completed (socket: %s)" % os.path.exists(self.sockname))
219 except Exception as e: 300 except Exception as e:
220 serverlog('Exception in server main event loop running command %s (%s)' % (command, str(e))) 301 stack = traceback.format_exc()
221 logger.exception('Exception in server main event loop running command %s (%s)' % (command, str(e))) 302 serverlog('Exception in server main event loop running command %s (%s)' % (command, stack))
303 logger.exception('Exception in server main event loop running command %s (%s)' % (command, stack))
222 304
223 if self.xmlrpc in ready: 305 if self.xmlrpc in ready:
224 self.xmlrpc.handle_requests() 306 self.xmlrpc.handle_requests()
@@ -241,19 +323,25 @@ class ProcessServer():
241 323
242 ready = self.idle_commands(.1, fds) 324 ready = self.idle_commands(.1, fds)
243 325
244 if len(threading.enumerate()) != 1: 326 if self.idle:
245 serverlog("More than one thread left?: " + str(threading.enumerate())) 327 self.idle.join()
246 328
247 serverlog("Exiting") 329 serverlog("Exiting (socket: %s)" % os.path.exists(self.sockname))
248 # Remove the socket file so we don't get any more connections to avoid races 330 # Remove the socket file so we don't get any more connections to avoid races
331 # The build directory could have been renamed so if the file isn't the one we created
332 # we shouldn't delete it.
249 try: 333 try:
250 os.unlink(self.sockname) 334 sockinode = os.stat(self.sockname)[stat.ST_INO]
251 except: 335 if sockinode == self.sockinode:
252 pass 336 os.unlink(self.sockname)
337 else:
338 serverlog("bitbake.sock inode mismatch (%s vs %s), not deleting." % (sockinode, self.sockinode))
339 except Exception as err:
340 serverlog("Removing socket file '%s' failed (%s)" % (self.sockname, err))
253 self.sock.close() 341 self.sock.close()
254 342
255 try: 343 try:
256 self.cooker.shutdown(True) 344 self.cooker.shutdown(True, idle=False)
257 self.cooker.notifier.stop() 345 self.cooker.notifier.stop()
258 self.cooker.confignotifier.stop() 346 self.cooker.confignotifier.stop()
259 except: 347 except:
@@ -261,6 +349,9 @@ class ProcessServer():
261 349
262 self.cooker.post_serve() 350 self.cooker.post_serve()
263 351
352 if len(threading.enumerate()) != 1:
353 serverlog("More than one thread left?: " + str(threading.enumerate()))
354
264 # Flush logs before we release the lock 355 # Flush logs before we release the lock
265 sys.stdout.flush() 356 sys.stdout.flush()
266 sys.stderr.flush() 357 sys.stderr.flush()
@@ -276,20 +367,21 @@ class ProcessServer():
276 except FileNotFoundError: 367 except FileNotFoundError:
277 return None 368 return None
278 369
279 lockcontents = get_lock_contents(lockfile)
280 serverlog("Original lockfile contents: " + str(lockcontents))
281
282 lock.close() 370 lock.close()
283 lock = None 371 lock = None
284 372
285 while not lock: 373 while not lock:
286 i = 0 374 i = 0
287 lock = None 375 lock = None
376 if not os.path.exists(os.path.basename(lockfile)):
377 serverlog("Lockfile directory gone, exiting.")
378 return
379
288 while not lock and i < 30: 380 while not lock and i < 30:
289 lock = bb.utils.lockfile(lockfile, shared=False, retry=False, block=False) 381 lock = bb.utils.lockfile(lockfile, shared=False, retry=False, block=False)
290 if not lock: 382 if not lock:
291 newlockcontents = get_lock_contents(lockfile) 383 newlockcontents = get_lock_contents(lockfile)
292 if newlockcontents != lockcontents: 384 if not newlockcontents[0].startswith((f"{os.getpid()}\n", f"{os.getpid()} ")):
293 # A new server was started, the lockfile contents changed, we can exit 385 # A new server was started, the lockfile contents changed, we can exit
294 serverlog("Lockfile now contains different contents, exiting: " + str(newlockcontents)) 386 serverlog("Lockfile now contains different contents, exiting: " + str(newlockcontents))
295 return 387 return
@@ -303,75 +395,108 @@ class ProcessServer():
303 return 395 return
304 396
305 if not lock: 397 if not lock:
306 # Some systems may not have lsof available 398 procs = get_lockfile_process_msg(lockfile)
307 procs = None 399 msg = ["Delaying shutdown due to active processes which appear to be holding bitbake.lock"]
400 if procs:
401 msg.append(":\n%s" % procs)
402 serverlog("".join(msg))
403
404 def idle_thread(self):
405 if self.cooker.configuration.profile:
406 try:
407 import cProfile as profile
408 except:
409 import profile
410 prof = profile.Profile()
411
412 ret = profile.Profile.runcall(prof, self.idle_thread_internal)
413
414 prof.dump_stats("profile-mainloop.log")
415 bb.utils.process_profilelog("profile-mainloop.log")
416 serverlog("Raw profiling information saved to profile-mainloop.log and processed statistics to profile-mainloop.log.processed")
417 else:
418 self.idle_thread_internal()
419
420 def idle_thread_internal(self):
421 def remove_idle_func(function):
422 with bb.utils.lock_timeout(self._idlefuncsLock):
423 del self._idlefuns[function]
424 self.idle_cond.notify_all()
425
426 while not self.quit:
427 nextsleep = 0.1
428 fds = []
429
430 with bb.utils.lock_timeout(self._idlefuncsLock):
431 items = list(self._idlefuns.items())
432
433 for function, data in items:
308 try: 434 try:
309 procs = subprocess.check_output(["lsof", '-w', lockfile], stderr=subprocess.STDOUT) 435 retval = function(self, data, False)
310 except subprocess.CalledProcessError: 436 if isinstance(retval, idleFinish):
311 # File was deleted? 437 serverlog("Removing idle function %s at idleFinish" % str(function))
312 continue 438 remove_idle_func(function)
313 except OSError as e: 439 self.cooker.command.finishAsyncCommand(retval.msg)
314 if e.errno != errno.ENOENT: 440 nextsleep = None
315 raise 441 elif retval is False:
316 if procs is None: 442 serverlog("Removing idle function %s" % str(function))
317 # Fall back to fuser if lsof is unavailable 443 remove_idle_func(function)
318 try: 444 nextsleep = None
319 procs = subprocess.check_output(["fuser", '-v', lockfile], stderr=subprocess.STDOUT) 445 elif retval is True:
320 except subprocess.CalledProcessError: 446 nextsleep = None
321 # File was deleted? 447 elif isinstance(retval, float) and nextsleep:
448 if (retval < nextsleep):
449 nextsleep = retval
450 elif nextsleep is None:
322 continue 451 continue
323 except OSError as e: 452 else:
324 if e.errno != errno.ENOENT: 453 fds = fds + retval
325 raise 454 except SystemExit:
455 raise
456 except Exception as exc:
457 if not isinstance(exc, bb.BBHandledException):
458 logger.exception('Running idle function')
459 remove_idle_func(function)
460 serverlog("Exception %s broke the idle_thread, exiting" % traceback.format_exc())
461 self.quit = True
326 462
327 msg = "Delaying shutdown due to active processes which appear to be holding bitbake.lock" 463 # Create new heartbeat event?
328 if procs: 464 now = time.time()
329 msg += ":\n%s" % str(procs.decode("utf-8")) 465 if bb.event._heartbeat_enabled and now >= self.next_heartbeat:
330 serverlog(msg) 466 # We might have missed heartbeats. Just trigger once in
467 # that case and continue after the usual delay.
468 self.next_heartbeat += self.heartbeat_seconds
469 if self.next_heartbeat <= now:
470 self.next_heartbeat = now + self.heartbeat_seconds
471 if hasattr(self.cooker, "data"):
472 heartbeat = bb.event.HeartbeatEvent(now)
473 try:
474 bb.event.fire(heartbeat, self.cooker.data)
475 except Exception as exc:
476 if not isinstance(exc, bb.BBHandledException):
477 logger.exception('Running heartbeat function')
478 serverlog("Exception %s broke in idle_thread, exiting" % traceback.format_exc())
479 self.quit = True
480 if nextsleep and bb.event._heartbeat_enabled and now + nextsleep > self.next_heartbeat:
481 # Shorten timeout so that we wake up in time for
482 # the heartbeat.
483 nextsleep = self.next_heartbeat - now
484
485 if nextsleep is not None:
486 select.select(fds,[],[],nextsleep)[0]
331 487
332 def idle_commands(self, delay, fds=None): 488 def idle_commands(self, delay, fds=None):
333 nextsleep = delay 489 nextsleep = delay
334 if not fds: 490 if not fds:
335 fds = [] 491 fds = []
336 492
337 for function, data in list(self._idlefuns.items()):
338 try:
339 retval = function(self, data, False)
340 if retval is False:
341 del self._idlefuns[function]
342 nextsleep = None
343 elif retval is True:
344 nextsleep = None
345 elif isinstance(retval, float) and nextsleep:
346 if (retval < nextsleep):
347 nextsleep = retval
348 elif nextsleep is None:
349 continue
350 else:
351 fds = fds + retval
352 except SystemExit:
353 raise
354 except Exception as exc:
355 if not isinstance(exc, bb.BBHandledException):
356 logger.exception('Running idle function')
357 del self._idlefuns[function]
358 self.quit = True
359
360 # Create new heartbeat event?
361 now = time.time()
362 if now >= self.next_heartbeat:
363 # We might have missed heartbeats. Just trigger once in
364 # that case and continue after the usual delay.
365 self.next_heartbeat += self.heartbeat_seconds
366 if self.next_heartbeat <= now:
367 self.next_heartbeat = now + self.heartbeat_seconds
368 if hasattr(self.cooker, "data"):
369 heartbeat = bb.event.HeartbeatEvent(now)
370 bb.event.fire(heartbeat, self.cooker.data)
371 if nextsleep and now + nextsleep > self.next_heartbeat:
372 # Shorten timeout so that we wake up in time for
373 # the heartbeat.
374 nextsleep = self.next_heartbeat - now
493 if not self.idle:
494 self.idle = threading.Thread(target=self.idle_thread)
495 self.idle.start()
496 elif self.idle and not self.idle.is_alive():
497 serverlog("Idle thread terminated, main thread exiting too")
498 bb.error("Idle thread terminated, main thread exiting too")
499 self.quit = True
375 500
376 if nextsleep is not None: 501 if nextsleep is not None:
377 if self.xmlrpc: 502 if self.xmlrpc:
@@ -391,12 +516,18 @@ class ServerCommunicator():
391 self.recv = recv 516 self.recv = recv
392 517
393 def runCommand(self, command): 518 def runCommand(self, command):
394 self.connection.send(command)
395 if not self.recv.poll(30):
396 logger.info("No reply from server in 30s")
397 if not self.recv.poll(30):
398 raise ProcessTimeout("Timeout while waiting for a reply from the bitbake server (60s)")
399 ret, exc = self.recv.get()
519 try:
520 self.connection.send(command)
521 except BrokenPipeError as e:
522 raise BrokenPipeError("bitbake-server might have died or been forcibly stopped, ie. OOM killed") from e
523 if not self.recv.poll(30):
524 logger.info("No reply from server in 30s (for command %s at %s)" % (command[0], currenttime()))
525 if not self.recv.poll(30):
526 raise ProcessTimeout("Timeout while waiting for a reply from the bitbake server (60s at %s)" % currenttime())
527 try:
528 ret, exc = self.recv.get()
529 except EOFError as e:
530 raise EOFError("bitbake-server might have died or been forcibly stopped, ie. OOM killed") from e
400 # Should probably turn all exceptions in exc back into exceptions? 531 # Should probably turn all exceptions in exc back into exceptions?
401 # For now, at least handle BBHandledException 532 # For now, at least handle BBHandledException
402 if exc and ("BBHandledException" in exc or "SystemExit" in exc): 533 if exc and ("BBHandledException" in exc or "SystemExit" in exc):
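The poll logic above warns once before failing. A standalone sketch of the same two-stage timeout against a plain multiprocessing.Pipe (windows shortened from 30s for illustration):

    import multiprocessing

    recv, send = multiprocessing.Pipe(duplex=False)
    if not recv.poll(0.5):        # first window: only log that we are still waiting
        print("No reply from server yet")
        if not recv.poll(0.5):    # second window: give up and report a timeout
            print("Timeout while waiting for a reply")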
@@ -429,6 +560,7 @@ class BitBakeProcessServerConnection(object):
429 self.socket_connection = sock 560 self.socket_connection = sock
430 561
431 def terminate(self): 562 def terminate(self):
563 self.events.close()
432 self.socket_connection.close() 564 self.socket_connection.close()
433 self.connection.connection.close() 565 self.connection.connection.close()
434 self.connection.recv.close() 566 self.connection.recv.close()
@@ -439,13 +571,14 @@ start_log_datetime_format = '%Y-%m-%d %H:%M:%S.%f'
439 571
440class BitBakeServer(object): 572class BitBakeServer(object):
441 573
442 def __init__(self, lock, sockname, featureset, server_timeout, xmlrpcinterface):
574 def __init__(self, lock, sockname, featureset, server_timeout, xmlrpcinterface, profile):
443 575
444 self.server_timeout = server_timeout 576 self.server_timeout = server_timeout
445 self.xmlrpcinterface = xmlrpcinterface 577 self.xmlrpcinterface = xmlrpcinterface
446 self.featureset = featureset 578 self.featureset = featureset
447 self.sockname = sockname 579 self.sockname = sockname
448 self.bitbake_lock = lock 580 self.bitbake_lock = lock
581 self.profile = profile
449 self.readypipe, self.readypipein = os.pipe() 582 self.readypipe, self.readypipein = os.pipe()
450 583
451 # Place the log in the builddirectory alongside the lock file 584 # Place the log in the builddirectory alongside the lock file
@@ -466,7 +599,7 @@ class BitBakeServer(object):
466 try: 599 try:
467 r = ready.get() 600 r = ready.get()
468 except EOFError: 601 except EOFError:
469 # Trap the child exitting/closing the pipe and error out
602 # Trap the child exiting/closing the pipe and error out
470 r = None 603 r = None
471 if not r or r[0] != "r": 604 if not r or r[0] != "r":
472 ready.close() 605 ready.close()
@@ -509,9 +642,9 @@ class BitBakeServer(object):
509 os.set_inheritable(self.bitbake_lock.fileno(), True) 642 os.set_inheritable(self.bitbake_lock.fileno(), True)
510 os.set_inheritable(self.readypipein, True) 643 os.set_inheritable(self.readypipein, True)
511 serverscript = os.path.realpath(os.path.dirname(__file__) + "/../../../bin/bitbake-server") 644 serverscript = os.path.realpath(os.path.dirname(__file__) + "/../../../bin/bitbake-server")
512 os.execl(sys.executable, "bitbake-server", serverscript, "decafbad", str(self.bitbake_lock.fileno()), str(self.readypipein), self.logfile, self.bitbake_lock.name, self.sockname, str(self.server_timeout), str(self.xmlrpcinterface[0]), str(self.xmlrpcinterface[1]))
645 os.execl(sys.executable, sys.executable, serverscript, "decafbad", str(self.bitbake_lock.fileno()), str(self.readypipein), self.logfile, self.bitbake_lock.name, self.sockname, str(self.server_timeout or 0), str(int(self.profile)), str(self.xmlrpcinterface[0]), str(self.xmlrpcinterface[1]))
513 646
514def execServer(lockfd, readypipeinfd, lockname, sockname, server_timeout, xmlrpcinterface):
647def execServer(lockfd, readypipeinfd, lockname, sockname, server_timeout, xmlrpcinterface, profile):
515 648
516 import bb.cookerdata 649 import bb.cookerdata
517 import bb.cooker 650 import bb.cooker
@@ -523,6 +656,7 @@ def execServer(lockfd, readypipeinfd, lockname, sockname, server_timeout, xmlrpc
523 656
524 # Create server control socket 657 # Create server control socket
525 if os.path.exists(sockname): 658 if os.path.exists(sockname):
659 serverlog("WARNING: removing existing socket file '%s'" % sockname)
526 os.unlink(sockname) 660 os.unlink(sockname)
527 661
528 sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) 662 sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
@@ -539,7 +673,8 @@ def execServer(lockfd, readypipeinfd, lockname, sockname, server_timeout, xmlrpc
539 writer = ConnectionWriter(readypipeinfd) 673 writer = ConnectionWriter(readypipeinfd)
540 try: 674 try:
541 featureset = [] 675 featureset = []
542 cooker = bb.cooker.BBCooker(featureset, server.register_idle_function)
676 cooker = bb.cooker.BBCooker(featureset, server)
677 cooker.configuration.profile = profile
543 except bb.BBHandledException: 678 except bb.BBHandledException:
544 return None 679 return None
545 writer.send("r") 680 writer.send("r")
@@ -549,7 +684,7 @@ def execServer(lockfd, readypipeinfd, lockname, sockname, server_timeout, xmlrpc
549 684
550 server.run() 685 server.run()
551 finally: 686 finally:
552 # Flush any ,essages/errors to the logfile before exit
687 # Flush any messages/errors to the logfile before exit
553 sys.stdout.flush() 688 sys.stdout.flush()
554 sys.stderr.flush() 689 sys.stderr.flush()
555 690
@@ -654,23 +789,18 @@ class BBUIEventQueue:
654 self.reader = ConnectionReader(readfd) 789 self.reader = ConnectionReader(readfd)
655 790
656 self.t = threading.Thread() 791 self.t = threading.Thread()
657 self.t.setDaemon(True)
658 self.t.run = self.startCallbackHandler 792 self.t.run = self.startCallbackHandler
659 self.t.start() 793 self.t.start()
660 794
661 def getEvent(self): 795 def getEvent(self):
662 self.eventQueueLock.acquire()
663
664 if len(self.eventQueue) == 0:
665 self.eventQueueLock.release()
666 return None
667
668 item = self.eventQueue.pop(0)
669
670 if len(self.eventQueue) == 0:
671 self.eventQueueNotify.clear()
672
673 self.eventQueueLock.release()
796 with bb.utils.lock_timeout(self.eventQueueLock):
797 if len(self.eventQueue) == 0:
798 return None
799
800 item = self.eventQueue.pop(0)
801 if len(self.eventQueue) == 0:
802 self.eventQueueNotify.clear()
803
674 return item 804 return item
675 805
676 def waitEvent(self, delay): 806 def waitEvent(self, delay):
@@ -678,10 +808,9 @@ class BBUIEventQueue:
678 return self.getEvent() 808 return self.getEvent()
679 809
680 def queue_event(self, event): 810 def queue_event(self, event):
681 self.eventQueueLock.acquire()
682 self.eventQueue.append(event)
683 self.eventQueueNotify.set()
684 self.eventQueueLock.release()
811 with bb.utils.lock_timeout(self.eventQueueLock):
812 self.eventQueue.append(event)
813 self.eventQueueNotify.set()
685 814
686 def send_event(self, event): 815 def send_event(self, event):
687 self.queue_event(pickle.loads(event)) 816 self.queue_event(pickle.loads(event))
@@ -690,13 +819,17 @@ class BBUIEventQueue:
690 bb.utils.set_process_name("UIEventQueue") 819 bb.utils.set_process_name("UIEventQueue")
691 while True: 820 while True:
692 try: 821 try:
693 self.reader.wait()
694 event = self.reader.get()
695 self.queue_event(event)
696 except EOFError:
822 ready = self.reader.wait(0.25)
823 if ready:
824 event = self.reader.get()
825 self.queue_event(event)
826 except (EOFError, OSError, TypeError):
697 # Easiest way to exit is to close the file descriptor to cause an exit 827 # Easiest way to exit is to close the file descriptor to cause an exit
698 break 828 break
829
830 def close(self):
699 self.reader.close() 831 self.reader.close()
832 self.t.join()
700 833
701class ConnectionReader(object): 834class ConnectionReader(object):
702 835
@@ -711,7 +844,7 @@ class ConnectionReader(object):
711 return self.reader.poll(timeout) 844 return self.reader.poll(timeout)
712 845
713 def get(self): 846 def get(self):
714 with self.rlock:
847 with bb.utils.lock_timeout(self.rlock):
715 res = self.reader.recv_bytes() 848 res = self.reader.recv_bytes()
716 return multiprocessing.reduction.ForkingPickler.loads(res) 849 return multiprocessing.reduction.ForkingPickler.loads(res)
717 850
@@ -730,10 +863,31 @@ class ConnectionWriter(object):
730 # Why bb.event needs this I have no idea 863 # Why bb.event needs this I have no idea
731 self.event = self 864 self.event = self
732 865
866 def _send(self, obj):
867 gc.disable()
868 with bb.utils.lock_timeout(self.wlock):
869 self.writer.send_bytes(obj)
870 gc.enable()
871
733 def send(self, obj): 872 def send(self, obj):
734 obj = multiprocessing.reduction.ForkingPickler.dumps(obj) 873 obj = multiprocessing.reduction.ForkingPickler.dumps(obj)
735 with self.wlock:
736 self.writer.send_bytes(obj)
874 # See notes/code in CookerParser
875 # We must not terminate holding this lock else processes will hang.
876 # For SIGTERM, raising afterwards avoids this.
877 # For SIGINT, we don't want to have written partial data to the pipe.
878 # pthread_sigmask block/unblock would be nice but doesn't work, https://bugs.python.org/issue47139
879 process = multiprocessing.current_process()
880 if process and hasattr(process, "queue_signals"):
881 with bb.utils.lock_timeout(process.signal_threadlock):
882 process.queue_signals = True
883 self._send(obj)
884 process.queue_signals = False
885
886 while len(process.signal_received) > 0:
887 sig = process.signal_received.pop()
888 process.handle_sig(sig, None)
889 else:
890 self._send(obj)
737 891
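The comments above explain why signal handling is deferred while bytes are in flight on the pipe. A standalone sketch of the queue-and-replay idea using only the stdlib signal module (BitBake's queue_signals/signal_threadlock process attributes are not reproduced here):

    import signal

    pending = []

    def deferring_handler(sig, frame):
        pending.append(sig)  # record, do not act mid-write

    old = signal.signal(signal.SIGINT, deferring_handler)
    try:
        pass  # critical section, e.g. writer.send_bytes(obj)
    finally:
        signal.signal(signal.SIGINT, old)
        for sig in pending:
            print("handling deferred signal", sig)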
738 def fileno(self): 892 def fileno(self):
739 return self.writer.fileno() 893 return self.writer.fileno()
diff --git a/bitbake/lib/bb/server/xmlrpcserver.py b/bitbake/lib/bb/server/xmlrpcserver.py
index 2fa71be667..04b0b17db1 100644
--- a/bitbake/lib/bb/server/xmlrpcserver.py
+++ b/bitbake/lib/bb/server/xmlrpcserver.py
@@ -11,6 +11,7 @@ import hashlib
11import time 11import time
12import inspect 12import inspect
13from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler 13from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
14import bb.server.xmlrpcclient
14 15
15import bb 16import bb
16 17
@@ -117,7 +118,7 @@ class BitBakeXMLRPCServerCommands():
117 """ 118 """
118 Run a cooker command on the server 119 Run a cooker command on the server
119 """ 120 """
120 return self.server.cooker.command.runCommand(command, self.server.readonly)
121 return self.server.cooker.command.runCommand(command, self.server.parent, self.server.readonly)
121 122
122 def getEventHandle(self): 123 def getEventHandle(self):
123 return self.event_handle 124 return self.event_handle
diff --git a/bitbake/lib/bb/siggen.py b/bitbake/lib/bb/siggen.py
index 0d88c6ec68..8ab08ec961 100644
--- a/bitbake/lib/bb/siggen.py
+++ b/bitbake/lib/bb/siggen.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright BitBake Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -11,6 +13,10 @@ import pickle
11import bb.data 13import bb.data
12import difflib 14import difflib
13import simplediff 15import simplediff
16import json
17import types
18from contextlib import contextmanager
19import bb.compress.zstd
14from bb.checksum import FileChecksumCache 20from bb.checksum import FileChecksumCache
15from bb import runqueue 21from bb import runqueue
16import hashserv 22import hashserv
@@ -19,6 +25,35 @@ import hashserv.client
19logger = logging.getLogger('BitBake.SigGen') 25logger = logging.getLogger('BitBake.SigGen')
20hashequiv_logger = logging.getLogger('BitBake.SigGen.HashEquiv') 26hashequiv_logger = logging.getLogger('BitBake.SigGen.HashEquiv')
21 27
28#find_siginfo and find_siginfo_version are set by the metadata siggen
29# The minimum version of the find_siginfo function we need
30find_siginfo_minversion = 2
31
32HASHSERV_ENVVARS = [
33 "SSL_CERT_DIR",
34 "SSL_CERT_FILE",
35 "NO_PROXY",
36 "HTTPS_PROXY",
37 "HTTP_PROXY"
38]
39
40def check_siggen_version(siggen):
41 if not hasattr(siggen, "find_siginfo_version"):
42 bb.fatal("Siggen from metadata (OE-Core?) is too old, please update it (no version found)")
43 if siggen.find_siginfo_version < siggen.find_siginfo_minversion:
44 bb.fatal("Siggen from metadata (OE-Core?) is too old, please update it (%s vs %s)" % (siggen.find_siginfo_version, siggen.find_siginfo_minversion))
45
46class SetEncoder(json.JSONEncoder):
47 def default(self, obj):
48 if isinstance(obj, set) or isinstance(obj, frozenset):
49 return dict(_set_object=list(sorted(obj)))
50 return json.JSONEncoder.default(self, obj)
51
52def SetDecoder(dct):
53 if '_set_object' in dct:
54 return frozenset(dct['_set_object'])
55 return dct
56
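Assuming the SetEncoder/SetDecoder pair defined above, a set survives a JSON round trip via the _set_object wrapper, coming back as a frozenset:

    import json

    payload = {"deps": {"b", "a"}}
    text = json.dumps(payload, cls=SetEncoder)
    print(text)  # {"deps": {"_set_object": ["a", "b"]}}
    restored = json.loads(text, object_hook=SetDecoder)
    assert restored["deps"] == frozenset({"a", "b"})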
22def init(d): 57def init(d):
23 siggens = [obj for obj in globals().values() 58 siggens = [obj for obj in globals().values()
24 if type(obj) is type and issubclass(obj, SignatureGenerator)] 59 if type(obj) is type and issubclass(obj, SignatureGenerator)]
@@ -27,7 +62,6 @@ def init(d):
27 for sg in siggens: 62 for sg in siggens:
28 if desired == sg.name: 63 if desired == sg.name:
29 return sg(d) 64 return sg(d)
30 break
31 else: 65 else:
32 logger.error("Invalid signature generator '%s', using default 'noop'\n" 66 logger.error("Invalid signature generator '%s', using default 'noop'\n"
33 "Available generators: %s", desired, 67 "Available generators: %s", desired,
@@ -39,11 +73,6 @@ class SignatureGenerator(object):
39 """ 73 """
40 name = "noop" 74 name = "noop"
41 75
42 # If the derived class supports multiconfig datacaches, set this to True
43 # The default is False for backward compatibility with derived signature
44 # generators that do not understand multiconfig caches
45 supports_multiconfig_datacaches = False
46
47 def __init__(self, data): 76 def __init__(self, data):
48 self.basehash = {} 77 self.basehash = {}
49 self.taskhash = {} 78 self.taskhash = {}
@@ -61,9 +90,39 @@ class SignatureGenerator(object):
61 def postparsing_clean_cache(self): 90 def postparsing_clean_cache(self):
62 return 91 return
63 92
93 def setup_datacache(self, datacaches):
94 self.datacaches = datacaches
95
96 def setup_datacache_from_datastore(self, mcfn, d):
97 # In task context we have no cache so setup internal data structures
98 # from the fully parsed data store provided
99
100 mc = d.getVar("__BBMULTICONFIG", False) or ""
101 tasks = d.getVar('__BBTASKS', False)
102
103 self.datacaches = {}
104 self.datacaches[mc] = types.SimpleNamespace()
105 setattr(self.datacaches[mc], "stamp", {})
106 self.datacaches[mc].stamp[mcfn] = d.getVar('STAMP')
107 setattr(self.datacaches[mc], "stamp_extrainfo", {})
108 self.datacaches[mc].stamp_extrainfo[mcfn] = {}
109 for t in tasks:
110 flag = d.getVarFlag(t, "stamp-extra-info")
111 if flag:
112 self.datacaches[mc].stamp_extrainfo[mcfn][t] = flag
113
114 def get_cached_unihash(self, tid):
115 return None
116
64 def get_unihash(self, tid): 117 def get_unihash(self, tid):
118 unihash = self.get_cached_unihash(tid)
119 if unihash:
120 return unihash
65 return self.taskhash[tid] 121 return self.taskhash[tid]
66 122
123 def get_unihashes(self, tids):
124 return {tid: self.get_unihash(tid) for tid in tids}
125
67 def prep_taskhash(self, tid, deps, dataCaches): 126 def prep_taskhash(self, tid, deps, dataCaches):
68 return 127 return
69 128
@@ -75,17 +134,51 @@ class SignatureGenerator(object):
75 """Write/update the file checksum cache onto disk""" 134 """Write/update the file checksum cache onto disk"""
76 return 135 return
77 136
137 def stampfile_base(self, mcfn):
138 mc = bb.runqueue.mc_from_tid(mcfn)
139 return self.datacaches[mc].stamp[mcfn]
140
141 def stampfile_mcfn(self, taskname, mcfn, extrainfo=True):
142 mc = bb.runqueue.mc_from_tid(mcfn)
143 stamp = self.datacaches[mc].stamp[mcfn]
144 if not stamp:
145 return
146
147 stamp_extrainfo = ""
148 if extrainfo:
149 taskflagname = taskname
150 if taskname.endswith("_setscene"):
151 taskflagname = taskname.replace("_setscene", "")
152 stamp_extrainfo = self.datacaches[mc].stamp_extrainfo[mcfn].get(taskflagname) or ""
153
154 return self.stampfile(stamp, mcfn, taskname, stamp_extrainfo)
155
78 def stampfile(self, stampbase, file_name, taskname, extrainfo): 156 def stampfile(self, stampbase, file_name, taskname, extrainfo):
79 return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.') 157 return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')
80 158
159 def stampcleanmask_mcfn(self, taskname, mcfn):
160 mc = bb.runqueue.mc_from_tid(mcfn)
161 stamp = self.datacaches[mc].stamp[mcfn]
162 if not stamp:
163 return []
164
165 taskflagname = taskname
166 if taskname.endswith("_setscene"):
167 taskflagname = taskname.replace("_setscene", "")
168 stamp_extrainfo = self.datacaches[mc].stamp_extrainfo[mcfn].get(taskflagname) or ""
169
170 return self.stampcleanmask(stamp, mcfn, taskname, stamp_extrainfo)
171
81 def stampcleanmask(self, stampbase, file_name, taskname, extrainfo): 172 def stampcleanmask(self, stampbase, file_name, taskname, extrainfo):
82 return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.') 173 return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')
83 174
84 def dump_sigtask(self, fn, task, stampbase, runtime):
175 def dump_sigtask(self, mcfn, task, stampbase, runtime):
85 return 176 return
86 177
87 def invalidate_task(self, task, d, fn):
88 bb.build.del_stamp(task, d, fn)
178 def invalidate_task(self, task, mcfn):
179 mc = bb.runqueue.mc_from_tid(mcfn)
180 stamp = self.datacaches[mc].stamp[mcfn]
181 bb.utils.remove(stamp)
89 182
90 def dump_sigs(self, dataCache, options): 183 def dump_sigs(self, dataCache, options):
91 return 184 return
@@ -108,40 +201,19 @@ class SignatureGenerator(object):
108 def save_unitaskhashes(self): 201 def save_unitaskhashes(self):
109 return 202 return
110 203
111 def set_setscene_tasks(self, setscene_tasks):
112 return
113
114 @classmethod
115 def get_data_caches(cls, dataCaches, mc):
116 """
117 This function returns the datacaches that should be passed to signature
118 generator functions. If the signature generator supports multiconfig
119 caches, the entire dictionary of data caches is sent, otherwise a
120 special proxy is sent that support both index access to all
121 multiconfigs, and also direct access for the default multiconfig.
122
123 The proxy class allows code in this class itself to always use
124 multiconfig aware code (to ease maintenance), but derived classes that
125 are unaware of multiconfig data caches can still access the default
126 multiconfig as expected.
127
128 Do not override this function in derived classes; it will be removed in
129 the future when support for multiconfig data caches is mandatory
130 """
131 class DataCacheProxy(object):
132 def __init__(self):
133 pass
134
135 def __getitem__(self, key):
136 return dataCaches[key]
137
138 def __getattr__(self, name):
139 return getattr(dataCaches[mc], name)
140
141 if cls.supports_multiconfig_datacaches:
142 return dataCaches
143
144 return DataCacheProxy()
204 def copy_unitaskhashes(self, targetdir):
205 return
206
207 def set_setscene_tasks(self, setscene_tasks):
208 return
209
210 def exit(self):
211 return
212
213def build_pnid(mc, pn, taskname):
214 if mc:
215 return "mc:" + mc + ":" + pn + ":" + taskname
216 return pn + ":" + taskname
145 217
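build_pnid produces the human-readable task identifiers now recorded in siginfo files. Copying the function exactly as added above (the recipe name is hypothetical):

    def build_pnid(mc, pn, taskname):
        if mc:
            return "mc:" + mc + ":" + pn + ":" + taskname
        return pn + ":" + taskname

    print(build_pnid("", "zlib", "do_compile"))     # zlib:do_compile
    print(build_pnid("mc1", "zlib", "do_compile"))  # mc:mc1:zlib:do_compile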
146class SignatureGeneratorBasic(SignatureGenerator): 218class SignatureGeneratorBasic(SignatureGenerator):
147 """ 219 """
@@ -152,15 +224,12 @@ class SignatureGeneratorBasic(SignatureGenerator):
152 self.basehash = {} 224 self.basehash = {}
153 self.taskhash = {} 225 self.taskhash = {}
154 self.unihash = {} 226 self.unihash = {}
155 self.taskdeps = {}
156 self.runtaskdeps = {} 227 self.runtaskdeps = {}
157 self.file_checksum_values = {} 228 self.file_checksum_values = {}
158 self.taints = {} 229 self.taints = {}
159 self.gendeps = {}
160 self.lookupcache = {}
161 self.setscenetasks = set() 230 self.setscenetasks = set()
162 self.basewhitelist = set((data.getVar("BB_HASHBASE_WHITELIST") or "").split())
163 self.taskwhitelist = None
231 self.basehash_ignore_vars = set((data.getVar("BB_BASEHASH_IGNORE_VARS") or "").split())
232 self.taskhash_ignore_tasks = None
164 self.init_rundepcheck(data) 233 self.init_rundepcheck(data)
165 checksum_cache_file = data.getVar("BB_HASH_CHECKSUM_CACHE_FILE") 234 checksum_cache_file = data.getVar("BB_HASH_CHECKSUM_CACHE_FILE")
166 if checksum_cache_file: 235 if checksum_cache_file:
@@ -175,21 +244,21 @@ class SignatureGeneratorBasic(SignatureGenerator):
175 self.tidtopn = {} 244 self.tidtopn = {}
176 245
177 def init_rundepcheck(self, data): 246 def init_rundepcheck(self, data):
178 self.taskwhitelist = data.getVar("BB_HASHTASK_WHITELIST") or None
179 if self.taskwhitelist:
180 self.twl = re.compile(self.taskwhitelist)
247 self.taskhash_ignore_tasks = data.getVar("BB_TASKHASH_IGNORE_TASKS") or None
248 if self.taskhash_ignore_tasks:
249 self.twl = re.compile(self.taskhash_ignore_tasks)
181 else: 250 else:
182 self.twl = None 251 self.twl = None
183 252
184 def _build_data(self, fn, d):
253 def _build_data(self, mcfn, d):
185 254
186 ignore_mismatch = ((d.getVar("BB_HASH_IGNORE_MISMATCH") or '') == '1') 255 ignore_mismatch = ((d.getVar("BB_HASH_IGNORE_MISMATCH") or '') == '1')
187 tasklist, gendeps, lookupcache = bb.data.generate_dependencies(d, self.basewhitelist)
256 tasklist, gendeps, lookupcache = bb.data.generate_dependencies(d, self.basehash_ignore_vars)
188 257
189 taskdeps, basehash = bb.data.generate_dependency_hash(tasklist, gendeps, lookupcache, self.basewhitelist, fn)
258 taskdeps, basehash = bb.data.generate_dependency_hash(tasklist, gendeps, lookupcache, self.basehash_ignore_vars, mcfn)
190 259
191 for task in tasklist: 260 for task in tasklist:
192 tid = fn + ":" + task
261 tid = mcfn + ":" + task
193 if not ignore_mismatch and tid in self.basehash and self.basehash[tid] != basehash[tid]: 262 if not ignore_mismatch and tid in self.basehash and self.basehash[tid] != basehash[tid]:
194 bb.error("When reparsing %s, the basehash value changed from %s to %s. The metadata is not deterministic and this needs to be fixed." % (tid, self.basehash[tid], basehash[tid])) 263 bb.error("When reparsing %s, the basehash value changed from %s to %s. The metadata is not deterministic and this needs to be fixed." % (tid, self.basehash[tid], basehash[tid]))
195 bb.error("The following commands may help:") 264 bb.error("The following commands may help:")
@@ -200,11 +269,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
200 bb.error("%s -Sprintdiff\n" % cmd) 269 bb.error("%s -Sprintdiff\n" % cmd)
201 self.basehash[tid] = basehash[tid] 270 self.basehash[tid] = basehash[tid]
202 271
203 self.taskdeps[fn] = taskdeps
204 self.gendeps[fn] = gendeps
205 self.lookupcache[fn] = lookupcache
206
207 return taskdeps
272 return taskdeps, gendeps, lookupcache
208 273
209 def set_setscene_tasks(self, setscene_tasks): 274 def set_setscene_tasks(self, setscene_tasks):
210 self.setscenetasks = set(setscene_tasks) 275 self.setscenetasks = set(setscene_tasks)
@@ -212,35 +277,47 @@ class SignatureGeneratorBasic(SignatureGenerator):
212 def finalise(self, fn, d, variant): 277 def finalise(self, fn, d, variant):
213 278
214 mc = d.getVar("__BBMULTICONFIG", False) or "" 279 mc = d.getVar("__BBMULTICONFIG", False) or ""
280 mcfn = fn
215 if variant or mc: 281 if variant or mc:
216 fn = bb.cache.realfn2virtual(fn, variant, mc)
282 mcfn = bb.cache.realfn2virtual(fn, variant, mc)
217 283
218 try: 284 try:
219 taskdeps = self._build_data(fn, d)
285 taskdeps, gendeps, lookupcache = self._build_data(mcfn, d)
220 except bb.parse.SkipRecipe: 286 except bb.parse.SkipRecipe:
221 raise 287 raise
222 except: 288 except:
223 bb.warn("Error during finalise of %s" % fn)
289 bb.warn("Error during finalise of %s" % mcfn)
224 raise 290 raise
225 291
292 basehashes = {}
293 for task in taskdeps:
294 basehashes[task] = self.basehash[mcfn + ":" + task]
295
296 d.setVar("__siggen_basehashes", basehashes)
297 d.setVar("__siggen_gendeps", gendeps)
298 d.setVar("__siggen_varvals", lookupcache)
299 d.setVar("__siggen_taskdeps", taskdeps)
300
226 #Slow but can be useful for debugging mismatched basehashes 301 #Slow but can be useful for debugging mismatched basehashes
227 #for task in self.taskdeps[fn]:
228 # self.dump_sigtask(fn, task, d.getVar("STAMP"), False)
229
230 for task in taskdeps:
231 d.setVar("BB_BASEHASH_task-%s" % task, self.basehash[fn + ":" + task])
232
233 def postparsing_clean_cache(self):
234 #
235 # After parsing we can remove some things from memory to reduce our memory footprint
236 #
237 self.gendeps = {}
238 self.lookupcache = {}
239 self.taskdeps = {}
302 #self.setup_datacache_from_datastore(mcfn, d)
303 #for task in taskdeps:
304 # self.dump_sigtask(mcfn, task, d.getVar("STAMP"), False)
305
306 def setup_datacache_from_datastore(self, mcfn, d):
307 super().setup_datacache_from_datastore(mcfn, d)
308
309 mc = bb.runqueue.mc_from_tid(mcfn)
310 for attr in ["siggen_varvals", "siggen_taskdeps", "siggen_gendeps"]:
311 if not hasattr(self.datacaches[mc], attr):
312 setattr(self.datacaches[mc], attr, {})
313 self.datacaches[mc].siggen_varvals[mcfn] = d.getVar("__siggen_varvals")
314 self.datacaches[mc].siggen_taskdeps[mcfn] = d.getVar("__siggen_taskdeps")
315 self.datacaches[mc].siggen_gendeps[mcfn] = d.getVar("__siggen_gendeps")
240 316
241 def rundep_check(self, fn, recipename, task, dep, depname, dataCaches): 317 def rundep_check(self, fn, recipename, task, dep, depname, dataCaches):
242 # Return True if we should keep the dependency, False to drop it 318 # Return True if we should keep the dependency, False to drop it
243 # We only manipulate the dependencies for packages not in the whitelist
319 # We only manipulate the dependencies for packages not in the ignore
320 # list
244 if self.twl and not self.twl.search(recipename): 321 if self.twl and not self.twl.search(recipename):
245 # then process the actual dependencies 322 # then process the actual dependencies
246 if self.twl.search(depname): 323 if self.twl.search(depname):
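The rule above drops a dependency from hash calculation when the recipe itself is not matched by the ignore pattern but the dependency is. A standalone sketch of that decision (pattern and names hypothetical):

    import re

    ignore_tasks = re.compile(r"^nativesdk-")  # a hypothetical BB_TASKHASH_IGNORE_TASKS value
    recipename, depname = "zlib", "nativesdk-gcc"

    # Drop when the recipe is not matched but the dependency is.
    drop = bool(not ignore_tasks.search(recipename) and ignore_tasks.search(depname))
    print(drop)  # True: this dependency would not influence zlib's task hash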
@@ -258,38 +335,37 @@ class SignatureGeneratorBasic(SignatureGenerator):
258 335
259 def prep_taskhash(self, tid, deps, dataCaches): 336 def prep_taskhash(self, tid, deps, dataCaches):
260 337
261 (mc, _, task, fn) = bb.runqueue.split_tid_mcfn(tid)
338 (mc, _, task, mcfn) = bb.runqueue.split_tid_mcfn(tid)
262 339
263 self.basehash[tid] = dataCaches[mc].basetaskhash[tid] 340 self.basehash[tid] = dataCaches[mc].basetaskhash[tid]
264 self.runtaskdeps[tid] = [] 341 self.runtaskdeps[tid] = []
265 self.file_checksum_values[tid] = [] 342 self.file_checksum_values[tid] = []
266 recipename = dataCaches[mc].pkg_fn[fn]
343 recipename = dataCaches[mc].pkg_fn[mcfn]
267 344
268 self.tidtopn[tid] = recipename 345 self.tidtopn[tid] = recipename
346 # save hashfn for deps into siginfo?
347 for dep in deps:
348 (depmc, _, deptask, depmcfn) = bb.runqueue.split_tid_mcfn(dep)
349 dep_pn = dataCaches[depmc].pkg_fn[depmcfn]
269 350
270 for dep in sorted(deps, key=clean_basepath):
271 (depmc, _, _, depmcfn) = bb.runqueue.split_tid_mcfn(dep)
272 depname = dataCaches[depmc].pkg_fn[depmcfn]
273 if not self.supports_multiconfig_datacaches and mc != depmc:
274 # If the signature generator doesn't understand multiconfig
275 # data caches, any dependency not in the same multiconfig must
276 # be skipped for backward compatibility
277 continue
278 if not self.rundep_check(fn, recipename, task, dep, depname, dataCaches):
279 continue
280 if dep not in self.taskhash:
281 bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?" % dep)
282 self.runtaskdeps[tid].append(dep)
283
284 if task in dataCaches[mc].file_checksums[fn]:
351 if not self.rundep_check(mcfn, recipename, task, dep, dep_pn, dataCaches):
352 continue
353
354 if dep not in self.taskhash:
355 bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?" % dep)
356
357 dep_pnid = build_pnid(depmc, dep_pn, deptask)
358 self.runtaskdeps[tid].append((dep_pnid, dep))
359
360 if task in dataCaches[mc].file_checksums[mcfn]:
285 if self.checksum_cache: 361 if self.checksum_cache:
286 checksums = self.checksum_cache.get_checksums(dataCaches[mc].file_checksums[fn][task], recipename, self.localdirsexclude)
362 checksums = self.checksum_cache.get_checksums(dataCaches[mc].file_checksums[mcfn][task], recipename, self.localdirsexclude)
287 else: 363 else:
288 checksums = bb.fetch2.get_file_checksums(dataCaches[mc].file_checksums[fn][task], recipename, self.localdirsexclude)
364 checksums = bb.fetch2.get_file_checksums(dataCaches[mc].file_checksums[mcfn][task], recipename, self.localdirsexclude)
289 for (f,cs) in checksums: 365 for (f,cs) in checksums:
290 self.file_checksum_values[tid].append((f,cs)) 366 self.file_checksum_values[tid].append((f,cs))
291 367
292 taskdep = dataCaches[mc].task_deps[fn]
368 taskdep = dataCaches[mc].task_deps[mcfn]
293 if 'nostamp' in taskdep and task in taskdep['nostamp']: 369 if 'nostamp' in taskdep and task in taskdep['nostamp']:
294 # Nostamp tasks need an implicit taint so that they force any dependent tasks to run 370 # Nostamp tasks need an implicit taint so that they force any dependent tasks to run
295 if tid in self.taints and self.taints[tid].startswith("nostamp:"): 371 if tid in self.taints and self.taints[tid].startswith("nostamp:"):
@@ -300,7 +376,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
300 taint = str(uuid.uuid4()) 376 taint = str(uuid.uuid4())
301 self.taints[tid] = "nostamp:" + taint 377 self.taints[tid] = "nostamp:" + taint
302 378
303 taint = self.read_taint(fn, task, dataCaches[mc].stamp[fn])
379 taint = self.read_taint(mcfn, task, dataCaches[mc].stamp[mcfn])
304 if taint: 380 if taint:
305 self.taints[tid] = taint 381 self.taints[tid] = taint
306 logger.warning("%s is tainted from a forced run" % tid) 382 logger.warning("%s is tainted from a forced run" % tid)
@@ -310,22 +386,24 @@ class SignatureGeneratorBasic(SignatureGenerator):
310 def get_taskhash(self, tid, deps, dataCaches): 386 def get_taskhash(self, tid, deps, dataCaches):
311 387
312 data = self.basehash[tid] 388 data = self.basehash[tid]
313 for dep in self.runtaskdeps[tid]:
314 data = data + self.get_unihash(dep)
389 for dep in sorted(self.runtaskdeps[tid]):
390 data += self.get_unihash(dep[1])
315 391
316 for (f, cs) in self.file_checksum_values[tid]:
392 for (f, cs) in sorted(self.file_checksum_values[tid], key=clean_checksum_file_path):
317 if cs: 393 if cs:
318 data = data + cs
394 if "/./" in f:
395 data += "./" + f.split("/./")[1]
396 data += cs
319 397
320 if tid in self.taints: 398 if tid in self.taints:
321 if self.taints[tid].startswith("nostamp:"): 399 if self.taints[tid].startswith("nostamp:"):
322 data = data + self.taints[tid][8:]
400 data += self.taints[tid][8:]
323 else: 401 else:
324 data = data + self.taints[tid]
402 data += self.taints[tid]
325 403
326 h = hashlib.sha256(data.encode("utf-8")).hexdigest() 404 h = hashlib.sha256(data.encode("utf-8")).hexdigest()
327 self.taskhash[tid] = h 405 self.taskhash[tid] = h
328 #d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task])
406 #d.setVar("BB_TASKHASH:task-%s" % task, taskhash[task])
329 return h 407 return h
330 408
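get_taskhash concatenates the basehash, the sorted dependency unihashes, the file checksums (prefixing the path after any '/./' marker), and any taint, then takes a SHA-256. A standalone sketch with hypothetical values:

    import hashlib

    basehash = "aa" * 32
    dep_unihashes = ["cc" * 32, "bb" * 32]
    file_checksum_values = [("/build/recipe/./src/main.c", "dd" * 20)]
    taint = ""

    data = basehash
    for unihash in sorted(dep_unihashes):
        data += unihash
    for f, cs in file_checksum_values:
        if cs:
            if "/./" in f:
                data += "./" + f.split("/./")[1]
            data += cs
    if taint:
        data += taint
    print(hashlib.sha256(data.encode("utf-8")).hexdigest())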
331 def writeout_file_checksum_cache(self): 409 def writeout_file_checksum_cache(self):
@@ -340,9 +418,12 @@ class SignatureGeneratorBasic(SignatureGenerator):
340 def save_unitaskhashes(self): 418 def save_unitaskhashes(self):
341 self.unihash_cache.save(self.unitaskhashes) 419 self.unihash_cache.save(self.unitaskhashes)
342 420
343 def dump_sigtask(self, fn, task, stampbase, runtime):
344
345 tid = fn + ":" + task
421 def copy_unitaskhashes(self, targetdir):
422 self.unihash_cache.copyfile(targetdir)
423
424 def dump_sigtask(self, mcfn, task, stampbase, runtime):
425 tid = mcfn + ":" + task
426 mc = bb.runqueue.mc_from_tid(mcfn)
346 referencestamp = stampbase 427 referencestamp = stampbase
347 if isinstance(runtime, str) and runtime.startswith("customfile"): 428 if isinstance(runtime, str) and runtime.startswith("customfile"):
348 sigfile = stampbase 429 sigfile = stampbase
@@ -357,29 +438,34 @@ class SignatureGeneratorBasic(SignatureGenerator):
357 438
358 data = {} 439 data = {}
359 data['task'] = task 440 data['task'] = task
360 data['basewhitelist'] = self.basewhitelist
361 data['taskwhitelist'] = self.taskwhitelist
362 data['taskdeps'] = self.taskdeps[fn][task]
441 data['basehash_ignore_vars'] = self.basehash_ignore_vars
442 data['taskhash_ignore_tasks'] = self.taskhash_ignore_tasks
443 data['taskdeps'] = self.datacaches[mc].siggen_taskdeps[mcfn][task]
363 data['basehash'] = self.basehash[tid] 444 data['basehash'] = self.basehash[tid]
364 data['gendeps'] = {} 445 data['gendeps'] = {}
365 data['varvals'] = {} 446 data['varvals'] = {}
366 data['varvals'][task] = self.lookupcache[fn][task]
367 for dep in self.taskdeps[fn][task]:
368 if dep in self.basewhitelist:
447 data['varvals'][task] = self.datacaches[mc].siggen_varvals[mcfn][task]
448 for dep in self.datacaches[mc].siggen_taskdeps[mcfn][task]:
449 if dep in self.basehash_ignore_vars:
369 continue 450 continue
370 data['gendeps'][dep] = self.gendeps[fn][dep]
371 data['varvals'][dep] = self.lookupcache[fn][dep]
451 data['gendeps'][dep] = self.datacaches[mc].siggen_gendeps[mcfn][dep]
452 data['varvals'][dep] = self.datacaches[mc].siggen_varvals[mcfn][dep]
372 453
373 if runtime and tid in self.taskhash: 454 if runtime and tid in self.taskhash:
374 data['runtaskdeps'] = self.runtaskdeps[tid]
375 data['file_checksum_values'] = [(os.path.basename(f), cs) for f,cs in self.file_checksum_values[tid]]
455 data['runtaskdeps'] = [dep[0] for dep in sorted(self.runtaskdeps[tid])]
456 data['file_checksum_values'] = []
457 for f,cs in sorted(self.file_checksum_values[tid], key=clean_checksum_file_path):
458 if "/./" in f:
459 data['file_checksum_values'].append(("./" + f.split("/./")[1], cs))
460 else:
461 data['file_checksum_values'].append((os.path.basename(f), cs))
376 data['runtaskhashes'] = {} 462 data['runtaskhashes'] = {}
377 for dep in data['runtaskdeps']:
378 data['runtaskhashes'][dep] = self.get_unihash(dep)
463 for dep in self.runtaskdeps[tid]:
464 data['runtaskhashes'][dep[0]] = self.get_unihash(dep[1])
379 data['taskhash'] = self.taskhash[tid] 465 data['taskhash'] = self.taskhash[tid]
380 data['unihash'] = self.get_unihash(tid) 466 data['unihash'] = self.get_unihash(tid)
381 467
382 taint = self.read_taint(fn, task, referencestamp)
468 taint = self.read_taint(mcfn, task, referencestamp)
383 if taint: 469 if taint:
384 data['taint'] = taint 470 data['taint'] = taint
385 471
@@ -396,13 +482,13 @@ class SignatureGeneratorBasic(SignatureGenerator):
396 bb.error("Taskhash mismatch %s versus %s for %s" % (computed_taskhash, self.taskhash[tid], tid)) 482 bb.error("Taskhash mismatch %s versus %s for %s" % (computed_taskhash, self.taskhash[tid], tid))
397 sigfile = sigfile.replace(self.taskhash[tid], computed_taskhash) 483 sigfile = sigfile.replace(self.taskhash[tid], computed_taskhash)
398 484
399 fd, tmpfile = tempfile.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.")
485 fd, tmpfile = bb.utils.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.")
400 try: 486 try:
401 with os.fdopen(fd, "wb") as stream:
402 p = pickle.dump(data, stream, -1)
403 stream.flush()
487 with bb.compress.zstd.open(fd, "wt", encoding="utf-8", num_threads=1) as f:
488 json.dump(data, f, sort_keys=True, separators=(",", ":"), cls=SetEncoder)
489 f.flush()
404 os.chmod(tmpfile, 0o664) 490 os.chmod(tmpfile, 0o664)
405 os.rename(tmpfile, sigfile)
491 bb.utils.rename(tmpfile, sigfile)
406 except (OSError, IOError) as err: 492 except (OSError, IOError) as err:
407 try: 493 try:
408 os.unlink(tmpfile) 494 os.unlink(tmpfile)
@@ -410,18 +496,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
410 pass 496 pass
411 raise err 497 raise err
412 498
413 def dump_sigfn(self, fn, dataCaches, options):
414 if fn in self.taskdeps:
415 for task in self.taskdeps[fn]:
416 tid = fn + ":" + task
417 mc = bb.runqueue.mc_from_tid(tid)
418 if tid not in self.taskhash:
419 continue
420 if dataCaches[mc].basetaskhash[tid] != self.basehash[tid]:
421 bb.error("Bitbake's cached basehash does not match the one we just generated (%s)!" % tid)
422 bb.error("The mismatched hashes were %s and %s" % (dataCaches[mc].basetaskhash[tid], self.basehash[tid]))
423 self.dump_sigtask(fn, task, dataCaches[mc].stamp[fn], True)
424
425class SignatureGeneratorBasicHash(SignatureGeneratorBasic): 499class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
426 name = "basichash" 500 name = "basichash"
427 501
@@ -432,11 +506,11 @@ class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
432 # If task is not in basehash, then error 506 # If task is not in basehash, then error
433 return self.basehash[tid] 507 return self.basehash[tid]
434 508
435 def stampfile(self, stampbase, fn, taskname, extrainfo, clean=False):
436 if taskname != "do_setscene" and taskname.endswith("_setscene"):
437 tid = fn + ":" + taskname[:-9]
438 else:
439 tid = fn + ":" + taskname
509 def stampfile(self, stampbase, mcfn, taskname, extrainfo, clean=False):
510 if taskname.endswith("_setscene"):
511 tid = mcfn + ":" + taskname[:-9]
512 else:
513 tid = mcfn + ":" + taskname
440 if clean: 514 if clean:
441 h = "*" 515 h = "*"
442 else: 516 else:
@@ -444,29 +518,106 @@ class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
444 518
445 return ("%s.%s.%s.%s" % (stampbase, taskname, h, extrainfo)).rstrip('.') 519 return ("%s.%s.%s.%s" % (stampbase, taskname, h, extrainfo)).rstrip('.')
446 520
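The stamp name built above is '<stampbase>.<taskname>.<hash>.<extrainfo>', with empty trailing fields stripped by rstrip('.'). A quick illustration (path and hash hypothetical):

    stampbase = "/tmp/stamps/zlib-1.2.13"
    taskname = "do_compile"
    h = "0123abcd"
    extrainfo = ""
    print(("%s.%s.%s.%s" % (stampbase, taskname, h, extrainfo)).rstrip('.'))
    # /tmp/stamps/zlib-1.2.13.do_compile.0123abcd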
447 def stampcleanmask(self, stampbase, fn, taskname, extrainfo):
448 return self.stampfile(stampbase, fn, taskname, extrainfo, clean=True)
449
450 def invalidate_task(self, task, d, fn):
451 bb.note("Tainting hash to force rebuild of task %s, %s" % (fn, task))
452 bb.build.write_taint(task, d, fn)
521 def stampcleanmask(self, stampbase, mcfn, taskname, extrainfo):
522 return self.stampfile(stampbase, mcfn, taskname, extrainfo, clean=True)
523
524 def invalidate_task(self, task, mcfn):
525 bb.note("Tainting hash to force rebuild of task %s, %s" % (mcfn, task))
526
527 mc = bb.runqueue.mc_from_tid(mcfn)
528 stamp = self.datacaches[mc].stamp[mcfn]
529
530 taintfn = stamp + '.' + task + '.taint'
531
532 import uuid
533 bb.utils.mkdirhier(os.path.dirname(taintfn))
534 # The specific content of the taint file is not really important,
535 # we just need it to be random, so a random UUID is used
536 with open(taintfn, 'w') as taintf:
537 taintf.write(str(uuid.uuid4()))
453 538
454class SignatureGeneratorUniHashMixIn(object): 539class SignatureGeneratorUniHashMixIn(object):
455 def __init__(self, data): 540 def __init__(self, data):
456 self.extramethod = {} 541 self.extramethod = {}
542 # NOTE: The cache only tracks hashes that exist. Hashes that don't
543 # exist are always queried from the server since it is possible for
544 # hashes to appear over time, but much less likely for them to
545 # disappear
546 self.unihash_exists_cache = set()
547 self.username = None
548 self.password = None
549 self.env = {}
550
551 origenv = data.getVar("BB_ORIGENV")
552 for e in HASHSERV_ENVVARS:
553 value = data.getVar(e)
554 if not value and origenv:
555 value = origenv.getVar(e)
556 if value:
557 self.env[e] = value
457 super().__init__(data) 558 super().__init__(data)
458 559
459 def get_taskdata(self): 560 def get_taskdata(self):
460 return (self.server, self.method, self.extramethod) + super().get_taskdata()
561 return (self.server, self.method, self.extramethod, self.max_parallel, self.username, self.password, self.env) + super().get_taskdata()
461 562
462 def set_taskdata(self, data): 563 def set_taskdata(self, data):
463 self.server, self.method, self.extramethod = data[:3]
464 super().set_taskdata(data[3:])
564 self.server, self.method, self.extramethod, self.max_parallel, self.username, self.password, self.env = data[:7]
565 super().set_taskdata(data[7:])
566
567 def get_hashserv_creds(self):
568 if self.username and self.password:
569 return {
570 "username": self.username,
571 "password": self.password,
572 }
465 573
574 return {}
575
576 @contextmanager
577 def _client_env(self):
578 orig_env = os.environ.copy()
579 try:
580 for k, v in self.env.items():
581 os.environ[k] = v
582
583 yield
584 finally:
585 for k, v in self.env.items():
586 if k in orig_env:
587 os.environ[k] = orig_env[k]
588 else:
589 del os.environ[k]
590
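_client_env overlays the captured proxy/SSL variables onto os.environ for the duration of a hash server call, then restores the previous state. A standalone sketch of the same overlay/restore pattern (the variable value is hypothetical):

    import os
    from contextlib import contextmanager

    @contextmanager
    def overlay_env(env):
        orig = os.environ.copy()
        try:
            os.environ.update(env)
            yield
        finally:
            for k in env:
                if k in orig:
                    os.environ[k] = orig[k]
                else:
                    del os.environ[k]

    with overlay_env({"HTTPS_PROXY": "http://proxy.example:8080"}):
        print(os.environ["HTTPS_PROXY"])  # visible only inside the block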
591 @contextmanager
466 def client(self): 592 def client(self):
467 if getattr(self, '_client', None) is None:
468 self._client = hashserv.create_client(self.server)
469 return self._client
593 with self._client_env():
594 if getattr(self, '_client', None) is None:
595 self._client = hashserv.create_client(self.server, **self.get_hashserv_creds())
596 yield self._client
597
598 @contextmanager
599 def client_pool(self):
600 with self._client_env():
601 if getattr(self, '_client_pool', None) is None:
602 self._client_pool = hashserv.client.ClientPool(self.server, self.max_parallel, **self.get_hashserv_creds())
603 yield self._client_pool
604
605 def reset(self, data):
606 self.__close_clients()
607 return super().reset(data)
608
609 def exit(self):
610 self.__close_clients()
611 return super().exit()
612
613 def __close_clients(self):
614 with self._client_env():
615 if getattr(self, '_client', None) is not None:
616 self._client.close()
617 self._client = None
618 if getattr(self, '_client_pool', None) is not None:
619 self._client_pool.close()
620 self._client_pool = None
470 621
471 def get_stampfile_hash(self, tid): 622 def get_stampfile_hash(self, tid):
472 if tid in self.taskhash: 623 if tid in self.taskhash:
@@ -499,7 +650,7 @@ class SignatureGeneratorUniHashMixIn(object):
499 return None 650 return None
500 return unihash 651 return unihash
501 652
502 def get_unihash(self, tid):
653 def get_cached_unihash(self, tid):
503 taskhash = self.taskhash[tid] 654 taskhash = self.taskhash[tid]
504 655
505 # If its not a setscene task we can return 656 # If its not a setscene task we can return
@@ -514,40 +665,105 @@ class SignatureGeneratorUniHashMixIn(object):
514 self.unihash[tid] = unihash 665 self.unihash[tid] = unihash
515 return unihash 666 return unihash
516 667
517 # In the absence of being able to discover a unique hash from the
518 # server, make it be equivalent to the taskhash. The unique "hash" only
519 # really needs to be a unique string (not even necessarily a hash), but
520 # making it match the taskhash has a few advantages:
521 #
522 # 1) All of the sstate code that assumes hashes can be the same
523 # 2) It provides maximal compatibility with builders that don't use
524 # an equivalency server
525 # 3) The value is easy for multiple independent builders to derive the
526 # same unique hash from the same input. This means that if the
527 # independent builders find the same taskhash, but it isn't reported
528 # to the server, there is a better chance that they will agree on
529 # the unique hash.
530 unihash = taskhash
531
532 try:
533 method = self.method
534 if tid in self.extramethod:
535 method = method + self.extramethod[tid]
536 data = self.client().get_unihash(method, self.taskhash[tid])
537 if data:
538 unihash = data
668 return None
669
670 def _get_method(self, tid):
671 method = self.method
672 if tid in self.extramethod:
673 method = method + self.extramethod[tid]
674
675 return method
676
677 def unihashes_exist(self, query):
678 if len(query) == 0:
679 return {}
680
681 uncached_query = {}
682 result = {}
683 for key, unihash in query.items():
684 if unihash in self.unihash_exists_cache:
685 result[key] = True
686 else:
687 uncached_query[key] = unihash
688
689 if self.max_parallel <= 1 or len(uncached_query) <= 1:
690 # No parallelism required. Make the query serially with the single client
691 with self.client() as client:
692 uncached_result = {
693 key: client.unihash_exists(value) for key, value in uncached_query.items()
694 }
695 else:
696 with self.client_pool() as client_pool:
697 uncached_result = client_pool.unihashes_exist(uncached_query)
698
699 for key, exists in uncached_result.items():
700 if exists:
701 self.unihash_exists_cache.add(query[key])
702 result[key] = exists
703
704 return result
705
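unihashes_exist answers from the positive-only cache where it can and sends only the remainder to the hash server. A standalone sketch of that split (tids and hashes hypothetical):

    exists_cache = {"hash-a"}                      # unihashes already known to exist
    query = {"tid1": "hash-a", "tid2": "hash-b"}

    cached = {k: True for k, v in query.items() if v in exists_cache}
    uncached = {k: v for k, v in query.items() if v not in exists_cache}
    print(cached)    # {'tid1': True} -- answered locally
    print(uncached)  # {'tid2': 'hash-b'} -- still needs the hash server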
706 def get_unihash(self, tid):
707 return self.get_unihashes([tid])[tid]
708
709 def get_unihashes(self, tids):
710 """
711 For a iterable of tids, returns a dictionary that maps each tid to a
712 unihash
713 """
714 result = {}
715 queries = {}
716 query_result = {}
717
718 for tid in tids:
719 unihash = self.get_cached_unihash(tid)
720 if unihash:
721 result[tid] = unihash
722 else:
723 queries[tid] = (self._get_method(tid), self.taskhash[tid])
724
725 if len(queries) == 0:
726 return result
727
728 if self.max_parallel <= 1 or len(queries) <= 1:
729 # No parallelism required. Make the query serially with the single client
730 with self.client() as client:
731 for tid, args in queries.items():
732 query_result[tid] = client.get_unihash(*args)
733 else:
734 with self.client_pool() as client_pool:
735 query_result = client_pool.get_unihashes(queries)
736
737 for tid, unihash in query_result.items():
738 # In the absence of being able to discover a unique hash from the
739 # server, make it be equivalent to the taskhash. The unique "hash" only
740 # really needs to be a unique string (not even necessarily a hash), but
741 # making it match the taskhash has a few advantages:
742 #
743 # 1) All of the sstate code that assumes hashes can be the same
744 # 2) It provides maximal compatibility with builders that don't use
745 # an equivalency server
746 # 3) The value is easy for multiple independent builders to derive the
747 # same unique hash from the same input. This means that if the
748 # independent builders find the same taskhash, but it isn't reported
749 # to the server, there is a better chance that they will agree on
750 # the unique hash.
751 taskhash = self.taskhash[tid]
752 if unihash:
539 # A unique hash equal to the taskhash is not very interesting, 753 # A unique hash equal to the taskhash is not very interesting,
540 # so it is reported at debug level 2. If they differ, that 754
541 # is much more interesting, so it is reported at debug level 1 755 # is much more interesting, so it is reported at debug level 1
542 hashequiv_logger.debug((1, 2)[unihash == taskhash], 'Found unihash %s in place of %s for %s from %s' % (unihash, taskhash, tid, self.server))
543 else:
544 hashequiv_logger.debug2('No reported unihash for %s:%s from %s' % (tid, taskhash, self.server))
545 except hashserv.client.HashConnectionError as e:
546 bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
547
548 self.set_unihash(tid, unihash)
549 self.unihash[tid] = unihash
550 return unihash
756 hashequiv_logger.bbdebug((1, 2)[unihash == taskhash], 'Found unihash %s in place of %s for %s from %s' % (unihash, taskhash, tid, self.server))
757 else:
758 hashequiv_logger.debug2('No reported unihash for %s:%s from %s' % (tid, taskhash, self.server))
759 unihash = taskhash
760
761
762 self.set_unihash(tid, unihash)
763 self.unihash[tid] = unihash
764 result[tid] = unihash
765
766 return result
551 767
552 def report_unihash(self, path, task, d): 768 def report_unihash(self, path, task, d):
553 import importlib 769 import importlib
@@ -556,14 +772,14 @@ class SignatureGeneratorUniHashMixIn(object):
556 unihash = d.getVar('BB_UNIHASH') 772 unihash = d.getVar('BB_UNIHASH')
557 report_taskdata = d.getVar('SSTATE_HASHEQUIV_REPORT_TASKDATA') == '1' 773 report_taskdata = d.getVar('SSTATE_HASHEQUIV_REPORT_TASKDATA') == '1'
558 tempdir = d.getVar('T') 774 tempdir = d.getVar('T')
559 fn = d.getVar('BB_FILENAME')
560 tid = fn + ':do_' + task
775 mcfn = d.getVar('BB_FILENAME')
776 tid = mcfn + ':do_' + task
561 key = tid + ':' + taskhash 777 key = tid + ':' + taskhash
562 778
563 if self.setscenetasks and tid not in self.setscenetasks: 779 if self.setscenetasks and tid not in self.setscenetasks:
564 return 780 return
565 781
566 # This can happen if locked sigs are in action. Detect and just abort
782 # This can happen if locked sigs are in action. Detect and just exit
567 if taskhash != self.taskhash[tid]: 783 if taskhash != self.taskhash[tid]:
568 return 784 return
569 785
@@ -611,17 +827,19 @@ class SignatureGeneratorUniHashMixIn(object):
611 if tid in self.extramethod: 827 if tid in self.extramethod:
612 method = method + self.extramethod[tid] 828 method = method + self.extramethod[tid]
613 829
614 data = self.client().report_unihash(taskhash, method, outhash, unihash, extra_data)
830 with self.client() as client:
831 data = client.report_unihash(taskhash, method, outhash, unihash, extra_data)
832
615 new_unihash = data['unihash'] 833 new_unihash = data['unihash']
616 834
617 if new_unihash != unihash: 835 if new_unihash != unihash:
618 hashequiv_logger.debug('Task %s unihash changed %s -> %s by server %s' % (taskhash, unihash, new_unihash, self.server)) 836 hashequiv_logger.debug('Task %s unihash changed %s -> %s by server %s' % (taskhash, unihash, new_unihash, self.server))
619 bb.event.fire(bb.runqueue.taskUniHashUpdate(fn + ':do_' + task, new_unihash), d)
837 bb.event.fire(bb.runqueue.taskUniHashUpdate(mcfn + ':do_' + task, new_unihash), d)
620 self.set_unihash(tid, new_unihash) 838 self.set_unihash(tid, new_unihash)
621 d.setVar('BB_UNIHASH', new_unihash) 839 d.setVar('BB_UNIHASH', new_unihash)
622 else: 840 else:
623 hashequiv_logger.debug('Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server)) 841 hashequiv_logger.debug('Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server))
624 except hashserv.client.HashConnectionError as e:
842 except ConnectionError as e:
625 bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e))) 843 bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
626 finally: 844 finally:
627 if sigfile: 845 if sigfile:
@@ -642,7 +860,9 @@ class SignatureGeneratorUniHashMixIn(object):
642 if tid in self.extramethod: 860 if tid in self.extramethod:
643 method = method + self.extramethod[tid] 861 method = method + self.extramethod[tid]
644 862
645 data = self.client().report_unihash_equiv(taskhash, method, wanted_unihash, extra_data)
863 with self.client() as client:
864 data = client.report_unihash_equiv(taskhash, method, wanted_unihash, extra_data)
865
646 hashequiv_logger.verbose('Reported task %s as unihash %s to %s (%s)' % (tid, wanted_unihash, self.server, str(data))) 866 hashequiv_logger.verbose('Reported task %s as unihash %s to %s (%s)' % (tid, wanted_unihash, self.server, str(data)))
647 867
648 if data is None: 868 if data is None:
@@ -661,7 +881,7 @@ class SignatureGeneratorUniHashMixIn(object):
661 # TODO: What to do here? 881 # TODO: What to do here?
662 hashequiv_logger.verbose('Task %s unihash reported as unwanted hash %s' % (tid, finalunihash)) 882 hashequiv_logger.verbose('Task %s unihash reported as unwanted hash %s' % (tid, finalunihash))
663 883
664 except hashserv.client.HashConnectionError as e:
884 except ConnectionError as e:
665 bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e))) 885 bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
666 886
667 return False 887 return False
@@ -675,20 +895,20 @@ class SignatureGeneratorTestEquivHash(SignatureGeneratorUniHashMixIn, SignatureG
675 super().init_rundepcheck(data) 895 super().init_rundepcheck(data)
676 self.server = data.getVar('BB_HASHSERVE') 896 self.server = data.getVar('BB_HASHSERVE')
677 self.method = "sstate_output_hash" 897 self.method = "sstate_output_hash"
898 self.max_parallel = 1
678 899
679#
680# Dummy class used for bitbake-selftest
681#
682class SignatureGeneratorTestMulticonfigDepends(SignatureGeneratorBasicHash):
683 name = "TestMulticonfigDepends"
684 supports_multiconfig_datacaches = True
900def clean_checksum_file_path(file_checksum_tuple):
901 f, cs = file_checksum_tuple
902 if "/./" in f:
903 return "./" + f.split("/./")[1]
904 return f
685 905
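clean_checksum_file_path normalises checksum paths so that anything after a '/./' marker sorts and compares as a relative path. Copying the function exactly as added above (inputs hypothetical):

    def clean_checksum_file_path(file_checksum_tuple):
        f, cs = file_checksum_tuple
        if "/./" in f:
            return "./" + f.split("/./")[1]
        return f

    print(clean_checksum_file_path(("/abs/build/./src/main.c", "abcd")))  # ./src/main.c
    print(clean_checksum_file_path(("/abs/build/src/main.c", "abcd")))    # /abs/build/src/main.c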
686def dump_this_task(outfile, d): 906def dump_this_task(outfile, d):
687 import bb.parse 907 import bb.parse
688 fn = d.getVar("BB_FILENAME")
689 task = "do_" + d.getVar("BB_CURRENTTASK")
690 referencestamp = bb.build.stamp_internal(task, d, None, True)
691 bb.parse.siggen.dump_sigtask(fn, task, outfile, "customfile:" + referencestamp)
908 mcfn = d.getVar("BB_FILENAME")
909 task = "do_" + d.getVar("BB_CURRENTTASK")
910 referencestamp = bb.parse.siggen.stampfile_base(mcfn)
911 bb.parse.siggen.dump_sigtask(mcfn, task, outfile, "customfile:" + referencestamp)
692 912
693def init_colors(enable_color): 913def init_colors(enable_color):
694 """Initialise colour dict for passing to compare_sigfiles()""" 914 """Initialise colour dict for passing to compare_sigfiles()"""
@@ -741,38 +961,15 @@ def list_inline_diff(oldlist, newlist, colors=None):
741 ret.append(item) 961 ret.append(item)
742 return '[%s]' % (', '.join(ret)) 962 return '[%s]' % (', '.join(ret))
743 963
744def clean_basepath(basepath):
745 basepath, dir, recipe_task = basepath.rsplit("/", 2)
746 cleaned = dir + '/' + recipe_task
747
748 if basepath[0] == '/':
749 return cleaned
750
751 if basepath.startswith("mc:") and basepath.count(':') >= 2:
752 mc, mc_name, basepath = basepath.split(":", 2)
753 mc_suffix = ':mc:' + mc_name
754 else:
755 mc_suffix = ''
756
757 # mc stuff now removed from basepath. Whatever was next, if present will be the first
758 # suffix. ':/', recipe path start, marks the end of this. Something like
759 # 'virtual:a[:b[:c]]:/path...' (b and c being optional)
760 if basepath[0] != '/':
761 cleaned += ':' + basepath.split(':/', 1)[0]
762
763 return cleaned + mc_suffix
964# Handle renamed fields
965def handle_renames(data):
966 if 'basewhitelist' in data:
967 data['basehash_ignore_vars'] = data['basewhitelist']
968 del data['basewhitelist']
969 if 'taskwhitelist' in data:
970 data['taskhash_ignore_tasks'] = data['taskwhitelist']
971 del data['taskwhitelist']
764 972
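handle_renames lets current code read old siginfo files by mapping the retired field names onto their replacements in place. Copying the function exactly as added above (the sample data is hypothetical):

    def handle_renames(data):
        if 'basewhitelist' in data:
            data['basehash_ignore_vars'] = data['basewhitelist']
            del data['basewhitelist']
        if 'taskwhitelist' in data:
            data['taskhash_ignore_tasks'] = data['taskwhitelist']
            del data['taskwhitelist']

    old = {'basewhitelist': frozenset({'PATH'}), 'taskwhitelist': None}
    handle_renames(old)
    print(old)  # {'basehash_ignore_vars': frozenset({'PATH'}), 'taskhash_ignore_tasks': None}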
765def clean_basepaths(a):
766 b = {}
767 for x in a:
768 b[clean_basepath(x)] = a[x]
769 return b
770
771def clean_basepaths_list(a):
772 b = []
773 for x in a:
774 b.append(clean_basepath(x))
775 return b
776 973
777def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False): 974def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
778 output = [] 975 output = []
@@ -794,20 +991,29 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
794 formatparams.update(values) 991 formatparams.update(values)
795 return formatstr.format(**formatparams) 992 return formatstr.format(**formatparams)
796 993
797 with open(a, 'rb') as f:
798 p1 = pickle.Unpickler(f)
799 a_data = p1.load()
800 with open(b, 'rb') as f:
801 p2 = pickle.Unpickler(f)
802 b_data = p2.load()
803
804 def dict_diff(a, b, whitelist=set()):
994 try:
995 with bb.compress.zstd.open(a, "rt", encoding="utf-8", num_threads=1) as f:
996 a_data = json.load(f, object_hook=SetDecoder)
997 except (TypeError, OSError) as err:
998 bb.error("Failed to open sigdata file '%s': %s" % (a, str(err)))
999 raise err
1000 try:
1001 with bb.compress.zstd.open(b, "rt", encoding="utf-8", num_threads=1) as f:
1002 b_data = json.load(f, object_hook=SetDecoder)
1003 except (TypeError, OSError) as err:
1004 bb.error("Failed to open sigdata file '%s': %s" % (b, str(err)))
1005 raise err
1006
1007 for data in [a_data, b_data]:
1008 handle_renames(data)
1009
1010 def dict_diff(a, b, ignored_vars=set()):
805 sa = set(a.keys()) 1011 sa = set(a.keys())
806 sb = set(b.keys()) 1012 sb = set(b.keys())
807 common = sa & sb 1013 common = sa & sb
808 changed = set() 1014 changed = set()
809 for i in common: 1015 for i in common:
810 if a[i] != b[i] and i not in whitelist: 1016 if a[i] != b[i] and i not in ignored_vars:
811 changed.add(i) 1017 changed.add(i)
812 added = sb - sa 1018 added = sb - sa
813 removed = sa - sb 1019 removed = sa - sb
@@ -815,11 +1021,11 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
815 1021
816 def file_checksums_diff(a, b): 1022 def file_checksums_diff(a, b):
817 from collections import Counter 1023 from collections import Counter
818 # Handle old siginfo format 1024
819 if isinstance(a, dict): 1025 # Convert lists back to tuples
820 a = [(os.path.basename(f), cs) for f, cs in a.items()] 1026 a = [(f[0], f[1]) for f in a]
821 if isinstance(b, dict): 1027 b = [(f[0], f[1]) for f in b]
822 b = [(os.path.basename(f), cs) for f, cs in b.items()] 1028
823 # Compare lists, ensuring we can handle duplicate filenames if they exist 1029 # Compare lists, ensuring we can handle duplicate filenames if they exist
824 removedcount = Counter(a) 1030 removedcount = Counter(a)
825 removedcount.subtract(b) 1031 removedcount.subtract(b)
@@ -846,15 +1052,15 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
846 removed = [x[0] for x in removed] 1052 removed = [x[0] for x in removed]
847 return changed, added, removed 1053 return changed, added, removed
848 1054
849 if 'basewhitelist' in a_data and a_data['basewhitelist'] != b_data['basewhitelist']: 1055 if 'basehash_ignore_vars' in a_data and a_data['basehash_ignore_vars'] != b_data['basehash_ignore_vars']:
850 output.append(color_format("{color_title}basewhitelist changed{color_default} from '%s' to '%s'") % (a_data['basewhitelist'], b_data['basewhitelist'])) 1056 output.append(color_format("{color_title}basehash_ignore_vars changed{color_default} from '%s' to '%s'") % (a_data['basehash_ignore_vars'], b_data['basehash_ignore_vars']))
851 if a_data['basewhitelist'] and b_data['basewhitelist']: 1057 if a_data['basehash_ignore_vars'] and b_data['basehash_ignore_vars']:
852 output.append("changed items: %s" % a_data['basewhitelist'].symmetric_difference(b_data['basewhitelist'])) 1058 output.append("changed items: %s" % a_data['basehash_ignore_vars'].symmetric_difference(b_data['basehash_ignore_vars']))
853 1059
854 if 'taskwhitelist' in a_data and a_data['taskwhitelist'] != b_data['taskwhitelist']: 1060 if 'taskhash_ignore_tasks' in a_data and a_data['taskhash_ignore_tasks'] != b_data['taskhash_ignore_tasks']:
855 output.append(color_format("{color_title}taskwhitelist changed{color_default} from '%s' to '%s'") % (a_data['taskwhitelist'], b_data['taskwhitelist'])) 1061 output.append(color_format("{color_title}taskhash_ignore_tasks changed{color_default} from '%s' to '%s'") % (a_data['taskhash_ignore_tasks'], b_data['taskhash_ignore_tasks']))
856 if a_data['taskwhitelist'] and b_data['taskwhitelist']: 1062 if a_data['taskhash_ignore_tasks'] and b_data['taskhash_ignore_tasks']:
857 output.append("changed items: %s" % a_data['taskwhitelist'].symmetric_difference(b_data['taskwhitelist'])) 1063 output.append("changed items: %s" % a_data['taskhash_ignore_tasks'].symmetric_difference(b_data['taskhash_ignore_tasks']))
858 1064
859 if a_data['taskdeps'] != b_data['taskdeps']: 1065 if a_data['taskdeps'] != b_data['taskdeps']:
860 output.append(color_format("{color_title}Task dependencies changed{color_default} from:\n%s\nto:\n%s") % (sorted(a_data['taskdeps']), sorted(b_data['taskdeps']))) 1066 output.append(color_format("{color_title}Task dependencies changed{color_default} from:\n%s\nto:\n%s") % (sorted(a_data['taskdeps']), sorted(b_data['taskdeps'])))
@@ -862,23 +1068,23 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
862 if a_data['basehash'] != b_data['basehash'] and not collapsed: 1068 if a_data['basehash'] != b_data['basehash'] and not collapsed:
863 output.append(color_format("{color_title}basehash changed{color_default} from %s to %s") % (a_data['basehash'], b_data['basehash'])) 1069 output.append(color_format("{color_title}basehash changed{color_default} from %s to %s") % (a_data['basehash'], b_data['basehash']))
864 1070
865 changed, added, removed = dict_diff(a_data['gendeps'], b_data['gendeps'], a_data['basewhitelist'] & b_data['basewhitelist']) 1071 changed, added, removed = dict_diff(a_data['gendeps'], b_data['gendeps'], a_data['basehash_ignore_vars'] & b_data['basehash_ignore_vars'])
866 if changed: 1072 if changed:
867 for dep in changed: 1073 for dep in sorted(changed):
868 output.append(color_format("{color_title}List of dependencies for variable %s changed from '{color_default}%s{color_title}' to '{color_default}%s{color_title}'") % (dep, a_data['gendeps'][dep], b_data['gendeps'][dep])) 1074 output.append(color_format("{color_title}List of dependencies for variable %s changed from '{color_default}%s{color_title}' to '{color_default}%s{color_title}'") % (dep, a_data['gendeps'][dep], b_data['gendeps'][dep]))
869 if a_data['gendeps'][dep] and b_data['gendeps'][dep]: 1075 if a_data['gendeps'][dep] and b_data['gendeps'][dep]:
870 output.append("changed items: %s" % a_data['gendeps'][dep].symmetric_difference(b_data['gendeps'][dep])) 1076 output.append("changed items: %s" % a_data['gendeps'][dep].symmetric_difference(b_data['gendeps'][dep]))
871 if added: 1077 if added:
872 for dep in added: 1078 for dep in sorted(added):
873 output.append(color_format("{color_title}Dependency on variable %s was added") % (dep)) 1079 output.append(color_format("{color_title}Dependency on variable %s was added") % (dep))
874 if removed: 1080 if removed:
875 for dep in removed: 1081 for dep in sorted(removed):
876 output.append(color_format("{color_title}Dependency on Variable %s was removed") % (dep)) 1082 output.append(color_format("{color_title}Dependency on Variable %s was removed") % (dep))
877 1083
878 1084
879 changed, added, removed = dict_diff(a_data['varvals'], b_data['varvals']) 1085 changed, added, removed = dict_diff(a_data['varvals'], b_data['varvals'])
880 if changed: 1086 if changed:
881 for dep in changed: 1087 for dep in sorted(changed):
882 oldval = a_data['varvals'][dep] 1088 oldval = a_data['varvals'][dep]
883 newval = b_data['varvals'][dep] 1089 newval = b_data['varvals'][dep]
884 if newval and oldval and ('\n' in oldval or '\n' in newval): 1090 if newval and oldval and ('\n' in oldval or '\n' in newval):
@@ -902,9 +1108,9 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
902 output.append(color_format("{color_title}Variable {var} value changed from '{color_default}{oldval}{color_title}' to '{color_default}{newval}{color_title}'{color_default}", var=dep, oldval=oldval, newval=newval)) 1108 output.append(color_format("{color_title}Variable {var} value changed from '{color_default}{oldval}{color_title}' to '{color_default}{newval}{color_title}'{color_default}", var=dep, oldval=oldval, newval=newval))
903 1109
904 if not 'file_checksum_values' in a_data: 1110 if not 'file_checksum_values' in a_data:
905 a_data['file_checksum_values'] = {} 1111 a_data['file_checksum_values'] = []
906 if not 'file_checksum_values' in b_data: 1112 if not 'file_checksum_values' in b_data:
907 b_data['file_checksum_values'] = {} 1113 b_data['file_checksum_values'] = []
908 1114
909 changed, added, removed = file_checksums_diff(a_data['file_checksum_values'], b_data['file_checksum_values']) 1115 changed, added, removed = file_checksums_diff(a_data['file_checksum_values'], b_data['file_checksum_values'])
910 if changed: 1116 if changed:
@@ -931,11 +1137,11 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
931 a = a_data['runtaskdeps'][idx] 1137 a = a_data['runtaskdeps'][idx]
932 b = b_data['runtaskdeps'][idx] 1138 b = b_data['runtaskdeps'][idx]
933 if a_data['runtaskhashes'][a] != b_data['runtaskhashes'][b] and not collapsed: 1139 if a_data['runtaskhashes'][a] != b_data['runtaskhashes'][b] and not collapsed:
934 changed.append("%s with hash %s\n changed to\n%s with hash %s" % (clean_basepath(a), a_data['runtaskhashes'][a], clean_basepath(b), b_data['runtaskhashes'][b])) 1140 changed.append("%s with hash %s\n changed to\n%s with hash %s" % (a, a_data['runtaskhashes'][a], b, b_data['runtaskhashes'][b]))
935 1141
936 if changed: 1142 if changed:
937 clean_a = clean_basepaths_list(a_data['runtaskdeps']) 1143 clean_a = a_data['runtaskdeps']
938 clean_b = clean_basepaths_list(b_data['runtaskdeps']) 1144 clean_b = b_data['runtaskdeps']
939 if clean_a != clean_b: 1145 if clean_a != clean_b:
940 output.append(color_format("{color_title}runtaskdeps changed:{color_default}\n%s") % list_inline_diff(clean_a, clean_b, colors)) 1146 output.append(color_format("{color_title}runtaskdeps changed:{color_default}\n%s") % list_inline_diff(clean_a, clean_b, colors))
941 else: 1147 else:
@@ -948,7 +1154,7 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
948 b = b_data['runtaskhashes'] 1154 b = b_data['runtaskhashes']
949 changed, added, removed = dict_diff(a, b) 1155 changed, added, removed = dict_diff(a, b)
950 if added: 1156 if added:
951 for dep in added: 1157 for dep in sorted(added):
952 bdep_found = False 1158 bdep_found = False
953 if removed: 1159 if removed:
954 for bdep in removed: 1160 for bdep in removed:
@@ -956,9 +1162,9 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
956 #output.append("Dependency on task %s was replaced by %s with same hash" % (dep, bdep)) 1162 #output.append("Dependency on task %s was replaced by %s with same hash" % (dep, bdep))
957 bdep_found = True 1163 bdep_found = True
958 if not bdep_found: 1164 if not bdep_found:
959 output.append(color_format("{color_title}Dependency on task %s was added{color_default} with hash %s") % (clean_basepath(dep), b[dep])) 1165 output.append(color_format("{color_title}Dependency on task %s was added{color_default} with hash %s") % (dep, b[dep]))
960 if removed: 1166 if removed:
961 for dep in removed: 1167 for dep in sorted(removed):
962 adep_found = False 1168 adep_found = False
963 if added: 1169 if added:
964 for adep in added: 1170 for adep in added:
@@ -966,11 +1172,11 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
966 #output.append("Dependency on task %s was replaced by %s with same hash" % (adep, dep)) 1172 #output.append("Dependency on task %s was replaced by %s with same hash" % (adep, dep))
967 adep_found = True 1173 adep_found = True
968 if not adep_found: 1174 if not adep_found:
969 output.append(color_format("{color_title}Dependency on task %s was removed{color_default} with hash %s") % (clean_basepath(dep), a[dep])) 1175 output.append(color_format("{color_title}Dependency on task %s was removed{color_default} with hash %s") % (dep, a[dep]))
970 if changed: 1176 if changed:
971 for dep in changed: 1177 for dep in sorted(changed):
972 if not collapsed: 1178 if not collapsed:
973 output.append(color_format("{color_title}Hash for dependent task %s changed{color_default} from %s to %s") % (clean_basepath(dep), a[dep], b[dep])) 1179 output.append(color_format("{color_title}Hash for task dependency %s changed{color_default} from %s to %s") % (dep, a[dep], b[dep]))
974 if callable(recursecb): 1180 if callable(recursecb):
975 recout = recursecb(dep, a[dep], b[dep]) 1181 recout = recursecb(dep, a[dep], b[dep])
976 if recout: 1182 if recout:
@@ -980,6 +1186,7 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
980 # If a dependent hash changed, might as well print the line above and then defer to the changes in 1186 # If a dependent hash changed, might as well print the line above and then defer to the changes in
981 # that hash since in all likelihood, they're the same changes this task also saw. 1187 # that hash since in all likelihood, they're the same changes this task also saw.
982 output = [output[-1]] + recout 1188 output = [output[-1]] + recout
1189 break
983 1190
984 a_taint = a_data.get('taint', None) 1191 a_taint = a_data.get('taint', None)
985 b_taint = b_data.get('taint', None) 1192 b_taint = b_data.get('taint', None)
@@ -1001,7 +1208,7 @@ def calc_basehash(sigdata):
1001 basedata = '' 1208 basedata = ''
1002 1209
1003 alldeps = sigdata['taskdeps'] 1210 alldeps = sigdata['taskdeps']
1004 for dep in alldeps: 1211 for dep in sorted(alldeps):
1005 basedata = basedata + dep 1212 basedata = basedata + dep
1006 val = sigdata['varvals'][dep] 1213 val = sigdata['varvals'][dep]
1007 if val is not None: 1214 if val is not None:
@@ -1017,6 +1224,8 @@ def calc_taskhash(sigdata):
1017 1224
1018 for c in sigdata['file_checksum_values']: 1225 for c in sigdata['file_checksum_values']:
1019 if c[1]: 1226 if c[1]:
1227 if "./" in c[0]:
1228 data = data + c[0]
1020 data = data + c[1] 1229 data = data + c[1]
1021 1230
1022 if 'taint' in sigdata: 1231 if 'taint' in sigdata:
@@ -1031,32 +1240,37 @@ def calc_taskhash(sigdata):
1031def dump_sigfile(a): 1240def dump_sigfile(a):
1032 output = [] 1241 output = []
1033 1242
1034 with open(a, 'rb') as f: 1243 try:
1035 p1 = pickle.Unpickler(f) 1244 with bb.compress.zstd.open(a, "rt", encoding="utf-8", num_threads=1) as f:
1036 a_data = p1.load() 1245 a_data = json.load(f, object_hook=SetDecoder)
1246 except (TypeError, OSError) as err:
1247 bb.error("Failed to open sigdata file '%s': %s" % (a, str(err)))
1248 raise err
1249
1250 handle_renames(a_data)
1037 1251
1038 output.append("basewhitelist: %s" % (a_data['basewhitelist'])) 1252 output.append("basehash_ignore_vars: %s" % (sorted(a_data['basehash_ignore_vars'])))
1039 1253
1040 output.append("taskwhitelist: %s" % (a_data['taskwhitelist'])) 1254 output.append("taskhash_ignore_tasks: %s" % (sorted(a_data['taskhash_ignore_tasks'] or [])))
1041 1255
1042 output.append("Task dependencies: %s" % (sorted(a_data['taskdeps']))) 1256 output.append("Task dependencies: %s" % (sorted(a_data['taskdeps'])))
1043 1257
1044 output.append("basehash: %s" % (a_data['basehash'])) 1258 output.append("basehash: %s" % (a_data['basehash']))
1045 1259
1046 for dep in a_data['gendeps']: 1260 for dep in sorted(a_data['gendeps']):
1047 output.append("List of dependencies for variable %s is %s" % (dep, a_data['gendeps'][dep])) 1261 output.append("List of dependencies for variable %s is %s" % (dep, sorted(a_data['gendeps'][dep])))
1048 1262
1049 for dep in a_data['varvals']: 1263 for dep in sorted(a_data['varvals']):
1050 output.append("Variable %s value is %s" % (dep, a_data['varvals'][dep])) 1264 output.append("Variable %s value is %s" % (dep, a_data['varvals'][dep]))
1051 1265
1052 if 'runtaskdeps' in a_data: 1266 if 'runtaskdeps' in a_data:
1053 output.append("Tasks this task depends on: %s" % (a_data['runtaskdeps'])) 1267 output.append("Tasks this task depends on: %s" % (sorted(a_data['runtaskdeps'])))
1054 1268
1055 if 'file_checksum_values' in a_data: 1269 if 'file_checksum_values' in a_data:
1056 output.append("This task depends on the checksums of files: %s" % (a_data['file_checksum_values'])) 1270 output.append("This task depends on the checksums of files: %s" % (sorted(a_data['file_checksum_values'])))
1057 1271
1058 if 'runtaskhashes' in a_data: 1272 if 'runtaskhashes' in a_data:
1059 for dep in a_data['runtaskhashes']: 1273 for dep in sorted(a_data['runtaskhashes']):
1060 output.append("Hash for dependent task %s is %s" % (dep, a_data['runtaskhashes'][dep])) 1274 output.append("Hash for dependent task %s is %s" % (dep, a_data['runtaskhashes'][dep]))
1061 1275
1062 if 'taint' in a_data: 1276 if 'taint' in a_data:
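For context: the hunks above replace pickled sigdata with zstd-compressed JSON, so sets must be round-tripped through a marker object. A minimal sketch of the load path, mirroring the calls in compare_sigfiles() and dump_sigfile() (the SetDecoder shown here is illustrative; the real hook lives elsewhere in siggen.py):

import json
import bb.compress.zstd

def SetDecoder(dct):
    # Illustrative: rebuild sets the encoder stored as {"_set_object": [...]}
    if "_set_object" in dct:
        return frozenset(dct["_set_object"])
    return dct

def load_sigdata(path):
    # Same pattern as the diff: zstd-compressed UTF-8 JSON text, one thread
    with bb.compress.zstd.open(path, "rt", encoding="utf-8", num_threads=1) as f:
        return json.load(f, object_hook=SetDecoder)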
diff --git a/bitbake/lib/bb/taskdata.py b/bitbake/lib/bb/taskdata.py
index 47bad6d1fa..66545a65af 100644
--- a/bitbake/lib/bb/taskdata.py
+++ b/bitbake/lib/bb/taskdata.py
@@ -39,7 +39,7 @@ class TaskData:
39 """ 39 """
40 BitBake Task Data implementation 40 BitBake Task Data implementation
41 """ 41 """
42 def __init__(self, abort = True, skiplist = None, allowincomplete = False): 42 def __init__(self, halt = True, skiplist = None, allowincomplete = False):
43 self.build_targets = {} 43 self.build_targets = {}
44 self.run_targets = {} 44 self.run_targets = {}
45 45
@@ -57,7 +57,7 @@ class TaskData:
57 self.failed_rdeps = [] 57 self.failed_rdeps = []
58 self.failed_fns = [] 58 self.failed_fns = []
59 59
60 self.abort = abort 60 self.halt = halt
61 self.allowincomplete = allowincomplete 61 self.allowincomplete = allowincomplete
62 62
63 self.skiplist = skiplist 63 self.skiplist = skiplist
@@ -328,7 +328,7 @@ class TaskData:
328 try: 328 try:
329 self.add_provider_internal(cfgData, dataCache, item) 329 self.add_provider_internal(cfgData, dataCache, item)
330 except bb.providers.NoProvider: 330 except bb.providers.NoProvider:
331 if self.abort: 331 if self.halt:
332 raise 332 raise
333 self.remove_buildtarget(item) 333 self.remove_buildtarget(item)
334 334
@@ -451,12 +451,12 @@ class TaskData:
451 for target in self.build_targets: 451 for target in self.build_targets:
452 if fn in self.build_targets[target]: 452 if fn in self.build_targets[target]:
453 self.build_targets[target].remove(fn) 453 self.build_targets[target].remove(fn)
454 if len(self.build_targets[target]) == 0: 454 if not self.build_targets[target]:
455 self.remove_buildtarget(target, missing_list) 455 self.remove_buildtarget(target, missing_list)
456 for target in self.run_targets: 456 for target in self.run_targets:
457 if fn in self.run_targets[target]: 457 if fn in self.run_targets[target]:
458 self.run_targets[target].remove(fn) 458 self.run_targets[target].remove(fn)
459 if len(self.run_targets[target]) == 0: 459 if not self.run_targets[target]:
460 self.remove_runtarget(target, missing_list) 460 self.remove_runtarget(target, missing_list)
461 461
462 def remove_buildtarget(self, target, missing_list=None): 462 def remove_buildtarget(self, target, missing_list=None):
@@ -479,7 +479,7 @@ class TaskData:
479 fn = tid.rsplit(":",1)[0] 479 fn = tid.rsplit(":",1)[0]
480 self.fail_fn(fn, missing_list) 480 self.fail_fn(fn, missing_list)
481 481
482 if self.abort and target in self.external_targets: 482 if self.halt and target in self.external_targets:
483 logger.error("Required build target '%s' has no buildable providers.\nMissing or unbuildable dependency chain was: %s", target, missing_list) 483 logger.error("Required build target '%s' has no buildable providers.\nMissing or unbuildable dependency chain was: %s", target, missing_list)
484 raise bb.providers.NoProvider(target) 484 raise bb.providers.NoProvider(target)
485 485
@@ -516,7 +516,7 @@ class TaskData:
516 self.add_provider_internal(cfgData, dataCache, target) 516 self.add_provider_internal(cfgData, dataCache, target)
517 added = added + 1 517 added = added + 1
518 except bb.providers.NoProvider: 518 except bb.providers.NoProvider:
519 if self.abort and target in self.external_targets and not self.allowincomplete: 519 if self.halt and target in self.external_targets and not self.allowincomplete:
520 raise 520 raise
521 if not self.allowincomplete: 521 if not self.allowincomplete:
522 self.remove_buildtarget(target) 522 self.remove_buildtarget(target)
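The abort-to-halt rename above also changes the TaskData constructor keyword, so out-of-tree callers need the new spelling. A hypothetical before/after:

import bb.taskdata

# Before this change:
#   td = bb.taskdata.TaskData(abort=False, allowincomplete=True)
# After it:
td = bb.taskdata.TaskData(halt=False, allowincomplete=True)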
diff --git a/bitbake/lib/bb/tests/codeparser.py b/bitbake/lib/bb/tests/codeparser.py
index 826a2d2f6d..f6585fb3aa 100644
--- a/bitbake/lib/bb/tests/codeparser.py
+++ b/bitbake/lib/bb/tests/codeparser.py
@@ -44,6 +44,7 @@ class VariableReferenceTest(ReferenceTest):
44 def parseExpression(self, exp): 44 def parseExpression(self, exp):
45 parsedvar = self.d.expandWithRefs(exp, None) 45 parsedvar = self.d.expandWithRefs(exp, None)
46 self.references = parsedvar.references 46 self.references = parsedvar.references
47 self.execs = parsedvar.execs
47 48
48 def test_simple_reference(self): 49 def test_simple_reference(self):
49 self.setEmptyVars(["FOO"]) 50 self.setEmptyVars(["FOO"])
@@ -61,6 +62,11 @@ class VariableReferenceTest(ReferenceTest):
61 self.parseExpression("${@d.getVar('BAR') + 'foo'}") 62 self.parseExpression("${@d.getVar('BAR') + 'foo'}")
62 self.assertReferences(set(["BAR"])) 63 self.assertReferences(set(["BAR"]))
63 64
65 def test_python_exec_reference(self):
66 self.parseExpression("${@eval('3 * 5')}")
67 self.assertReferences(set())
68 self.assertExecs(set(["eval"]))
69
64class ShellReferenceTest(ReferenceTest): 70class ShellReferenceTest(ReferenceTest):
65 71
66 def parseExpression(self, exp): 72 def parseExpression(self, exp):
@@ -111,9 +117,9 @@ ${D}${libdir}/pkgconfig/*.pc
111 self.assertExecs(set(["sed"])) 117 self.assertExecs(set(["sed"]))
112 118
113 def test_parameter_expansion_modifiers(self): 119 def test_parameter_expansion_modifiers(self):
114 # - and + are also valid modifiers for parameter expansion, but are 120 # -, + and : are also valid modifiers for parameter expansion, but are
115 # valid characters in bitbake variable names, so are not included here 121 # valid characters in bitbake variable names, so are not included here
116 for i in ('=', ':-', ':=', '?', ':?', ':+', '#', '%', '##', '%%'): 122 for i in ('=', '?', '#', '%', '##', '%%'):
117 name = "foo%sbar" % i 123 name = "foo%sbar" % i
118 self.parseExpression("${%s}" % name) 124 self.parseExpression("${%s}" % name)
119 self.assertNotIn(name, self.references) 125 self.assertNotIn(name, self.references)
@@ -318,7 +324,7 @@ d.getVar(a(), False)
318 "filename": "example.bb", 324 "filename": "example.bb",
319 }) 325 })
320 326
321 deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d) 327 deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
322 328
323 self.assertEqual(deps, set(["somevar", "bar", "something", "inexpand", "test", "test2", "a"])) 329 self.assertEqual(deps, set(["somevar", "bar", "something", "inexpand", "test", "test2", "a"]))
324 330
@@ -365,7 +371,7 @@ esac
365 self.d.setVarFlags("FOO", {"func": True}) 371 self.d.setVarFlags("FOO", {"func": True})
366 self.setEmptyVars(execs) 372 self.setEmptyVars(execs)
367 373
368 deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d) 374 deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
369 375
370 self.assertEqual(deps, set(["somevar", "inverted"] + execs)) 376 self.assertEqual(deps, set(["somevar", "inverted"] + execs))
371 377
@@ -375,7 +381,7 @@ esac
375 self.d.setVar("FOO", "foo=oe_libinstall; eval $foo") 381 self.d.setVar("FOO", "foo=oe_libinstall; eval $foo")
376 self.d.setVarFlag("FOO", "vardeps", "oe_libinstall") 382 self.d.setVarFlag("FOO", "vardeps", "oe_libinstall")
377 383
378 deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d) 384 deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
379 385
380 self.assertEqual(deps, set(["oe_libinstall"])) 386 self.assertEqual(deps, set(["oe_libinstall"]))
381 387
@@ -384,7 +390,7 @@ esac
384 self.d.setVar("FOO", "foo=oe_libinstall; eval $foo") 390 self.d.setVar("FOO", "foo=oe_libinstall; eval $foo")
385 self.d.setVarFlag("FOO", "vardeps", "${@'oe_libinstall'}") 391 self.d.setVarFlag("FOO", "vardeps", "${@'oe_libinstall'}")
386 392
387 deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d) 393 deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
388 394
389 self.assertEqual(deps, set(["oe_libinstall"])) 395 self.assertEqual(deps, set(["oe_libinstall"]))
390 396
@@ -399,7 +405,7 @@ esac
399 # Check dependencies 405 # Check dependencies
400 self.d.setVar('ANOTHERVAR', expr) 406 self.d.setVar('ANOTHERVAR', expr)
401 self.d.setVar('TESTVAR', 'anothervalue testval testval2') 407 self.d.setVar('TESTVAR', 'anothervalue testval testval2')
402 deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), self.d) 408 deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
403 self.assertEqual(sorted(values.splitlines()), 409 self.assertEqual(sorted(values.splitlines()),
404 sorted([expr, 410 sorted([expr,
405 'TESTVAR{anothervalue} = Set', 411 'TESTVAR{anothervalue} = Set',
@@ -412,11 +418,55 @@ esac
412 # Check final value 418 # Check final value
413 self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['anothervalue', 'yetanothervalue', 'lastone']) 419 self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['anothervalue', 'yetanothervalue', 'lastone'])
414 420
421 def test_contains_vardeps_excluded(self):
422 # Check the ignored_vars option to build_dependencies is handled by the contains functionality
423 varval = '${TESTVAR2} ${@bb.utils.filter("TESTVAR", "somevalue anothervalue", d)}'
424 self.d.setVar('ANOTHERVAR', varval)
425 self.d.setVar('TESTVAR', 'anothervalue testval testval2')
426 self.d.setVar('TESTVAR2', 'testval3')
427 deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), set(), set(["TESTVAR"]), self.d, self.d)
428 self.assertEqual(sorted(values.splitlines()), sorted([varval]))
429 self.assertEqual(deps, set(["TESTVAR2"]))
430 self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['testval3', 'anothervalue'])
431
432 # Check the vardepsexclude flag is handled by the contains functionality
433 self.d.setVarFlag('ANOTHERVAR', 'vardepsexclude', 'TESTVAR')
434 deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
435 self.assertEqual(sorted(values.splitlines()), sorted([varval]))
436 self.assertEqual(deps, set(["TESTVAR2"]))
437 self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['testval3', 'anothervalue'])
438
439 def test_contains_vardeps_override_operators(self):
440 # Check override operators handle dependencies correctly with the contains functionality
441 expr_plain = 'testval'
442 expr_prepend = '${@bb.utils.filter("TESTVAR1", "testval1", d)} '
443 expr_append = ' ${@bb.utils.filter("TESTVAR2", "testval2", d)}'
444 expr_remove = '${@bb.utils.contains("TESTVAR3", "no-testval", "testval", "", d)}'
445 # Check dependencies
446 self.d.setVar('ANOTHERVAR', expr_plain)
447 self.d.prependVar('ANOTHERVAR', expr_prepend)
448 self.d.appendVar('ANOTHERVAR', expr_append)
449 self.d.setVar('ANOTHERVAR:remove', expr_remove)
450 self.d.setVar('TESTVAR1', 'blah')
451 self.d.setVar('TESTVAR2', 'testval2')
452 self.d.setVar('TESTVAR3', 'no-testval')
453 deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
454 self.assertEqual(sorted(values.splitlines()),
455 sorted([
456 expr_prepend + expr_plain + expr_append,
457 '_remove of ' + expr_remove,
458 'TESTVAR1{testval1} = Unset',
459 'TESTVAR2{testval2} = Set',
460 'TESTVAR3{no-testval} = Set',
461 ]))
462 # Check final value
463 self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['testval2'])
464
415 #Currently no wildcard support 465 #Currently no wildcard support
416 #def test_vardeps_wildcards(self): 466 #def test_vardeps_wildcards(self):
417 # self.d.setVar("oe_libinstall", "echo test") 467 # self.d.setVar("oe_libinstall", "echo test")
418 # self.d.setVar("FOO", "foo=oe_libinstall; eval $foo") 468 # self.d.setVar("FOO", "foo=oe_libinstall; eval $foo")
419 # self.d.setVarFlag("FOO", "vardeps", "oe_*") 469 # self.d.setVarFlag("FOO", "vardeps", "oe_*")
420 # self.assertEquals(deps, set(["oe_libinstall"])) 470 # self.assertEqual(deps, set(["oe_libinstall"]))
421 471
422 472
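Each build_dependencies() call in these tests gains two extra set arguments and a second datastore; test_contains_vardeps_excluded shows the sixth positional argument is the ignored_vars set. A sketch of the new call shape (the roles marked "inferred" are guesses, not the authoritative parameter names; see bb/data.py):

deps, values = bb.data.build_dependencies(
    "FOO",               # variable to compute dependencies for
    set(self.d.keys()),  # all known keys
    set(),               # inferred: module/function dependencies
    set(),               # inferred: shell dependencies
    set(),               # inferred: excluded varflags
    set(),               # ignored_vars, exercised by test_contains_vardeps_excluded
    self.d,              # datastore
    self.d,              # datastore used for code parsing
)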
diff --git a/bitbake/lib/bb/tests/color.py b/bitbake/lib/bb/tests/color.py
index bf03750c69..bb70cb393d 100644
--- a/bitbake/lib/bb/tests/color.py
+++ b/bitbake/lib/bb/tests/color.py
@@ -20,7 +20,7 @@ class ProgressWatcher:
20 def __init__(self): 20 def __init__(self):
21 self._reports = [] 21 self._reports = []
22 22
23 def handle_event(self, event): 23 def handle_event(self, event, d):
24 self._reports.append((event.progress, event.rate)) 24 self._reports.append((event.progress, event.rate))
25 25
26 def reports(self): 26 def reports(self):
@@ -31,7 +31,7 @@ class ColorCodeTests(unittest.TestCase):
31 def setUp(self): 31 def setUp(self):
32 self.d = bb.data.init() 32 self.d = bb.data.init()
33 self._progress_watcher = ProgressWatcher() 33 self._progress_watcher = ProgressWatcher()
34 bb.event.register("bb.build.TaskProgress", self._progress_watcher.handle_event) 34 bb.event.register("bb.build.TaskProgress", self._progress_watcher.handle_event, data=self.d)
35 35
36 def tearDown(self): 36 def tearDown(self):
37 bb.event.remove("bb.build.TaskProgress", None) 37 bb.event.remove("bb.build.TaskProgress", None)
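The change to handle_event() here reflects the new calling convention: class event handlers receive the datastore alongside the event, and register() accepts it via the data keyword. A minimal sketch under those assumptions:

import bb.data
import bb.event

def on_task_progress(event, d):
    # Handlers are now invoked as handler(event, d) rather than handler(event)
    print(event.progress, event.rate)

d = bb.data.init()
bb.event.register("bb.build.TaskProgress", on_task_progress, data=d)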
diff --git a/bitbake/lib/bb/tests/compression.py b/bitbake/lib/bb/tests/compression.py
new file mode 100644
index 0000000000..95af3f96d7
--- /dev/null
+++ b/bitbake/lib/bb/tests/compression.py
@@ -0,0 +1,100 @@
1#
2# Copyright BitBake Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7from pathlib import Path
8import bb.compress.lz4
9import bb.compress.zstd
10import contextlib
11import os
12import shutil
13import tempfile
14import unittest
15import subprocess
16
17
18class CompressionTests(object):
19 def setUp(self):
20 self._t = tempfile.TemporaryDirectory()
21 self.tmpdir = Path(self._t.name)
22 self.addCleanup(self._t.cleanup)
23
24 def _file_helper(self, mode_suffix, data):
25 tmp_file = self.tmpdir / "compressed"
26
27 with self.do_open(tmp_file, mode="w" + mode_suffix) as f:
28 f.write(data)
29
30 with self.do_open(tmp_file, mode="r" + mode_suffix) as f:
31 read_data = f.read()
32
33 self.assertEqual(read_data, data)
34
35 def test_text_file(self):
36 self._file_helper("t", "Hello")
37
38 def test_binary_file(self):
39 self._file_helper("b", "Hello".encode("utf-8"))
40
41 def _pipe_helper(self, mode_suffix, data):
42 rfd, wfd = os.pipe()
43 with open(rfd, "rb") as r, open(wfd, "wb") as w:
44 with self.do_open(r, mode="r" + mode_suffix) as decompress:
45 with self.do_open(w, mode="w" + mode_suffix) as compress:
46 compress.write(data)
47 read_data = decompress.read()
48
49 self.assertEqual(read_data, data)
50
51 def test_text_pipe(self):
52 self._pipe_helper("t", "Hello")
53
54 def test_binary_pipe(self):
55 self._pipe_helper("b", "Hello".encode("utf-8"))
56
57 def test_bad_decompress(self):
58 tmp_file = self.tmpdir / "compressed"
59 with tmp_file.open("wb") as f:
60 f.write(b"\x00")
61
62 with self.assertRaises(OSError):
63 with self.do_open(tmp_file, mode="rb", stderr=subprocess.DEVNULL) as f:
64 data = f.read()
65
66
67class LZ4Tests(CompressionTests, unittest.TestCase):
68 def setUp(self):
69 if shutil.which("lz4c") is None:
70 self.skipTest("'lz4c' not found")
71 super().setUp()
72
73 @contextlib.contextmanager
74 def do_open(self, *args, **kwargs):
75 with bb.compress.lz4.open(*args, **kwargs) as f:
76 yield f
77
78
79class ZStdTests(CompressionTests, unittest.TestCase):
80 def setUp(self):
81 if shutil.which("zstd") is None:
82 self.skipTest("'zstd' not found")
83 super().setUp()
84
85 @contextlib.contextmanager
86 def do_open(self, *args, **kwargs):
87 with bb.compress.zstd.open(*args, **kwargs) as f:
88 yield f
89
90
91class PZStdTests(CompressionTests, unittest.TestCase):
92 def setUp(self):
93 if shutil.which("pzstd") is None:
94 self.skipTest("'pzstd' not found")
95 super().setUp()
96
97 @contextlib.contextmanager
98 def do_open(self, *args, **kwargs):
99 with bb.compress.zstd.open(*args, num_threads=2, **kwargs) as f:
100 yield f
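The wrappers exercised above follow the built-in open() conventions, with 't'/'b' mode suffixes and an external tool (lz4c, zstd or pzstd) doing the actual compression, hence the skip checks. A usage sketch, assuming zstd is on PATH:

import bb.compress.zstd

# Round-trip a zstd-compressed text file
with bb.compress.zstd.open("/tmp/example.zst", "wt", encoding="utf-8") as f:
    f.write("Hello")

with bb.compress.zstd.open("/tmp/example.zst", "rt", encoding="utf-8") as f:
    assert f.read() == "Hello"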
diff --git a/bitbake/lib/bb/tests/cooker.py b/bitbake/lib/bb/tests/cooker.py
index c82d4b7b81..9e524ae345 100644
--- a/bitbake/lib/bb/tests/cooker.py
+++ b/bitbake/lib/bb/tests/cooker.py
@@ -1,6 +1,8 @@
1# 1#
2# BitBake Tests for cooker.py 2# BitBake Tests for cooker.py
3# 3#
4# Copyright BitBake Contributors
5#
4# SPDX-License-Identifier: GPL-2.0-only 6# SPDX-License-Identifier: GPL-2.0-only
5# 7#
6 8
diff --git a/bitbake/lib/bb/tests/data.py b/bitbake/lib/bb/tests/data.py
index 1d4a64b109..cbc7c1ecd4 100644
--- a/bitbake/lib/bb/tests/data.py
+++ b/bitbake/lib/bb/tests/data.py
@@ -60,6 +60,15 @@ class DataExpansions(unittest.TestCase):
60 val = self.d.expand("${@5*12}") 60 val = self.d.expand("${@5*12}")
61 self.assertEqual(str(val), "60") 61 self.assertEqual(str(val), "60")
62 62
63 def test_python_snippet_w_dict(self):
64 val = self.d.expand("${@{ 'green': 1, 'blue': 2 }['green']}")
65 self.assertEqual(str(val), "1")
66
67 def test_python_unexpanded_multi(self):
68 self.d.setVar("bar", "${unsetvar}")
69 val = self.d.expand("${@2*2},${foo},${@d.getVar('foo') + ' ${bar}'},${foo}")
70 self.assertEqual(str(val), "4,value_of_foo,${@d.getVar('foo') + ' ${unsetvar}'},value_of_foo")
71
63 def test_expand_in_python_snippet(self): 72 def test_expand_in_python_snippet(self):
64 val = self.d.expand("${@'boo ' + '${foo}'}") 73 val = self.d.expand("${@'boo ' + '${foo}'}")
65 self.assertEqual(str(val), "boo value_of_foo") 74 self.assertEqual(str(val), "boo value_of_foo")
@@ -68,6 +77,18 @@ class DataExpansions(unittest.TestCase):
68 val = self.d.expand("${@d.getVar('foo') + ' ${bar}'}") 77 val = self.d.expand("${@d.getVar('foo') + ' ${bar}'}")
69 self.assertEqual(str(val), "value_of_foo value_of_bar") 78 self.assertEqual(str(val), "value_of_foo value_of_bar")
70 79
80 def test_python_snippet_function_reference(self):
81 self.d.setVar("TESTVAL", "testvalue")
82 self.d.setVar("testfunc", 'd.getVar("TESTVAL")')
83 context = bb.utils.get_context()
84 context["testfunc"] = lambda d: d.getVar("TESTVAL")
85 val = self.d.expand("${@testfunc(d)}")
86 self.assertEqual(str(val), "testvalue")
87
88 def test_python_snippet_builtin_metadata(self):
89 self.d.setVar("eval", "INVALID")
90 self.d.expand("${@eval('3')}")
91
71 def test_python_unexpanded(self): 92 def test_python_unexpanded(self):
72 self.d.setVar("bar", "${unsetvar}") 93 self.d.setVar("bar", "${unsetvar}")
73 val = self.d.expand("${@d.getVar('foo') + ' ${bar}'}") 94 val = self.d.expand("${@d.getVar('foo') + ' ${bar}'}")
@@ -245,35 +266,35 @@ class TestConcatOverride(unittest.TestCase):
245 266
246 def test_prepend(self): 267 def test_prepend(self):
247 self.d.setVar("TEST", "${VAL}") 268 self.d.setVar("TEST", "${VAL}")
248 self.d.setVar("TEST_prepend", "${FOO}:") 269 self.d.setVar("TEST:prepend", "${FOO}:")
249 self.assertEqual(self.d.getVar("TEST"), "foo:val") 270 self.assertEqual(self.d.getVar("TEST"), "foo:val")
250 271
251 def test_append(self): 272 def test_append(self):
252 self.d.setVar("TEST", "${VAL}") 273 self.d.setVar("TEST", "${VAL}")
253 self.d.setVar("TEST_append", ":${BAR}") 274 self.d.setVar("TEST:append", ":${BAR}")
254 self.assertEqual(self.d.getVar("TEST"), "val:bar") 275 self.assertEqual(self.d.getVar("TEST"), "val:bar")
255 276
256 def test_multiple_append(self): 277 def test_multiple_append(self):
257 self.d.setVar("TEST", "${VAL}") 278 self.d.setVar("TEST", "${VAL}")
258 self.d.setVar("TEST_prepend", "${FOO}:") 279 self.d.setVar("TEST:prepend", "${FOO}:")
259 self.d.setVar("TEST_append", ":val2") 280 self.d.setVar("TEST:append", ":val2")
260 self.d.setVar("TEST_append", ":${BAR}") 281 self.d.setVar("TEST:append", ":${BAR}")
261 self.assertEqual(self.d.getVar("TEST"), "foo:val:val2:bar") 282 self.assertEqual(self.d.getVar("TEST"), "foo:val:val2:bar")
262 283
263 def test_append_unset(self): 284 def test_append_unset(self):
264 self.d.setVar("TEST_prepend", "${FOO}:") 285 self.d.setVar("TEST:prepend", "${FOO}:")
265 self.d.setVar("TEST_append", ":val2") 286 self.d.setVar("TEST:append", ":val2")
266 self.d.setVar("TEST_append", ":${BAR}") 287 self.d.setVar("TEST:append", ":${BAR}")
267 self.assertEqual(self.d.getVar("TEST"), "foo::val2:bar") 288 self.assertEqual(self.d.getVar("TEST"), "foo::val2:bar")
268 289
269 def test_remove(self): 290 def test_remove(self):
270 self.d.setVar("TEST", "${VAL} ${BAR}") 291 self.d.setVar("TEST", "${VAL} ${BAR}")
271 self.d.setVar("TEST_remove", "val") 292 self.d.setVar("TEST:remove", "val")
272 self.assertEqual(self.d.getVar("TEST"), " bar") 293 self.assertEqual(self.d.getVar("TEST"), " bar")
273 294
274 def test_remove_cleared(self): 295 def test_remove_cleared(self):
275 self.d.setVar("TEST", "${VAL} ${BAR}") 296 self.d.setVar("TEST", "${VAL} ${BAR}")
276 self.d.setVar("TEST_remove", "val") 297 self.d.setVar("TEST:remove", "val")
277 self.d.setVar("TEST", "${VAL} ${BAR}") 298 self.d.setVar("TEST", "${VAL} ${BAR}")
278 self.assertEqual(self.d.getVar("TEST"), "val bar") 299 self.assertEqual(self.d.getVar("TEST"), "val bar")
279 300
@@ -281,42 +302,42 @@ class TestConcatOverride(unittest.TestCase):
281 # (including that whitespace is preserved) 302 # (including that whitespace is preserved)
282 def test_remove_inactive_override(self): 303 def test_remove_inactive_override(self):
283 self.d.setVar("TEST", "${VAL} ${BAR} 123") 304 self.d.setVar("TEST", "${VAL} ${BAR} 123")
284 self.d.setVar("TEST_remove_inactiveoverride", "val") 305 self.d.setVar("TEST:remove:inactiveoverride", "val")
285 self.assertEqual(self.d.getVar("TEST"), "val bar 123") 306 self.assertEqual(self.d.getVar("TEST"), "val bar 123")
286 307
287 def test_doubleref_remove(self): 308 def test_doubleref_remove(self):
288 self.d.setVar("TEST", "${VAL} ${BAR}") 309 self.d.setVar("TEST", "${VAL} ${BAR}")
289 self.d.setVar("TEST_remove", "val") 310 self.d.setVar("TEST:remove", "val")
290 self.d.setVar("TEST_TEST", "${TEST} ${TEST}") 311 self.d.setVar("TEST_TEST", "${TEST} ${TEST}")
291 self.assertEqual(self.d.getVar("TEST_TEST"), " bar bar") 312 self.assertEqual(self.d.getVar("TEST_TEST"), " bar bar")
292 313
293 def test_empty_remove(self): 314 def test_empty_remove(self):
294 self.d.setVar("TEST", "") 315 self.d.setVar("TEST", "")
295 self.d.setVar("TEST_remove", "val") 316 self.d.setVar("TEST:remove", "val")
296 self.assertEqual(self.d.getVar("TEST"), "") 317 self.assertEqual(self.d.getVar("TEST"), "")
297 318
298 def test_remove_expansion(self): 319 def test_remove_expansion(self):
299 self.d.setVar("BAR", "Z") 320 self.d.setVar("BAR", "Z")
300 self.d.setVar("TEST", "${BAR}/X Y") 321 self.d.setVar("TEST", "${BAR}/X Y")
301 self.d.setVar("TEST_remove", "${BAR}/X") 322 self.d.setVar("TEST:remove", "${BAR}/X")
302 self.assertEqual(self.d.getVar("TEST"), " Y") 323 self.assertEqual(self.d.getVar("TEST"), " Y")
303 324
304 def test_remove_expansion_items(self): 325 def test_remove_expansion_items(self):
305 self.d.setVar("TEST", "A B C D") 326 self.d.setVar("TEST", "A B C D")
306 self.d.setVar("BAR", "B D") 327 self.d.setVar("BAR", "B D")
307 self.d.setVar("TEST_remove", "${BAR}") 328 self.d.setVar("TEST:remove", "${BAR}")
308 self.assertEqual(self.d.getVar("TEST"), "A C ") 329 self.assertEqual(self.d.getVar("TEST"), "A C ")
309 330
310 def test_remove_preserve_whitespace(self): 331 def test_remove_preserve_whitespace(self):
311 # When the removal isn't active, the original value should be preserved 332 # When the removal isn't active, the original value should be preserved
312 self.d.setVar("TEST", " A B") 333 self.d.setVar("TEST", " A B")
313 self.d.setVar("TEST_remove", "C") 334 self.d.setVar("TEST:remove", "C")
314 self.assertEqual(self.d.getVar("TEST"), " A B") 335 self.assertEqual(self.d.getVar("TEST"), " A B")
315 336
316 def test_remove_preserve_whitespace2(self): 337 def test_remove_preserve_whitespace2(self):
317 # When the removal is active preserve the whitespace 338 # When the removal is active preserve the whitespace
318 self.d.setVar("TEST", " A B") 339 self.d.setVar("TEST", " A B")
319 self.d.setVar("TEST_remove", "B") 340 self.d.setVar("TEST:remove", "B")
320 self.assertEqual(self.d.getVar("TEST"), " A ") 341 self.assertEqual(self.d.getVar("TEST"), " A ")
321 342
322class TestOverrides(unittest.TestCase): 343class TestOverrides(unittest.TestCase):
@@ -329,81 +350,86 @@ class TestOverrides(unittest.TestCase):
329 self.assertEqual(self.d.getVar("TEST"), "testvalue") 350 self.assertEqual(self.d.getVar("TEST"), "testvalue")
330 351
331 def test_one_override(self): 352 def test_one_override(self):
332 self.d.setVar("TEST_bar", "testvalue2") 353 self.d.setVar("TEST:bar", "testvalue2")
333 self.assertEqual(self.d.getVar("TEST"), "testvalue2") 354 self.assertEqual(self.d.getVar("TEST"), "testvalue2")
334 355
335 def test_one_override_unset(self): 356 def test_one_override_unset(self):
336 self.d.setVar("TEST2_bar", "testvalue2") 357 self.d.setVar("TEST2:bar", "testvalue2")
337 358
338 self.assertEqual(self.d.getVar("TEST2"), "testvalue2") 359 self.assertEqual(self.d.getVar("TEST2"), "testvalue2")
339 self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST2', 'OVERRIDES', 'TEST2_bar']) 360 self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST2', 'OVERRIDES', 'TEST2:bar'])
340 361
341 def test_multiple_override(self): 362 def test_multiple_override(self):
342 self.d.setVar("TEST_bar", "testvalue2") 363 self.d.setVar("TEST:bar", "testvalue2")
343 self.d.setVar("TEST_local", "testvalue3") 364 self.d.setVar("TEST:local", "testvalue3")
344 self.d.setVar("TEST_foo", "testvalue4") 365 self.d.setVar("TEST:foo", "testvalue4")
345 self.assertEqual(self.d.getVar("TEST"), "testvalue3") 366 self.assertEqual(self.d.getVar("TEST"), "testvalue3")
346 self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST_foo', 'OVERRIDES', 'TEST_bar', 'TEST_local']) 367 self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST:foo', 'OVERRIDES', 'TEST:bar', 'TEST:local'])
347 368
348 def test_multiple_combined_overrides(self): 369 def test_multiple_combined_overrides(self):
349 self.d.setVar("TEST_local_foo_bar", "testvalue3") 370 self.d.setVar("TEST:local:foo:bar", "testvalue3")
350 self.assertEqual(self.d.getVar("TEST"), "testvalue3") 371 self.assertEqual(self.d.getVar("TEST"), "testvalue3")
351 372
352 def test_multiple_overrides_unset(self): 373 def test_multiple_overrides_unset(self):
353 self.d.setVar("TEST2_local_foo_bar", "testvalue3") 374 self.d.setVar("TEST2:local:foo:bar", "testvalue3")
354 self.assertEqual(self.d.getVar("TEST2"), "testvalue3") 375 self.assertEqual(self.d.getVar("TEST2"), "testvalue3")
355 376
356 def test_keyexpansion_override(self): 377 def test_keyexpansion_override(self):
357 self.d.setVar("LOCAL", "local") 378 self.d.setVar("LOCAL", "local")
358 self.d.setVar("TEST_bar", "testvalue2") 379 self.d.setVar("TEST:bar", "testvalue2")
359 self.d.setVar("TEST_${LOCAL}", "testvalue3") 380 self.d.setVar("TEST:${LOCAL}", "testvalue3")
360 self.d.setVar("TEST_foo", "testvalue4") 381 self.d.setVar("TEST:foo", "testvalue4")
361 bb.data.expandKeys(self.d) 382 bb.data.expandKeys(self.d)
362 self.assertEqual(self.d.getVar("TEST"), "testvalue3") 383 self.assertEqual(self.d.getVar("TEST"), "testvalue3")
363 384
364 def test_rename_override(self): 385 def test_rename_override(self):
365 self.d.setVar("ALTERNATIVE_ncurses-tools_class-target", "a") 386 self.d.setVar("ALTERNATIVE:ncurses-tools:class-target", "a")
366 self.d.setVar("OVERRIDES", "class-target") 387 self.d.setVar("OVERRIDES", "class-target")
367 self.d.renameVar("ALTERNATIVE_ncurses-tools", "ALTERNATIVE_lib32-ncurses-tools") 388 self.d.renameVar("ALTERNATIVE:ncurses-tools", "ALTERNATIVE:lib32-ncurses-tools")
368 self.assertEqual(self.d.getVar("ALTERNATIVE_lib32-ncurses-tools"), "a") 389 self.assertEqual(self.d.getVar("ALTERNATIVE:lib32-ncurses-tools"), "a")
369 390
370 def test_underscore_override(self): 391 def test_underscore_override(self):
371 self.d.setVar("TEST_bar", "testvalue2") 392 self.d.setVar("TEST:bar", "testvalue2")
372 self.d.setVar("TEST_some_val", "testvalue3") 393 self.d.setVar("TEST:some_val", "testvalue3")
373 self.d.setVar("TEST_foo", "testvalue4") 394 self.d.setVar("TEST:foo", "testvalue4")
374 self.d.setVar("OVERRIDES", "foo:bar:some_val") 395 self.d.setVar("OVERRIDES", "foo:bar:some_val")
375 self.assertEqual(self.d.getVar("TEST"), "testvalue3") 396 self.assertEqual(self.d.getVar("TEST"), "testvalue3")
376 397
398 # Test an override with _<numeric> in it based on a real world OE issue
399 def test_underscore_override_2(self):
400 self.d.setVar("TARGET_ARCH", "x86_64")
401 self.d.setVar("PN", "test-${TARGET_ARCH}")
402 self.d.setVar("VERSION", "1")
403 self.d.setVar("VERSION:pn-test-${TARGET_ARCH}", "2")
404 self.d.setVar("OVERRIDES", "pn-${PN}")
405 bb.data.expandKeys(self.d)
406 self.assertEqual(self.d.getVar("VERSION"), "2")
407
377 def test_remove_with_override(self): 408 def test_remove_with_override(self):
378 self.d.setVar("TEST_bar", "testvalue2") 409 self.d.setVar("TEST:bar", "testvalue2")
379 self.d.setVar("TEST_some_val", "testvalue3 testvalue5") 410 self.d.setVar("TEST:some_val", "testvalue3 testvalue5")
380 self.d.setVar("TEST_some_val_remove", "testvalue3") 411 self.d.setVar("TEST:some_val:remove", "testvalue3")
381 self.d.setVar("TEST_foo", "testvalue4") 412 self.d.setVar("TEST:foo", "testvalue4")
382 self.d.setVar("OVERRIDES", "foo:bar:some_val") 413 self.d.setVar("OVERRIDES", "foo:bar:some_val")
383 self.assertEqual(self.d.getVar("TEST"), " testvalue5") 414 self.assertEqual(self.d.getVar("TEST"), " testvalue5")
384 415
385 def test_append_and_override_1(self): 416 def test_append_and_override_1(self):
386 self.d.setVar("TEST_append", "testvalue2") 417 self.d.setVar("TEST:append", "testvalue2")
387 self.d.setVar("TEST_bar", "testvalue3") 418 self.d.setVar("TEST:bar", "testvalue3")
388 self.assertEqual(self.d.getVar("TEST"), "testvalue3testvalue2") 419 self.assertEqual(self.d.getVar("TEST"), "testvalue3testvalue2")
389 420
390 def test_append_and_override_2(self): 421 def test_append_and_override_2(self):
391 self.d.setVar("TEST_append_bar", "testvalue2") 422 self.d.setVar("TEST:append:bar", "testvalue2")
392 self.assertEqual(self.d.getVar("TEST"), "testvaluetestvalue2") 423 self.assertEqual(self.d.getVar("TEST"), "testvaluetestvalue2")
393 424
394 def test_append_and_override_3(self): 425 def test_append_and_override_3(self):
395 self.d.setVar("TEST_bar_append", "testvalue2") 426 self.d.setVar("TEST:bar:append", "testvalue2")
396 self.assertEqual(self.d.getVar("TEST"), "testvalue2") 427 self.assertEqual(self.d.getVar("TEST"), "testvalue2")
397 428
398 # Test an override with _<numeric> in it based on a real world OE issue 429 def test_append_and_unused_override(self):
399 def test_underscore_override(self): 430 # Had a bug where an unused override append could return "" instead of None
400 self.d.setVar("TARGET_ARCH", "x86_64") 431 self.d.setVar("BAR:append:unusedoverride", "testvalue2")
401 self.d.setVar("PN", "test-${TARGET_ARCH}") 432 self.assertEqual(self.d.getVar("BAR"), None)
402 self.d.setVar("VERSION", "1")
403 self.d.setVar("VERSION_pn-test-${TARGET_ARCH}", "2")
404 self.d.setVar("OVERRIDES", "pn-${PN}")
405 bb.data.expandKeys(self.d)
406 self.assertEqual(self.d.getVar("VERSION"), "2")
407 433
408class TestKeyExpansion(unittest.TestCase): 434class TestKeyExpansion(unittest.TestCase):
409 def setUp(self): 435 def setUp(self):
@@ -498,7 +524,7 @@ class TaskHash(unittest.TestCase):
498 d.setVar("VAR", "val") 524 d.setVar("VAR", "val")
499 # Adding an inactive removal shouldn't change the hash 525 # Adding an inactive removal shouldn't change the hash
500 d.setVar("BAR", "notbar") 526 d.setVar("BAR", "notbar")
501 d.setVar("MYCOMMAND_remove", "${BAR}") 527 d.setVar("MYCOMMAND:remove", "${BAR}")
502 nexthash = gettask_bashhash("mytask", d) 528 nexthash = gettask_bashhash("mytask", d)
503 self.assertEqual(orighash, nexthash) 529 self.assertEqual(orighash, nexthash)
504 530
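Most of the churn in these data tests is the override separator moving from '_' to ':'. Side by side, on a fresh datastore:

import bb.data

d = bb.data.init()
d.setVar("VAL", "val")
d.setVar("TEST", "${VAL}")

# Old syntax (pre-change): d.setVar("TEST_append", ":more")
# New syntax:
d.setVar("TEST:append", ":more")   # unconditional append
d.setVar("TEST:remove", "val")     # remove matching whitespace-separated items
d.setVar("TEST:bar", "other")      # applied only when 'bar' appears in OVERRIDES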
diff --git a/bitbake/lib/bb/tests/event.py b/bitbake/lib/bb/tests/event.py
index 9ca7e9bc8e..ef61891d30 100644
--- a/bitbake/lib/bb/tests/event.py
+++ b/bitbake/lib/bb/tests/event.py
@@ -13,6 +13,7 @@ import pickle
13import threading 13import threading
14import time 14import time
15import unittest 15import unittest
16import tempfile
16from unittest.mock import Mock 17from unittest.mock import Mock
17from unittest.mock import call 18from unittest.mock import call
18 19
@@ -157,7 +158,7 @@ class EventHandlingTest(unittest.TestCase):
157 self._test_process.event_handler, 158 self._test_process.event_handler,
158 event, 159 event,
159 None) 160 None)
160 self._test_process.event_handler.assert_called_once_with(event) 161 self._test_process.event_handler.assert_called_once_with(event, None)
161 162
162 def test_fire_class_handlers(self): 163 def test_fire_class_handlers(self):
163 """ Test fire_class_handlers method """ 164 """ Test fire_class_handlers method """
@@ -175,10 +176,10 @@ class EventHandlingTest(unittest.TestCase):
175 bb.event.fire_class_handlers(event1, None) 176 bb.event.fire_class_handlers(event1, None)
176 bb.event.fire_class_handlers(event2, None) 177 bb.event.fire_class_handlers(event2, None)
177 bb.event.fire_class_handlers(event2, None) 178 bb.event.fire_class_handlers(event2, None)
178 expected_event_handler1 = [call(event1)] 179 expected_event_handler1 = [call(event1, None)]
179 expected_event_handler2 = [call(event1), 180 expected_event_handler2 = [call(event1, None),
180 call(event2), 181 call(event2, None),
181 call(event2)] 182 call(event2, None)]
182 self.assertEqual(self._test_process.event_handler1.call_args_list, 183 self.assertEqual(self._test_process.event_handler1.call_args_list,
183 expected_event_handler1) 184 expected_event_handler1)
184 self.assertEqual(self._test_process.event_handler2.call_args_list, 185 self.assertEqual(self._test_process.event_handler2.call_args_list,
@@ -205,7 +206,7 @@ class EventHandlingTest(unittest.TestCase):
205 bb.event.fire_class_handlers(event2, None) 206 bb.event.fire_class_handlers(event2, None)
206 bb.event.fire_class_handlers(event2, None) 207 bb.event.fire_class_handlers(event2, None)
207 expected_event_handler1 = [] 208 expected_event_handler1 = []
208 expected_event_handler2 = [call(event1)] 209 expected_event_handler2 = [call(event1, None)]
209 self.assertEqual(self._test_process.event_handler1.call_args_list, 210 self.assertEqual(self._test_process.event_handler1.call_args_list,
210 expected_event_handler1) 211 expected_event_handler1)
211 self.assertEqual(self._test_process.event_handler2.call_args_list, 212 self.assertEqual(self._test_process.event_handler2.call_args_list,
@@ -223,7 +224,7 @@ class EventHandlingTest(unittest.TestCase):
223 self.assertEqual(result, bb.event.Registered) 224 self.assertEqual(result, bb.event.Registered)
224 bb.event.fire_class_handlers(event1, None) 225 bb.event.fire_class_handlers(event1, None)
225 bb.event.fire_class_handlers(event2, None) 226 bb.event.fire_class_handlers(event2, None)
226 expected = [call(event1), call(event2)] 227 expected = [call(event1, None), call(event2, None)]
227 self.assertEqual(self._test_process.event_handler1.call_args_list, 228 self.assertEqual(self._test_process.event_handler1.call_args_list,
228 expected) 229 expected)
229 230
@@ -237,7 +238,7 @@ class EventHandlingTest(unittest.TestCase):
237 self.assertEqual(result, bb.event.Registered) 238 self.assertEqual(result, bb.event.Registered)
238 bb.event.fire_class_handlers(event1, None) 239 bb.event.fire_class_handlers(event1, None)
239 bb.event.fire_class_handlers(event2, None) 240 bb.event.fire_class_handlers(event2, None)
240 expected = [call(event1), call(event2), call(event1)] 241 expected = [call(event1, None), call(event2, None), call(event1, None)]
241 self.assertEqual(self._test_process.event_handler1.call_args_list, 242 self.assertEqual(self._test_process.event_handler1.call_args_list,
242 expected) 243 expected)
243 244
@@ -251,7 +252,7 @@ class EventHandlingTest(unittest.TestCase):
251 self.assertEqual(result, bb.event.Registered) 252 self.assertEqual(result, bb.event.Registered)
252 bb.event.fire_class_handlers(event1, None) 253 bb.event.fire_class_handlers(event1, None)
253 bb.event.fire_class_handlers(event2, None) 254 bb.event.fire_class_handlers(event2, None)
254 expected = [call(event1), call(event2), call(event1), call(event2)] 255 expected = [call(event1, None), call(event2, None), call(event1, None), call(event2, None)]
255 self.assertEqual(self._test_process.event_handler1.call_args_list, 256 self.assertEqual(self._test_process.event_handler1.call_args_list,
256 expected) 257 expected)
257 258
@@ -359,9 +360,10 @@ class EventHandlingTest(unittest.TestCase):
359 360
360 event1 = bb.event.ConfigParsed() 361 event1 = bb.event.ConfigParsed()
361 bb.event.fire(event1, None) 362 bb.event.fire(event1, None)
362 expected = [call(event1)] 363 expected = [call(event1, None)]
363 self.assertEqual(self._test_process.event_handler1.call_args_list, 364 self.assertEqual(self._test_process.event_handler1.call_args_list,
364 expected) 365 expected)
366 expected = [call(event1)]
365 self.assertEqual(self._test_ui1.event.send.call_args_list, 367 self.assertEqual(self._test_ui1.event.send.call_args_list,
366 expected) 368 expected)
367 369
@@ -450,10 +452,9 @@ class EventHandlingTest(unittest.TestCase):
450 and disable threadlocks tests """ 452 and disable threadlocks tests """
451 bb.event.fire(bb.event.OperationStarted(), None) 453 bb.event.fire(bb.event.OperationStarted(), None)
452 454
453 def test_enable_threadlock(self): 455 def test_event_threadlock(self):
454 """ Test enable_threadlock method """ 456 """ Test enable_threadlock method """
455 self._set_threadlock_test_mockups() 457 self._set_threadlock_test_mockups()
456 bb.event.enable_threadlock()
457 self._set_and_run_threadlock_test_workers() 458 self._set_and_run_threadlock_test_workers()
458 # Calls to UI handlers should be in order as all the registered 459 # Calls to UI handlers should be in order as all the registered
459 # handlers for the event coming from the first worker should be 460 # handlers for the event coming from the first worker should be
@@ -461,20 +462,6 @@ class EventHandlingTest(unittest.TestCase):
461 self.assertEqual(self._threadlock_test_calls, 462 self.assertEqual(self._threadlock_test_calls,
462 ["w1_ui1", "w1_ui2", "w2_ui1", "w2_ui2"]) 463 ["w1_ui1", "w1_ui2", "w2_ui1", "w2_ui2"])
463 464
464
465 def test_disable_threadlock(self):
466 """ Test disable_threadlock method """
467 self._set_threadlock_test_mockups()
468 bb.event.disable_threadlock()
469 self._set_and_run_threadlock_test_workers()
470 # Calls to UI handlers should be intertwined together. Thanks to the
471 # delay in the registered handlers for the event coming from the first
472 # worker, the event coming from the second worker starts being
473 # processed before finishing handling the first worker event.
474 self.assertEqual(self._threadlock_test_calls,
475 ["w1_ui1", "w2_ui1", "w1_ui2", "w2_ui2"])
476
477
478class EventClassesTest(unittest.TestCase): 465class EventClassesTest(unittest.TestCase):
479 """ Event classes test class """ 466 """ Event classes test class """
480 467
@@ -482,6 +469,8 @@ class EventClassesTest(unittest.TestCase):
482 469
483 def setUp(self): 470 def setUp(self):
484 bb.event.worker_pid = EventClassesTest._worker_pid 471 bb.event.worker_pid = EventClassesTest._worker_pid
472 self.d = bb.data.init()
473 bb.parse.siggen = bb.siggen.init(self.d)
485 474
486 def test_Event(self): 475 def test_Event(self):
487 """ Test the Event base class """ 476 """ Test the Event base class """
@@ -964,3 +953,24 @@ class EventClassesTest(unittest.TestCase):
964 event = bb.event.FindSigInfoResult(result) 953 event = bb.event.FindSigInfoResult(result)
965 self.assertEqual(event.result, result) 954 self.assertEqual(event.result, result)
966 self.assertEqual(event.pid, EventClassesTest._worker_pid) 955 self.assertEqual(event.pid, EventClassesTest._worker_pid)
956
957 def test_lineno_in_eventhandler(self):
958 # The error lineno is 5, not 4 since the first line is '\n'
959 error_line = """
960# Comment line1
961# Comment line2
962python test_lineno_in_eventhandler() {
963 This is an error line
964}
965addhandler test_lineno_in_eventhandler
966test_lineno_in_eventhandler[eventmask] = "bb.event.ConfigParsed"
967"""
968
969 with self.assertLogs() as logs:
970 f = tempfile.NamedTemporaryFile(suffix = '.bb')
971 f.write(bytes(error_line, "utf-8"))
972 f.flush()
973 d = bb.parse.handle(f.name, self.d)['']
974
975 output = "".join(logs.output)
976 self.assertTrue(" line 5\n" in output)
diff --git a/bitbake/lib/bb/tests/fetch-testdata/debian/pool/main/m/minicom/index.html b/bitbake/lib/bb/tests/fetch-testdata/debian/pool/main/m/minicom/index.html
new file mode 100644
index 0000000000..4a1eb4de13
--- /dev/null
+++ b/bitbake/lib/bb/tests/fetch-testdata/debian/pool/main/m/minicom/index.html
@@ -0,0 +1,59 @@
1<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
2<html>
3 <head>
4 <title>Index of /debian/pool/main/m/minicom</title>
5 </head>
6 <body>
7<h1>Index of /debian/pool/main/m/minicom</h1>
8 <table>
9 <tr><th valign="top"><img src="/icons/blank.gif" alt="[ICO]"></th><th><a href="?C=N;O=D">Name</a></th><th><a href="?C=M;O=A">Last modified</a></th><th><a href="?C=S;O=A">Size</a></th></tr>
10 <tr><th colspan="4"><hr></th></tr>
11<tr><td valign="top"><img src="/icons/back.gif" alt="[PARENTDIR]"></td><td><a href="/debian/pool/main/m/">Parent Directory</a></td><td>&nbsp;</td><td align="right"> - </td></tr>
12<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1.debian.tar.xz">minicom_2.7-1+deb8u1.debian.tar.xz</a></td><td align="right">2017-04-24 08:22 </td><td align="right"> 14K</td></tr>
13<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1.dsc">minicom_2.7-1+deb8u1.dsc</a></td><td align="right">2017-04-24 08:22 </td><td align="right">1.9K</td></tr>
14<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1_amd64.deb">minicom_2.7-1+deb8u1_amd64.deb</a></td><td align="right">2017-04-25 21:10 </td><td align="right">257K</td></tr>
15<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1_armel.deb">minicom_2.7-1+deb8u1_armel.deb</a></td><td align="right">2017-04-26 00:58 </td><td align="right">246K</td></tr>
16<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1_armhf.deb">minicom_2.7-1+deb8u1_armhf.deb</a></td><td align="right">2017-04-26 00:58 </td><td align="right">245K</td></tr>
17<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1_i386.deb">minicom_2.7-1+deb8u1_i386.deb</a></td><td align="right">2017-04-25 21:41 </td><td align="right">258K</td></tr>
18<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1.debian.tar.xz">minicom_2.7-1.1.debian.tar.xz</a></td><td align="right">2017-04-22 09:34 </td><td align="right"> 14K</td></tr>
19<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1.dsc">minicom_2.7-1.1.dsc</a></td><td align="right">2017-04-22 09:34 </td><td align="right">1.9K</td></tr>
20<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_amd64.deb">minicom_2.7-1.1_amd64.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">261K</td></tr>
21<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_arm64.deb">minicom_2.7-1.1_arm64.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">250K</td></tr>
22<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_armel.deb">minicom_2.7-1.1_armel.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">255K</td></tr>
23<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_armhf.deb">minicom_2.7-1.1_armhf.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">254K</td></tr>
24<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_i386.deb">minicom_2.7-1.1_i386.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">266K</td></tr>
25<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_mips.deb">minicom_2.7-1.1_mips.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">258K</td></tr>
26<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_mips64el.deb">minicom_2.7-1.1_mips64el.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">259K</td></tr>
27<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_mipsel.deb">minicom_2.7-1.1_mipsel.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">259K</td></tr>
28<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_ppc64el.deb">minicom_2.7-1.1_ppc64el.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">253K</td></tr>
29<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_s390x.deb">minicom_2.7-1.1_s390x.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">261K</td></tr>
30<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_amd64.deb">minicom_2.7.1-1+b1_amd64.deb</a></td><td align="right">2018-05-06 08:14 </td><td align="right">262K</td></tr>
31<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_arm64.deb">minicom_2.7.1-1+b1_arm64.deb</a></td><td align="right">2018-05-06 07:58 </td><td align="right">250K</td></tr>
32<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_armel.deb">minicom_2.7.1-1+b1_armel.deb</a></td><td align="right">2018-05-06 08:45 </td><td align="right">253K</td></tr>
33<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_armhf.deb">minicom_2.7.1-1+b1_armhf.deb</a></td><td align="right">2018-05-06 10:42 </td><td align="right">253K</td></tr>
34<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_i386.deb">minicom_2.7.1-1+b1_i386.deb</a></td><td align="right">2018-05-06 08:55 </td><td align="right">266K</td></tr>
35<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_mips.deb">minicom_2.7.1-1+b1_mips.deb</a></td><td align="right">2018-05-06 08:14 </td><td align="right">258K</td></tr>
36<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_mipsel.deb">minicom_2.7.1-1+b1_mipsel.deb</a></td><td align="right">2018-05-06 12:13 </td><td align="right">259K</td></tr>
37<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_ppc64el.deb">minicom_2.7.1-1+b1_ppc64el.deb</a></td><td align="right">2018-05-06 09:10 </td><td align="right">260K</td></tr>
38<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_s390x.deb">minicom_2.7.1-1+b1_s390x.deb</a></td><td align="right">2018-05-06 08:14 </td><td align="right">257K</td></tr>
39<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b2_mips64el.deb">minicom_2.7.1-1+b2_mips64el.deb</a></td><td align="right">2018-05-06 09:41 </td><td align="right">260K</td></tr>
40<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1.debian.tar.xz">minicom_2.7.1-1.debian.tar.xz</a></td><td align="right">2017-08-13 15:40 </td><td align="right"> 14K</td></tr>
41<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1.dsc">minicom_2.7.1-1.dsc</a></td><td align="right">2017-08-13 15:40 </td><td align="right">1.8K</td></tr>
42<tr><td valign="top"><img src="/icons/compressed.gif" alt="[ ]"></td><td><a href="minicom_2.7.1.orig.tar.gz">minicom_2.7.1.orig.tar.gz</a></td><td align="right">2017-08-13 15:40 </td><td align="right">855K</td></tr>
43<tr><td valign="top"><img src="/icons/compressed.gif" alt="[ ]"></td><td><a href="minicom_2.7.orig.tar.gz">minicom_2.7.orig.tar.gz</a></td><td align="right">2014-01-01 09:36 </td><td align="right">843K</td></tr>
44<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2.debian.tar.xz">minicom_2.8-2.debian.tar.xz</a></td><td align="right">2021-06-15 03:47 </td><td align="right"> 14K</td></tr>
45<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2.dsc">minicom_2.8-2.dsc</a></td><td align="right">2021-06-15 03:47 </td><td align="right">1.8K</td></tr>
46<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_amd64.deb">minicom_2.8-2_amd64.deb</a></td><td align="right">2021-06-15 03:58 </td><td align="right">280K</td></tr>
47<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_arm64.deb">minicom_2.8-2_arm64.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">275K</td></tr>
48<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_armel.deb">minicom_2.8-2_armel.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">271K</td></tr>
49<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_armhf.deb">minicom_2.8-2_armhf.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">272K</td></tr>
50<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_i386.deb">minicom_2.8-2_i386.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">285K</td></tr>
51<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_mips64el.deb">minicom_2.8-2_mips64el.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">277K</td></tr>
52<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_mipsel.deb">minicom_2.8-2_mipsel.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">278K</td></tr>
53<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_ppc64el.deb">minicom_2.8-2_ppc64el.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">286K</td></tr>
54<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_s390x.deb">minicom_2.8-2_s390x.deb</a></td><td align="right">2021-06-15 03:58 </td><td align="right">275K</td></tr>
55<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8.orig.tar.bz2">minicom_2.8.orig.tar.bz2</a></td><td align="right">2021-01-03 12:44 </td><td align="right">598K</td></tr>
56 <tr><th colspan="4"><hr></th></tr>
57</table>
58<address>Apache Server at ftp.debian.org Port 80</address>
59</body></html>
diff --git a/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.10/index.html b/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.10/index.html
new file mode 100644
index 0000000000..4e41af6d6a
--- /dev/null
+++ b/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.10/index.html
@@ -0,0 +1,20 @@
1<!DOCTYPE html><html><head><meta http-equiv="content-type" content="text/html; charset=utf-8"><meta name="viewport" content="width=device-width"><style type="text/css">body,html {background:#fff;font-family:"Bitstream Vera Sans","Lucida Grande","Lucida Sans Unicode",Lucidux,Verdana,Lucida,sans-serif;}tr:nth-child(even) {background:#f4f4f4;}th,td {padding:0.1em 0.5em;}th {text-align:left;font-weight:bold;background:#eee;border-bottom:1px solid #aaa;}#list {border:1px solid #aaa;width:100%;}a {color:#a33;}a:hover {color:#e33;}</style>
2
3<title>Index of /sources/libxml2/2.10/</title>
4</head><body><h1>Index of /sources/libxml2/2.10/</h1>
5<table id="list"><thead><tr><th style="width:55%"><a href="?C=N&amp;O=A">File Name</a>&nbsp;<a href="?C=N&amp;O=D">&nbsp;&darr;&nbsp;</a></th><th style="width:20%"><a href="?C=S&amp;O=A">File Size</a>&nbsp;<a href="?C=S&amp;O=D">&nbsp;&darr;&nbsp;</a></th><th style="width:25%"><a href="?C=M&amp;O=A">Date</a>&nbsp;<a href="?C=M&amp;O=D">&nbsp;&darr;&nbsp;</a></th></tr></thead>
6<tbody><tr><td class="link"><a href="../">Parent directory/</a></td><td class="size">-</td><td class="date">-</td></tr>
7<tr><td class="link"><a href="LATEST-IS-2.10.3" title="LATEST-IS-2.10.3">LATEST-IS-2.10.3</a></td><td class="size">2.5 MiB</td><td class="date">2022-Oct-14 12:55</td></tr>
8<tr><td class="link"><a href="libxml2-2.10.0.news" title="libxml2-2.10.0.news">libxml2-2.10.0.news</a></td><td class="size">7.1 KiB</td><td class="date">2022-Aug-17 11:55</td></tr>
9<tr><td class="link"><a href="libxml2-2.10.0.sha256sum" title="libxml2-2.10.0.sha256sum">libxml2-2.10.0.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-Aug-17 11:55</td></tr>
10<tr><td class="link"><a href="libxml2-2.10.0.tar.xz" title="libxml2-2.10.0.tar.xz">libxml2-2.10.0.tar.xz</a></td><td class="size">2.6 MiB</td><td class="date">2022-Aug-17 11:55</td></tr>
11<tr><td class="link"><a href="libxml2-2.10.1.news" title="libxml2-2.10.1.news">libxml2-2.10.1.news</a></td><td class="size">455 B</td><td class="date">2022-Aug-25 11:33</td></tr>
12<tr><td class="link"><a href="libxml2-2.10.1.sha256sum" title="libxml2-2.10.1.sha256sum">libxml2-2.10.1.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-Aug-25 11:33</td></tr>
13<tr><td class="link"><a href="libxml2-2.10.1.tar.xz" title="libxml2-2.10.1.tar.xz">libxml2-2.10.1.tar.xz</a></td><td class="size">2.6 MiB</td><td class="date">2022-Aug-25 11:33</td></tr>
14<tr><td class="link"><a href="libxml2-2.10.2.news" title="libxml2-2.10.2.news">libxml2-2.10.2.news</a></td><td class="size">309 B</td><td class="date">2022-Aug-29 14:56</td></tr>
15<tr><td class="link"><a href="libxml2-2.10.2.sha256sum" title="libxml2-2.10.2.sha256sum">libxml2-2.10.2.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-Aug-29 14:56</td></tr>
16<tr><td class="link"><a href="libxml2-2.10.2.tar.xz" title="libxml2-2.10.2.tar.xz">libxml2-2.10.2.tar.xz</a></td><td class="size">2.5 MiB</td><td class="date">2022-Aug-29 14:56</td></tr>
17<tr><td class="link"><a href="libxml2-2.10.3.news" title="libxml2-2.10.3.news">libxml2-2.10.3.news</a></td><td class="size">294 B</td><td class="date">2022-Oct-14 12:55</td></tr>
18<tr><td class="link"><a href="libxml2-2.10.3.sha256sum" title="libxml2-2.10.3.sha256sum">libxml2-2.10.3.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-Oct-14 12:55</td></tr>
19<tr><td class="link"><a href="libxml2-2.10.3.tar.xz" title="libxml2-2.10.3.tar.xz">libxml2-2.10.3.tar.xz</a></td><td class="size">2.5 MiB</td><td class="date">2022-Oct-14 12:55</td></tr>
20</tbody></table></body></html>
diff --git a/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.9/index.html b/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.9/index.html
new file mode 100644
index 0000000000..abdfdd0fa2
--- /dev/null
+++ b/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.9/index.html
@@ -0,0 +1,40 @@
1<!DOCTYPE html><html><head><meta http-equiv="content-type" content="text/html; charset=utf-8"><meta name="viewport" content="width=device-width"><style type="text/css">body,html {background:#fff;font-family:"Bitstream Vera Sans","Lucida Grande","Lucida Sans Unicode",Lucidux,Verdana,Lucida,sans-serif;}tr:nth-child(even) {background:#f4f4f4;}th,td {padding:0.1em 0.5em;}th {text-align:left;font-weight:bold;background:#eee;border-bottom:1px solid #aaa;}#list {border:1px solid #aaa;width:100%;}a {color:#a33;}a:hover {color:#e33;}</style>
2
3<title>Index of /sources/libxml2/2.9/</title>
4</head><body><h1>Index of /sources/libxml2/2.9/</h1>
5<table id="list"><thead><tr><th style="width:55%"><a href="?C=N&amp;O=A">File Name</a>&nbsp;<a href="?C=N&amp;O=D">&nbsp;&darr;&nbsp;</a></th><th style="width:20%"><a href="?C=S&amp;O=A">File Size</a>&nbsp;<a href="?C=S&amp;O=D">&nbsp;&darr;&nbsp;</a></th><th style="width:25%"><a href="?C=M&amp;O=A">Date</a>&nbsp;<a href="?C=M&amp;O=D">&nbsp;&darr;&nbsp;</a></th></tr></thead>
6<tbody><tr><td class="link"><a href="../">Parent directory/</a></td><td class="size">-</td><td class="date">-</td></tr>
7<tr><td class="link"><a href="LATEST-IS-2.9.14" title="LATEST-IS-2.9.14">LATEST-IS-2.9.14</a></td><td class="size">3.0 MiB</td><td class="date">2022-May-02 12:03</td></tr>
8<tr><td class="link"><a href="libxml2-2.9.0.sha256sum" title="libxml2-2.9.0.sha256sum">libxml2-2.9.0.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:27</td></tr>
9<tr><td class="link"><a href="libxml2-2.9.0.tar.xz" title="libxml2-2.9.0.tar.xz">libxml2-2.9.0.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:27</td></tr>
10<tr><td class="link"><a href="libxml2-2.9.1.sha256sum" title="libxml2-2.9.1.sha256sum">libxml2-2.9.1.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:28</td></tr>
11<tr><td class="link"><a href="libxml2-2.9.1.tar.xz" title="libxml2-2.9.1.tar.xz">libxml2-2.9.1.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:28</td></tr>
12<tr><td class="link"><a href="libxml2-2.9.10.sha256sum" title="libxml2-2.9.10.sha256sum">libxml2-2.9.10.sha256sum</a></td><td class="size">88 B</td><td class="date">2022-Feb-14 18:42</td></tr>
13<tr><td class="link"><a href="libxml2-2.9.10.tar.xz" title="libxml2-2.9.10.tar.xz">libxml2-2.9.10.tar.xz</a></td><td class="size">3.2 MiB</td><td class="date">2022-Feb-14 18:42</td></tr>
14<tr><td class="link"><a href="libxml2-2.9.11.sha256sum" title="libxml2-2.9.11.sha256sum">libxml2-2.9.11.sha256sum</a></td><td class="size">88 B</td><td class="date">2022-Feb-14 18:43</td></tr>
15<tr><td class="link"><a href="libxml2-2.9.11.tar.xz" title="libxml2-2.9.11.tar.xz">libxml2-2.9.11.tar.xz</a></td><td class="size">3.2 MiB</td><td class="date">2022-Feb-14 18:43</td></tr>
16<tr><td class="link"><a href="libxml2-2.9.12.sha256sum" title="libxml2-2.9.12.sha256sum">libxml2-2.9.12.sha256sum</a></td><td class="size">88 B</td><td class="date">2022-Feb-14 18:45</td></tr>
17<tr><td class="link"><a href="libxml2-2.9.12.tar.xz" title="libxml2-2.9.12.tar.xz">libxml2-2.9.12.tar.xz</a></td><td class="size">3.2 MiB</td><td class="date">2022-Feb-14 18:45</td></tr>
18<tr><td class="link"><a href="libxml2-2.9.13.news" title="libxml2-2.9.13.news">libxml2-2.9.13.news</a></td><td class="size">26.6 KiB</td><td class="date">2022-Feb-20 12:42</td></tr>
19<tr><td class="link"><a href="libxml2-2.9.13.sha256sum" title="libxml2-2.9.13.sha256sum">libxml2-2.9.13.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-Feb-20 12:42</td></tr>
20<tr><td class="link"><a href="libxml2-2.9.13.tar.xz" title="libxml2-2.9.13.tar.xz">libxml2-2.9.13.tar.xz</a></td><td class="size">3.1 MiB</td><td class="date">2022-Feb-20 12:42</td></tr>
21<tr><td class="link"><a href="libxml2-2.9.14.news" title="libxml2-2.9.14.news">libxml2-2.9.14.news</a></td><td class="size">1.0 KiB</td><td class="date">2022-May-02 12:03</td></tr>
22<tr><td class="link"><a href="libxml2-2.9.14.sha256sum" title="libxml2-2.9.14.sha256sum">libxml2-2.9.14.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-May-02 12:03</td></tr>
23<tr><td class="link"><a href="libxml2-2.9.14.tar.xz" title="libxml2-2.9.14.tar.xz">libxml2-2.9.14.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-May-02 12:03</td></tr>
24<tr><td class="link"><a href="libxml2-2.9.2.sha256sum" title="libxml2-2.9.2.sha256sum">libxml2-2.9.2.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:30</td></tr>
25<tr><td class="link"><a href="libxml2-2.9.2.tar.xz" title="libxml2-2.9.2.tar.xz">libxml2-2.9.2.tar.xz</a></td><td class="size">3.2 MiB</td><td class="date">2022-Feb-14 18:30</td></tr>
26<tr><td class="link"><a href="libxml2-2.9.3.sha256sum" title="libxml2-2.9.3.sha256sum">libxml2-2.9.3.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:31</td></tr>
27<tr><td class="link"><a href="libxml2-2.9.3.tar.xz" title="libxml2-2.9.3.tar.xz">libxml2-2.9.3.tar.xz</a></td><td class="size">3.2 MiB</td><td class="date">2022-Feb-14 18:31</td></tr>
28<tr><td class="link"><a href="libxml2-2.9.4.sha256sum" title="libxml2-2.9.4.sha256sum">libxml2-2.9.4.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:33</td></tr>
29<tr><td class="link"><a href="libxml2-2.9.4.tar.xz" title="libxml2-2.9.4.tar.xz">libxml2-2.9.4.tar.xz</a></td><td class="size">2.9 MiB</td><td class="date">2022-Feb-14 18:33</td></tr>
30<tr><td class="link"><a href="libxml2-2.9.5.sha256sum" title="libxml2-2.9.5.sha256sum">libxml2-2.9.5.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:35</td></tr>
31<tr><td class="link"><a href="libxml2-2.9.5.tar.xz" title="libxml2-2.9.5.tar.xz">libxml2-2.9.5.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:35</td></tr>
32<tr><td class="link"><a href="libxml2-2.9.6.sha256sum" title="libxml2-2.9.6.sha256sum">libxml2-2.9.6.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:36</td></tr>
33<tr><td class="link"><a href="libxml2-2.9.6.tar.xz" title="libxml2-2.9.6.tar.xz">libxml2-2.9.6.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:36</td></tr>
34<tr><td class="link"><a href="libxml2-2.9.7.sha256sum" title="libxml2-2.9.7.sha256sum">libxml2-2.9.7.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:37</td></tr>
35<tr><td class="link"><a href="libxml2-2.9.7.tar.xz" title="libxml2-2.9.7.tar.xz">libxml2-2.9.7.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:37</td></tr>
36<tr><td class="link"><a href="libxml2-2.9.8.sha256sum" title="libxml2-2.9.8.sha256sum">libxml2-2.9.8.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:39</td></tr>
37<tr><td class="link"><a href="libxml2-2.9.8.tar.xz" title="libxml2-2.9.8.tar.xz">libxml2-2.9.8.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:39</td></tr>
38<tr><td class="link"><a href="libxml2-2.9.9.sha256sum" title="libxml2-2.9.9.sha256sum">libxml2-2.9.9.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:40</td></tr>
39<tr><td class="link"><a href="libxml2-2.9.9.tar.xz" title="libxml2-2.9.9.tar.xz">libxml2-2.9.9.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:40</td></tr>
40</tbody></table></body></html>
diff --git a/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/index.html b/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/index.html
new file mode 100644
index 0000000000..c183e06a55
--- /dev/null
+++ b/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/index.html
@@ -0,0 +1,19 @@
1<!DOCTYPE html><html><head><meta http-equiv="content-type" content="text/html; charset=utf-8"><meta name="viewport" content="width=device-width"><style type="text/css">body,html {background:#fff;font-family:"Bitstream Vera Sans","Lucida Grande","Lucida Sans Unicode",Lucidux,Verdana,Lucida,sans-serif;}tr:nth-child(even) {background:#f4f4f4;}th,td {padding:0.1em 0.5em;}th {text-align:left;font-weight:bold;background:#eee;border-bottom:1px solid #aaa;}#list {border:1px solid #aaa;width:100%;}a {color:#a33;}a:hover {color:#e33;}</style>
2
3<title>Index of /sources/libxml2/</title>
4</head><body><h1>Index of /sources/libxml2/</h1>
5<table id="list"><thead><tr><th style="width:55%"><a href="?C=N&amp;O=A">File Name</a>&nbsp;<a href="?C=N&amp;O=D">&nbsp;&darr;&nbsp;</a></th><th style="width:20%"><a href="?C=S&amp;O=A">File Size</a>&nbsp;<a href="?C=S&amp;O=D">&nbsp;&darr;&nbsp;</a></th><th style="width:25%"><a href="?C=M&amp;O=A">Date</a>&nbsp;<a href="?C=M&amp;O=D">&nbsp;&darr;&nbsp;</a></th></tr></thead>
6<tbody><tr><td class="link"><a href="../">Parent directory/</a></td><td class="size">-</td><td class="date">-</td></tr>
7<tr><td class="link"><a href="2.0/" title="2.0">2.0/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:04</td></tr>
8<tr><td class="link"><a href="2.1/" title="2.1">2.1/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:04</td></tr>
9<tr><td class="link"><a href="2.10/" title="2.10">2.10/</a></td><td class="size">-</td><td class="date">2022-Oct-14 12:55</td></tr>
10<tr><td class="link"><a href="2.2/" title="2.2">2.2/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:04</td></tr>
11<tr><td class="link"><a href="2.3/" title="2.3">2.3/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:05</td></tr>
12<tr><td class="link"><a href="2.4/" title="2.4">2.4/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:05</td></tr>
13<tr><td class="link"><a href="2.5/" title="2.5">2.5/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:05</td></tr>
14<tr><td class="link"><a href="2.6/" title="2.6">2.6/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:05</td></tr>
15<tr><td class="link"><a href="2.7/" title="2.7">2.7/</a></td><td class="size">-</td><td class="date">2022-Feb-14 18:24</td></tr>
16<tr><td class="link"><a href="2.8/" title="2.8">2.8/</a></td><td class="size">-</td><td class="date">2022-Feb-14 18:26</td></tr>
17<tr><td class="link"><a href="2.9/" title="2.9">2.9/</a></td><td class="size">-</td><td class="date">2022-May-02 12:04</td></tr>
18<tr><td class="link"><a href="cache.json" title="cache.json">cache.json</a></td><td class="size">22.8 KiB</td><td class="date">2022-Oct-14 12:55</td></tr>
19</tbody></table></body></html>
diff --git a/bitbake/lib/bb/tests/fetch.py b/bitbake/lib/bb/tests/fetch.py
index 7b2dac7b86..85c1f79ff3 100644
--- a/bitbake/lib/bb/tests/fetch.py
+++ b/bitbake/lib/bb/tests/fetch.py
@@ -6,11 +6,14 @@
6# SPDX-License-Identifier: GPL-2.0-only 6# SPDX-License-Identifier: GPL-2.0-only
7# 7#
8 8
9import contextlib
9import unittest 10import unittest
10import hashlib 11import hashlib
11import tempfile 12import tempfile
12import collections 13import collections
13import os 14import os
15import signal
16import tarfile
14from bb.fetch2 import URI 17from bb.fetch2 import URI
15from bb.fetch2 import FetchMethod 18from bb.fetch2 import FetchMethod
16import bb 19import bb
@@ -18,9 +21,28 @@ from bb.tests.support.httpserver import HTTPService
18 21
19def skipIfNoNetwork(): 22def skipIfNoNetwork():
20 if os.environ.get("BB_SKIP_NETTESTS") == "yes": 23 if os.environ.get("BB_SKIP_NETTESTS") == "yes":
21 return unittest.skip("Network tests being skipped") 24 return unittest.skip("network test")
22 return lambda f: f 25 return lambda f: f
23 26
27class TestTimeout(Exception):
28 # Indicate to pytest that this is not a test suite
29 __test__ = False
30
31class Timeout():
32
33 def __init__(self, seconds):
34 self.seconds = seconds
35
36 def handle_timeout(self, signum, frame):
37 raise TestTimeout("Test failed: timeout reached")
38
39 def __enter__(self):
40 signal.signal(signal.SIGALRM, self.handle_timeout)
41 signal.alarm(self.seconds)
42
43 def __exit__(self, exc_type, exc_val, exc_tb):
44 signal.alarm(0)
45
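For reference, a minimal usage sketch of the Timeout helper defined above (the durations are illustrative; SIGALRM-based timeouts only work in the main thread on POSIX systems):

    import time

    try:
        with Timeout(2):
            time.sleep(5)  # stands in for a stalled download
    except TestTimeout:
        print("operation aborted after 2 seconds")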
24class URITest(unittest.TestCase): 46class URITest(unittest.TestCase):
25 test_uris = { 47 test_uris = {
26 "http://www.google.com/index.html" : { 48 "http://www.google.com/index.html" : {
@@ -286,6 +308,21 @@ class URITest(unittest.TestCase):
286 'params': {"someparam" : "1"}, 308 'params': {"someparam" : "1"},
287 'query': {}, 309 'query': {},
288 'relative': True 310 'relative': True
311 },
312 "https://www.innodisk.com/Download_file?9BE0BF6657;downloadfilename=EGPL-T101.zip": {
313 'uri': 'https://www.innodisk.com/Download_file?9BE0BF6657;downloadfilename=EGPL-T101.zip',
314 'scheme': 'https',
315 'hostname': 'www.innodisk.com',
316 'port': None,
317 'hostport': 'www.innodisk.com',
318 'path': '/Download_file',
 319            'userinfo': '',
321 'username': '',
322 'password': '',
323 'params': {"downloadfilename" : "EGPL-T101.zip"},
324 'query': {"9BE0BF6657": None},
325 'relative': False
289 } 326 }
290 327
291 } 328 }
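The new innodisk entry pins down how bb.fetch2.URI separates the HTTP query string from BitBake's ';'-delimited parameters; a quick sketch, with outputs taken from the expectations above:

    from bb.fetch2 import URI

    u = URI("https://www.innodisk.com/Download_file?9BE0BF6657;downloadfilename=EGPL-T101.zip")
    print(u.path)    # '/Download_file'
    print(u.query)   # {'9BE0BF6657': None}
    print(u.params)  # {'downloadfilename': 'EGPL-T101.zip'}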
@@ -376,7 +413,7 @@ class FetcherTest(unittest.TestCase):
376 def setUp(self): 413 def setUp(self):
377 self.origdir = os.getcwd() 414 self.origdir = os.getcwd()
378 self.d = bb.data.init() 415 self.d = bb.data.init()
379 self.tempdir = tempfile.mkdtemp() 416 self.tempdir = tempfile.mkdtemp(prefix="bitbake-fetch-")
380 self.dldir = os.path.join(self.tempdir, "download") 417 self.dldir = os.path.join(self.tempdir, "download")
381 os.mkdir(self.dldir) 418 os.mkdir(self.dldir)
382 self.d.setVar("DL_DIR", self.dldir) 419 self.d.setVar("DL_DIR", self.dldir)
@@ -390,57 +427,94 @@ class FetcherTest(unittest.TestCase):
390 if os.environ.get("BB_TMPDIR_NOCLEAN") == "yes": 427 if os.environ.get("BB_TMPDIR_NOCLEAN") == "yes":
391 print("Not cleaning up %s. Please remove manually." % self.tempdir) 428 print("Not cleaning up %s. Please remove manually." % self.tempdir)
392 else: 429 else:
430 bb.process.run('chmod u+rw -R %s' % self.tempdir)
393 bb.utils.prunedir(self.tempdir) 431 bb.utils.prunedir(self.tempdir)
394 432
433 def git(self, cmd, cwd=None):
434 if isinstance(cmd, str):
435 cmd = 'git -c safe.bareRepository=all ' + cmd
436 else:
437 cmd = ['git', '-c', 'safe.bareRepository=all'] + cmd
438 if cwd is None:
439 cwd = self.gitdir
440 return bb.process.run(cmd, cwd=cwd)[0]
441
442 def git_init(self, cwd=None):
443 self.git('init', cwd=cwd)
 444        # Explicitly set the initial branch to master,
 445        # since a common setup is to use a default
 446        # branch other than master.
447 self.git(['checkout', '-b', 'master'], cwd=cwd)
448
449 try:
450 self.git(['config', 'user.email'], cwd=cwd)
451 except bb.process.ExecutionError:
452 self.git(['config', 'user.email', 'you@example.com'], cwd=cwd)
453
454 try:
455 self.git(['config', 'user.name'], cwd=cwd)
456 except bb.process.ExecutionError:
457 self.git(['config', 'user.name', 'Your Name'], cwd=cwd)
458
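A hypothetical test built on these new helpers might look as follows (the test name and commit message are invented; self.gitdir must be set before calling self.git() without an explicit cwd):

    def test_example_repo(self):
        self.gitdir = tempfile.mkdtemp(dir=self.tempdir)
        self.git_init()
        self.git(['commit', '--allow-empty', '-m', 'initial commit'])
        head = self.git('rev-parse HEAD').strip()
        self.assertEqual(len(head), 40)  # full SHA-1 of the new commit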
395class MirrorUriTest(FetcherTest): 459class MirrorUriTest(FetcherTest):
396 460
397 replaceuris = { 461 replaceuris = {
398 ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "http://somewhere.org/somedir/") 462 ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "http://somewhere.org/somedir/")
399 : "http://somewhere.org/somedir/git2_git.invalid.infradead.org.mtd-utils.git.tar.gz", 463 : "http://somewhere.org/somedir/git2_git.invalid.infradead.org.mtd-utils.git.tar.gz",
400 ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http") 464 ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http")
401 : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", 465 : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
402 ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http") 466 ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http")
403 : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", 467 : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
404 ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/\\2;protocol=http") 468 ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/\\2;protocol=http")
405 : "git://somewhere.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", 469 : "git://somewhere.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
406 ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890", "git://someserver.org/bitbake", "git://git.openembedded.org/bitbake") 470 ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890", "git://someserver.org/bitbake", "git://git.openembedded.org/bitbake")
407 : "git://git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890", 471 : "git://git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890",
408 ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache") 472 ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache")
409 : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz", 473 : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz",
410 ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache/") 474 ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache/")
411 : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz", 475 : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz",
412 ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org/somedir3") 476 ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org/somedir3")
413 : "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz", 477 : "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz",
414 ("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz") 478 ("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz")
415 : "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz", 479 : "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz",
416 ("http://www.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", "http://www.apache.org/dist", "http://archive.apache.org/dist") 480 ("http://www.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", "http://www.apache.org/dist", "http://archive.apache.org/dist")
417 : "http://archive.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", 481 : "http://archive.apache.org/dist/subversion/subversion-1.7.1.tar.bz2",
418 ("http://www.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", "http://.*/.*", "file:///somepath/downloads/") 482 ("http://www.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", "http://.*/.*", "file:///somepath/downloads/")
419 : "file:///somepath/downloads/subversion-1.7.1.tar.bz2", 483 : "file:///somepath/downloads/subversion-1.7.1.tar.bz2",
420 ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http") 484 ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http")
421 : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", 485 : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
422 ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http") 486 ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http")
423 : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", 487 : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
424 ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/MIRRORNAME;protocol=http") 488 ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/MIRRORNAME;protocol=http")
425 : "git://somewhere.org/somedir/git.invalid.infradead.org.foo.mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http", 489 : "git://somewhere.org/somedir/git.invalid.infradead.org.foo.mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
426 ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org") 490 ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org")
427 : "http://somewhere2.org/somefile_1.2.3.tar.gz", 491 : "http://somewhere2.org/somefile_1.2.3.tar.gz",
428 ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org/") 492 ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org/")
429 : "http://somewhere2.org/somefile_1.2.3.tar.gz", 493 : "http://somewhere2.org/somefile_1.2.3.tar.gz",
430 ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master", "git://someserver.org/bitbake;branch=master", "git://git.openembedded.org/bitbake;protocol=http") 494 ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master", "git://someserver.org/bitbake;branch=master", "git://git.openembedded.org/bitbake;protocol=http")
431 : "git://git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master;protocol=http", 495 : "git://git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
496 ("git://user1@someserver.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master", "git://someserver.org/bitbake;branch=master", "git://user2@git.openembedded.org/bitbake;protocol=http")
497 : "git://user2@git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
498 ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890;protocol=git;branch=master", "git://someserver.org/bitbake", "git://someotherserver.org/bitbake;protocol=https")
499 : "git://someotherserver.org/bitbake;tag=1234567890123456789012345678901234567890;protocol=https;branch=master",
500 ("gitsm://git.qemu.org/git/seabios.git/;protocol=https;name=roms/seabios;subpath=roms/seabios;bareclone=1;nobranch=1;rev=1234567890123456789012345678901234567890", "gitsm://.*/.*", "http://petalinux.xilinx.com/sswreleases/rel-v${XILINX_VER_MAIN}/downloads") : "http://petalinux.xilinx.com/sswreleases/rel-v%24%7BXILINX_VER_MAIN%7D/downloads/git2_git.qemu.org.git.seabios.git..tar.gz",
501 ("https://somewhere.org/example/1.0.0/example;downloadfilename=some-example-1.0.0.tgz", "https://.*/.*", "file:///mirror/PATH")
502 : "file:///mirror/example/1.0.0/some-example-1.0.0.tgz;downloadfilename=some-example-1.0.0.tgz",
503 ("https://somewhere.org/example-1.0.0.tgz;downloadfilename=some-example-1.0.0.tgz", "https://.*/.*", "file:///mirror/some-example-1.0.0.tgz")
504 : "file:///mirror/some-example-1.0.0.tgz;downloadfilename=some-example-1.0.0.tgz",
432 505
433 #Renaming files doesn't work 506 #Renaming files doesn't work
434 #("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz") : "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz" 507 #("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz") : "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz"
435 #("file://sstate-xyz.tgz", "file://.*/.*", "file:///somewhere/1234/sstate-cache") : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz", 508 #("file://sstate-xyz.tgz", "file://.*/.*", "file:///somewhere/1234/sstate-cache") : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz",
436 } 509 }
437 510
438 mirrorvar = "http://.*/.* file:///somepath/downloads/ \n" \ 511 mirrorvar = "http://.*/.* file:///somepath/downloads/ " \
439 "git://someserver.org/bitbake git://git.openembedded.org/bitbake \n" \ 512 "git://someserver.org/bitbake git://git.openembedded.org/bitbake " \
440 "https://.*/.* file:///someotherpath/downloads/ \n" \ 513 "https://.*/.* file:///someotherpath/downloads/ " \
441 "http://.*/.* file:///someotherpath/downloads/ \n" 514 "http://.*/.* file:///someotherpath/downloads/"
442 515
443 def test_urireplace(self): 516 def test_urireplace(self):
517 self.d.setVar("FILESPATH", ".")
444 for k, v in self.replaceuris.items(): 518 for k, v in self.replaceuris.items():
445 ud = bb.fetch.FetchData(k[0], self.d) 519 ud = bb.fetch.FetchData(k[0], self.d)
446 ud.setup_localpath(self.d) 520 ud.setup_localpath(self.d)
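The mirrorvar rewrite above reflects that mirror specifications are plain whitespace-separated (source, replacement) pairs, which makes the old trailing "\n" separators redundant. A sketch of how bb.fetch2.mirror_from_string() consumes such a string, assuming its usual tokenize-and-pair behaviour:

    import bb.fetch2

    mirrors = bb.fetch2.mirror_from_string(
        "http://.*/.* file:///somepath/downloads/ "
        "git://someserver.org/bitbake git://git.openembedded.org/bitbake")
    # [['http://.*/.*', 'file:///somepath/downloads/'],
    #  ['git://someserver.org/bitbake', 'git://git.openembedded.org/bitbake']]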
@@ -463,8 +537,8 @@ class MirrorUriTest(FetcherTest):
463 537
464 def test_mirror_of_mirror(self): 538 def test_mirror_of_mirror(self):
465 # Test if mirror of a mirror works 539 # Test if mirror of a mirror works
466 mirrorvar = self.mirrorvar + " http://.*/.* http://otherdownloads.yoctoproject.org/downloads/ \n" 540 mirrorvar = self.mirrorvar + " http://.*/.* http://otherdownloads.yoctoproject.org/downloads/"
467 mirrorvar = mirrorvar + " http://otherdownloads.yoctoproject.org/.* http://downloads2.yoctoproject.org/downloads/ \n" 541 mirrorvar = mirrorvar + " http://otherdownloads.yoctoproject.org/.* http://downloads2.yoctoproject.org/downloads/"
468 fetcher = bb.fetch.FetchData("http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d) 542 fetcher = bb.fetch.FetchData("http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d)
469 mirrors = bb.fetch2.mirror_from_string(mirrorvar) 543 mirrors = bb.fetch2.mirror_from_string(mirrorvar)
470 uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d) 544 uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
@@ -473,8 +547,8 @@ class MirrorUriTest(FetcherTest):
473 'http://otherdownloads.yoctoproject.org/downloads/bitbake-1.0.tar.gz', 547 'http://otherdownloads.yoctoproject.org/downloads/bitbake-1.0.tar.gz',
474 'http://downloads2.yoctoproject.org/downloads/bitbake-1.0.tar.gz']) 548 'http://downloads2.yoctoproject.org/downloads/bitbake-1.0.tar.gz'])
475 549
476 recmirrorvar = "https://.*/[^/]* http://AAAA/A/A/A/ \n" \ 550 recmirrorvar = "https://.*/[^/]* http://AAAA/A/A/A/ " \
477 "https://.*/[^/]* https://BBBB/B/B/B/ \n" 551 "https://.*/[^/]* https://BBBB/B/B/B/"
478 552
479 def test_recursive(self): 553 def test_recursive(self):
480 fetcher = bb.fetch.FetchData("https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d) 554 fetcher = bb.fetch.FetchData("https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d)
@@ -488,15 +562,15 @@ class MirrorUriTest(FetcherTest):
488class GitDownloadDirectoryNamingTest(FetcherTest): 562class GitDownloadDirectoryNamingTest(FetcherTest):
489 def setUp(self): 563 def setUp(self):
490 super(GitDownloadDirectoryNamingTest, self).setUp() 564 super(GitDownloadDirectoryNamingTest, self).setUp()
491 self.recipe_url = "git://git.openembedded.org/bitbake" 565 self.recipe_url = "git://git.openembedded.org/bitbake;branch=master;protocol=https"
492 self.recipe_dir = "git.openembedded.org.bitbake" 566 self.recipe_dir = "git.openembedded.org.bitbake"
493 self.mirror_url = "git://github.com/openembedded/bitbake.git" 567 self.mirror_url = "git://github.com/openembedded/bitbake.git;protocol=https;branch=master"
494 self.mirror_dir = "github.com.openembedded.bitbake.git" 568 self.mirror_dir = "github.com.openembedded.bitbake.git"
495 569
496 self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40') 570 self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40')
497 571
498 def setup_mirror_rewrite(self): 572 def setup_mirror_rewrite(self):
499 self.d.setVar("PREMIRRORS", self.recipe_url + " " + self.mirror_url + " \n") 573 self.d.setVar("PREMIRRORS", self.recipe_url + " " + self.mirror_url)
500 574
501 @skipIfNoNetwork() 575 @skipIfNoNetwork()
502 def test_that_directory_is_named_after_recipe_url_when_no_mirroring_is_used(self): 576 def test_that_directory_is_named_after_recipe_url_when_no_mirroring_is_used(self):
@@ -536,16 +610,16 @@ class GitDownloadDirectoryNamingTest(FetcherTest):
536class TarballNamingTest(FetcherTest): 610class TarballNamingTest(FetcherTest):
537 def setUp(self): 611 def setUp(self):
538 super(TarballNamingTest, self).setUp() 612 super(TarballNamingTest, self).setUp()
539 self.recipe_url = "git://git.openembedded.org/bitbake" 613 self.recipe_url = "git://git.openembedded.org/bitbake;branch=master;protocol=https"
540 self.recipe_tarball = "git2_git.openembedded.org.bitbake.tar.gz" 614 self.recipe_tarball = "git2_git.openembedded.org.bitbake.tar.gz"
541 self.mirror_url = "git://github.com/openembedded/bitbake.git" 615 self.mirror_url = "git://github.com/openembedded/bitbake.git;protocol=https;branch=master"
542 self.mirror_tarball = "git2_github.com.openembedded.bitbake.git.tar.gz" 616 self.mirror_tarball = "git2_github.com.openembedded.bitbake.git.tar.gz"
543 617
544 self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '1') 618 self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '1')
545 self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40') 619 self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40')
546 620
547 def setup_mirror_rewrite(self): 621 def setup_mirror_rewrite(self):
548 self.d.setVar("PREMIRRORS", self.recipe_url + " " + self.mirror_url + " \n") 622 self.d.setVar("PREMIRRORS", self.recipe_url + " " + self.mirror_url)
549 623
550 @skipIfNoNetwork() 624 @skipIfNoNetwork()
551 def test_that_the_recipe_tarball_is_created_when_no_mirroring_is_used(self): 625 def test_that_the_recipe_tarball_is_created_when_no_mirroring_is_used(self):
@@ -570,9 +644,9 @@ class TarballNamingTest(FetcherTest):
570class GitShallowTarballNamingTest(FetcherTest): 644class GitShallowTarballNamingTest(FetcherTest):
571 def setUp(self): 645 def setUp(self):
572 super(GitShallowTarballNamingTest, self).setUp() 646 super(GitShallowTarballNamingTest, self).setUp()
573 self.recipe_url = "git://git.openembedded.org/bitbake" 647 self.recipe_url = "git://git.openembedded.org/bitbake;branch=master;protocol=https"
574 self.recipe_tarball = "gitshallow_git.openembedded.org.bitbake_82ea737-1_master.tar.gz" 648 self.recipe_tarball = "gitshallow_git.openembedded.org.bitbake_82ea737-1_master.tar.gz"
575 self.mirror_url = "git://github.com/openembedded/bitbake.git" 649 self.mirror_url = "git://github.com/openembedded/bitbake.git;protocol=https;branch=master"
576 self.mirror_tarball = "gitshallow_github.com.openembedded.bitbake.git_82ea737-1_master.tar.gz" 650 self.mirror_tarball = "gitshallow_github.com.openembedded.bitbake.git_82ea737-1_master.tar.gz"
577 651
578 self.d.setVar('BB_GIT_SHALLOW', '1') 652 self.d.setVar('BB_GIT_SHALLOW', '1')
@@ -580,7 +654,7 @@ class GitShallowTarballNamingTest(FetcherTest):
580 self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40') 654 self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40')
581 655
582 def setup_mirror_rewrite(self): 656 def setup_mirror_rewrite(self):
583 self.d.setVar("PREMIRRORS", self.recipe_url + " " + self.mirror_url + " \n") 657 self.d.setVar("PREMIRRORS", self.recipe_url + " " + self.mirror_url)
584 658
585 @skipIfNoNetwork() 659 @skipIfNoNetwork()
586 def test_that_the_tarball_is_named_after_recipe_url_when_no_mirroring_is_used(self): 660 def test_that_the_tarball_is_named_after_recipe_url_when_no_mirroring_is_used(self):
@@ -602,6 +676,39 @@ class GitShallowTarballNamingTest(FetcherTest):
602 self.assertIn(self.mirror_tarball, dir) 676 self.assertIn(self.mirror_tarball, dir)
603 677
604 678
679class CleanTarballTest(FetcherTest):
680 def setUp(self):
681 super(CleanTarballTest, self).setUp()
682 self.recipe_url = "git://git.openembedded.org/bitbake;protocol=https"
683 self.recipe_tarball = "git2_git.openembedded.org.bitbake.tar.gz"
684
685 self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '1')
686 self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40')
687
688 @skipIfNoNetwork()
689 def test_that_the_tarball_contents_does_not_leak_info(self):
690 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
691
692 fetcher.download()
693
694 fetcher.unpack(self.unpackdir)
695 mtime = bb.process.run('git log --all -1 --format=%ct',
696 cwd=os.path.join(self.unpackdir, 'git'))
697 self.assertEqual(len(mtime), 2)
698 mtime = int(mtime[0])
699
700 archive = tarfile.open(os.path.join(self.dldir, self.recipe_tarball))
701 self.assertNotEqual(len(archive.members), 0)
702 for member in archive.members:
703 if member.name == ".":
704 continue
705 self.assertEqual(member.uname, 'oe', "user name for %s differs" % member.name)
706 self.assertEqual(member.uid, 0, "uid for %s differs" % member.name)
707 self.assertEqual(member.gname, 'oe', "group name for %s differs" % member.name)
708 self.assertEqual(member.gid, 0, "gid for %s differs" % member.name)
709 self.assertEqual(member.mtime, mtime, "mtime for %s differs" % member.name)
710
711
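The reproducibility assertions above can be replayed by hand on any generated mirror tarball; a standalone sketch using only the standard library (the tarball path is illustrative):

    import tarfile

    with tarfile.open("downloads/git2_git.openembedded.org.bitbake.tar.gz") as archive:
        for member in archive.getmembers():
            # A clean tarball shows uid/gid 0, user/group 'oe', and a
            # single mtime equal to the newest commit timestamp.
            print(member.name, member.uid, member.gid,
                  member.uname, member.gname, member.mtime)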
605class FetcherLocalTest(FetcherTest): 712class FetcherLocalTest(FetcherTest):
606 def setUp(self): 713 def setUp(self):
607 def touch(fn): 714 def touch(fn):
@@ -619,6 +726,9 @@ class FetcherLocalTest(FetcherTest):
619 os.makedirs(os.path.join(self.localsrcdir, 'dir', 'subdir')) 726 os.makedirs(os.path.join(self.localsrcdir, 'dir', 'subdir'))
620 touch(os.path.join(self.localsrcdir, 'dir', 'subdir', 'e')) 727 touch(os.path.join(self.localsrcdir, 'dir', 'subdir', 'e'))
621 touch(os.path.join(self.localsrcdir, r'backslash\x2dsystemd-unit.device')) 728 touch(os.path.join(self.localsrcdir, r'backslash\x2dsystemd-unit.device'))
729 bb.process.run('tar cf archive.tar -C dir .', cwd=self.localsrcdir)
730 bb.process.run('tar czf archive.tar.gz -C dir .', cwd=self.localsrcdir)
731 bb.process.run('tar cjf archive.tar.bz2 -C dir .', cwd=self.localsrcdir)
622 self.d.setVar("FILESPATH", self.localsrcdir) 732 self.d.setVar("FILESPATH", self.localsrcdir)
623 733
624 def fetchUnpack(self, uris): 734 def fetchUnpack(self, uris):
@@ -632,6 +742,11 @@ class FetcherLocalTest(FetcherTest):
632 flst.sort() 742 flst.sort()
633 return flst 743 return flst
634 744
745 def test_local_checksum_fails_no_file(self):
746 self.d.setVar("SRC_URI", "file://404")
747 with self.assertRaises(bb.BBHandledException):
748 bb.fetch.get_checksum_file_list(self.d)
749
635 def test_local(self): 750 def test_local(self):
636 tree = self.fetchUnpack(['file://a', 'file://dir/c']) 751 tree = self.fetchUnpack(['file://a', 'file://dir/c'])
637 self.assertEqual(tree, ['a', 'dir/c']) 752 self.assertEqual(tree, ['a', 'dir/c'])
@@ -673,57 +788,58 @@ class FetcherLocalTest(FetcherTest):
673 with self.assertRaises(bb.fetch2.UnpackError): 788 with self.assertRaises(bb.fetch2.UnpackError):
674 self.fetchUnpack(['file://a;subdir=/bin/sh']) 789 self.fetchUnpack(['file://a;subdir=/bin/sh'])
675 790
676 def test_local_gitfetch_usehead(self): 791 def test_local_striplevel(self):
792 tree = self.fetchUnpack(['file://archive.tar;subdir=bar;striplevel=1'])
793 self.assertEqual(tree, ['bar/c', 'bar/d', 'bar/subdir/e'])
794
795 def test_local_striplevel_gzip(self):
796 tree = self.fetchUnpack(['file://archive.tar.gz;subdir=bar;striplevel=1'])
797 self.assertEqual(tree, ['bar/c', 'bar/d', 'bar/subdir/e'])
798
799 def test_local_striplevel_bzip2(self):
800 tree = self.fetchUnpack(['file://archive.tar.bz2;subdir=bar;striplevel=1'])
801 self.assertEqual(tree, ['bar/c', 'bar/d', 'bar/subdir/e'])
802
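The striplevel parameter exercised above behaves like tar's --strip-components, dropping leading path components before unpacking into subdir. An illustrative fetch, mirroring what fetchUnpack() does internally:

    fetcher = bb.fetch.Fetch(['file://archive.tar.gz;subdir=bar;striplevel=1'], self.d)
    fetcher.download()
    fetcher.unpack(self.unpackdir)  # yields bar/c, bar/d, bar/subdir/e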
803 def dummyGitTest(self, suffix):
677 # Create dummy local Git repo 804 # Create dummy local Git repo
678 src_dir = tempfile.mkdtemp(dir=self.tempdir, 805 src_dir = tempfile.mkdtemp(dir=self.tempdir,
679 prefix='gitfetch_localusehead_') 806 prefix='gitfetch_localusehead_')
680 src_dir = os.path.abspath(src_dir) 807 self.gitdir = os.path.abspath(src_dir)
681 bb.process.run("git init", cwd=src_dir) 808 self.git_init()
682 bb.process.run("git commit --allow-empty -m'Dummy commit'", 809 self.git(['commit', '--allow-empty', '-m', 'Dummy commit'])
683 cwd=src_dir)
684 # Use other branch than master 810 # Use other branch than master
685 bb.process.run("git checkout -b my-devel", cwd=src_dir) 811 self.git(['checkout', '-b', 'my-devel'])
686 bb.process.run("git commit --allow-empty -m'Dummy commit 2'", 812 self.git(['commit', '--allow-empty', '-m', 'Dummy commit 2'])
687 cwd=src_dir) 813 orig_rev = self.git(['rev-parse', 'HEAD']).strip()
688 stdout = bb.process.run("git rev-parse HEAD", cwd=src_dir)
689 orig_rev = stdout[0].strip()
690 814
691 # Fetch and check revision 815 # Fetch and check revision
692 self.d.setVar("SRCREV", "AUTOINC") 816 self.d.setVar("SRCREV", "AUTOINC")
693 url = "git://" + src_dir + ";protocol=file;usehead=1" 817 self.d.setVar("__BBSRCREV_SEEN", "1")
818 url = "git://" + self.gitdir + ";branch=master;protocol=file;" + suffix
694 fetcher = bb.fetch.Fetch([url], self.d) 819 fetcher = bb.fetch.Fetch([url], self.d)
695 fetcher.download() 820 fetcher.download()
696 fetcher.unpack(self.unpackdir) 821 fetcher.unpack(self.unpackdir)
697 stdout = bb.process.run("git rev-parse HEAD", 822 unpack_rev = self.git(['rev-parse', 'HEAD'],
698 cwd=os.path.join(self.unpackdir, 'git')) 823 cwd=os.path.join(self.unpackdir, 'git')).strip()
699 unpack_rev = stdout[0].strip()
700 self.assertEqual(orig_rev, unpack_rev) 824 self.assertEqual(orig_rev, unpack_rev)
701 825
826 def test_local_gitfetch_usehead(self):
827 self.dummyGitTest("usehead=1")
828
702 def test_local_gitfetch_usehead_withname(self): 829 def test_local_gitfetch_usehead_withname(self):
703 # Create dummy local Git repo 830 self.dummyGitTest("usehead=1;name=newName")
704 src_dir = tempfile.mkdtemp(dir=self.tempdir,
705 prefix='gitfetch_localusehead_')
706 src_dir = os.path.abspath(src_dir)
707 bb.process.run("git init", cwd=src_dir)
708 bb.process.run("git commit --allow-empty -m'Dummy commit'",
709 cwd=src_dir)
710 # Use other branch than master
711 bb.process.run("git checkout -b my-devel", cwd=src_dir)
712 bb.process.run("git commit --allow-empty -m'Dummy commit 2'",
713 cwd=src_dir)
714 stdout = bb.process.run("git rev-parse HEAD", cwd=src_dir)
715 orig_rev = stdout[0].strip()
716 831
717 # Fetch and check revision 832 def test_local_gitfetch_shared(self):
718 self.d.setVar("SRCREV", "AUTOINC") 833 self.dummyGitTest("usehead=1;name=sharedName")
719 url = "git://" + src_dir + ";protocol=file;usehead=1;name=newName" 834 alt = os.path.join(self.unpackdir, 'git/.git/objects/info/alternates')
720 fetcher = bb.fetch.Fetch([url], self.d) 835 self.assertTrue(os.path.exists(alt))
721 fetcher.download() 836
722 fetcher.unpack(self.unpackdir) 837 def test_local_gitfetch_noshared(self):
723 stdout = bb.process.run("git rev-parse HEAD", 838 self.d.setVar('BB_GIT_NOSHARED', '1')
724 cwd=os.path.join(self.unpackdir, 'git')) 839 self.unpackdir += '_noshared'
725 unpack_rev = stdout[0].strip() 840 self.dummyGitTest("usehead=1;name=noSharedName")
726 self.assertEqual(orig_rev, unpack_rev) 841 alt = os.path.join(self.unpackdir, 'git/.git/objects/info/alternates')
842 self.assertFalse(os.path.exists(alt))
727 843
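The shared/noshared pair above hinges on git's alternates mechanism: a shared unpack borrows objects from the clone in DL_DIR rather than copying them. A small sketch for inspecting that by hand (unpackdir is a placeholder path):

    import os

    alt = os.path.join(unpackdir, 'git/.git/objects/info/alternates')
    if os.path.exists(alt):
        with open(alt) as f:
            print("borrowing objects from:", f.read().strip())
    else:
        print("standalone clone (e.g. BB_GIT_NOSHARED=1)")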
728class FetcherNoNetworkTest(FetcherTest): 844class FetcherNoNetworkTest(FetcherTest):
729 def setUp(self): 845 def setUp(self):
@@ -831,12 +947,12 @@ class FetcherNoNetworkTest(FetcherTest):
831class FetcherNetworkTest(FetcherTest): 947class FetcherNetworkTest(FetcherTest):
832 @skipIfNoNetwork() 948 @skipIfNoNetwork()
833 def test_fetch(self): 949 def test_fetch(self):
834 fetcher = bb.fetch.Fetch(["http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d) 950 fetcher = bb.fetch.Fetch(["https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d)
835 fetcher.download() 951 fetcher.download()
836 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749) 952 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
837 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.1.tar.gz"), 57892) 953 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.1.tar.gz"), 57892)
838 self.d.setVar("BB_NO_NETWORK", "1") 954 self.d.setVar("BB_NO_NETWORK", "1")
839 fetcher = bb.fetch.Fetch(["http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d) 955 fetcher = bb.fetch.Fetch(["https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d)
840 fetcher.download() 956 fetcher.download()
841 fetcher.unpack(self.unpackdir) 957 fetcher.unpack(self.unpackdir)
842 self.assertEqual(len(os.listdir(self.unpackdir + "/bitbake-1.0/")), 9) 958 self.assertEqual(len(os.listdir(self.unpackdir + "/bitbake-1.0/")), 9)
@@ -844,21 +960,22 @@ class FetcherNetworkTest(FetcherTest):
844 960
845 @skipIfNoNetwork() 961 @skipIfNoNetwork()
846 def test_fetch_mirror(self): 962 def test_fetch_mirror(self):
847 self.d.setVar("MIRRORS", "http://.*/.* http://downloads.yoctoproject.org/releases/bitbake") 963 self.d.setVar("MIRRORS", "http://.*/.* https://downloads.yoctoproject.org/releases/bitbake")
848 fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d) 964 fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
849 fetcher.download() 965 fetcher.download()
850 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749) 966 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
851 967
852 @skipIfNoNetwork() 968 @skipIfNoNetwork()
853 def test_fetch_mirror_of_mirror(self): 969 def test_fetch_mirror_of_mirror(self):
854 self.d.setVar("MIRRORS", "http://.*/.* http://invalid2.yoctoproject.org/ \n http://invalid2.yoctoproject.org/.* http://downloads.yoctoproject.org/releases/bitbake") 970 self.d.setVar("MIRRORS", "http://.*/.* http://invalid2.yoctoproject.org/ http://invalid2.yoctoproject.org/.* https://downloads.yoctoproject.org/releases/bitbake")
855 fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d) 971 fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
856 fetcher.download() 972 fetcher.download()
857 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749) 973 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
858 974
859 @skipIfNoNetwork() 975 @skipIfNoNetwork()
860 def test_fetch_file_mirror_of_mirror(self): 976 def test_fetch_file_mirror_of_mirror(self):
861 self.d.setVar("MIRRORS", "http://.*/.* file:///some1where/ \n file:///some1where/.* file://some2where/ \n file://some2where/.* http://downloads.yoctoproject.org/releases/bitbake") 977 self.d.setVar("FILESPATH", ".")
978 self.d.setVar("MIRRORS", "http://.*/.* file:///some1where/ file:///some1where/.* file://some2where/ file://some2where/.* https://downloads.yoctoproject.org/releases/bitbake")
862 fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d) 979 fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
863 os.mkdir(self.dldir + "/some2where") 980 os.mkdir(self.dldir + "/some2where")
864 fetcher.download() 981 fetcher.download()
@@ -866,16 +983,46 @@ class FetcherNetworkTest(FetcherTest):
866 983
867 @skipIfNoNetwork() 984 @skipIfNoNetwork()
868 def test_fetch_premirror(self): 985 def test_fetch_premirror(self):
869 self.d.setVar("PREMIRRORS", "http://.*/.* http://downloads.yoctoproject.org/releases/bitbake") 986 self.d.setVar("PREMIRRORS", "http://.*/.* https://downloads.yoctoproject.org/releases/bitbake")
870 fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d) 987 fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
871 fetcher.download() 988 fetcher.download()
872 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749) 989 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
873 990
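MIRRORS and PREMIRRORS are parsed as one flat, whitespace-separated list of (source-regex, replacement) pairs, which is why the literal "\n" separators in the old test values could simply be dropped. A rough sketch of the pairing logic (an illustration only, not bitbake's implementation):

    import re

    mirrors = "http://.*/.* https://downloads.yoctoproject.org/releases/bitbake".split()
    pairs = list(zip(mirrors[::2], mirrors[1::2]))

    url = "http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"
    for pattern, replacement in pairs:
        if re.match(pattern, url):
            # the fetcher rewrites the URL against `replacement` and retries
            print("would retry via", replacement)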
874 @skipIfNoNetwork() 991 @skipIfNoNetwork()
992 def test_fetch_specify_downloadfilename(self):
993 fetcher = bb.fetch.Fetch(["https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz;downloadfilename=bitbake-v1.0.0.tar.gz"], self.d)
994 fetcher.download()
995 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-v1.0.0.tar.gz"), 57749)
996
997 @skipIfNoNetwork()
998 def test_fetch_premirror_specify_downloadfilename_regex_uri(self):
999 self.d.setVar("PREMIRRORS", "http://.*/.* https://downloads.yoctoproject.org/releases/bitbake/")
1000 fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/1.0.tar.gz;downloadfilename=bitbake-1.0.tar.gz"], self.d)
1001 fetcher.download()
1002 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
1003
1004 @skipIfNoNetwork()
1005 # BZ13039
1006 def test_fetch_premirror_specify_downloadfilename_specific_uri(self):
1007 self.d.setVar("PREMIRRORS", "http://invalid.yoctoproject.org/releases/bitbake https://downloads.yoctoproject.org/releases/bitbake")
1008 fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/1.0.tar.gz;downloadfilename=bitbake-1.0.tar.gz"], self.d)
1009 fetcher.download()
1010 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
1011
1012 @skipIfNoNetwork()
1013 def test_fetch_premirror_use_downloadfilename_to_fetch(self):
1014 # Ensure downloadfilename is used when fetching from premirror.
1015 self.d.setVar("PREMIRRORS", "http://.*/.* https://downloads.yoctoproject.org/releases/bitbake")
1016 fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz;downloadfilename=bitbake-1.0.tar.gz"], self.d)
1017 fetcher.download()
1018 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
1019
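The downloadfilename tests added above pin down that the URL parameter renames the artifact in DL_DIR, both on a direct fetch and when a premirror actually serves the file. A minimal sketch of the same usage, assuming a datastore `d` configured the way FetcherTest.setUp() configures one:

    import bb.fetch

    url = ("https://downloads.yoctoproject.org/releases/bitbake/"
           "bitbake-1.0.tar.gz;downloadfilename=bitbake-v1.0.0.tar.gz")
    fetcher = bb.fetch.Fetch([url], d)
    fetcher.download()
    # The tarball is stored under the requested name, not the upstream basename:
    # ${DL_DIR}/bitbake-v1.0.0.tar.gz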
1020 @skipIfNoNetwork()
875 def gitfetcher(self, url1, url2): 1021 def gitfetcher(self, url1, url2):
876 def checkrevision(self, fetcher): 1022 def checkrevision(self, fetcher):
877 fetcher.unpack(self.unpackdir) 1023 fetcher.unpack(self.unpackdir)
878 revision = bb.process.run("git rev-parse HEAD", shell=True, cwd=self.unpackdir + "/git")[0].strip() 1024 revision = self.git(['rev-parse', 'HEAD'],
1025 cwd=os.path.join(self.unpackdir, 'git')).strip()
879 self.assertEqual(revision, "270a05b0b4ba0959fe0624d2a4885d7b70426da5") 1026 self.assertEqual(revision, "270a05b0b4ba0959fe0624d2a4885d7b70426da5")
880 1027
881 self.d.setVar("BB_GENERATE_MIRROR_TARBALLS", "1") 1028 self.d.setVar("BB_GENERATE_MIRROR_TARBALLS", "1")
@@ -893,25 +1040,25 @@ class FetcherNetworkTest(FetcherTest):
893 1040
894 @skipIfNoNetwork() 1041 @skipIfNoNetwork()
895 def test_gitfetch(self): 1042 def test_gitfetch(self):
896 url1 = url2 = "git://git.openembedded.org/bitbake" 1043 url1 = url2 = "git://git.openembedded.org/bitbake;branch=master;protocol=https"
897 self.gitfetcher(url1, url2) 1044 self.gitfetcher(url1, url2)
898 1045
899 @skipIfNoNetwork() 1046 @skipIfNoNetwork()
900 def test_gitfetch_goodsrcrev(self): 1047 def test_gitfetch_goodsrcrev(self):
901 # SRCREV is set but matches rev= parameter 1048 # SRCREV is set but matches rev= parameter
902 url1 = url2 = "git://git.openembedded.org/bitbake;rev=270a05b0b4ba0959fe0624d2a4885d7b70426da5" 1049 url1 = url2 = "git://git.openembedded.org/bitbake;rev=270a05b0b4ba0959fe0624d2a4885d7b70426da5;branch=master;protocol=https"
903 self.gitfetcher(url1, url2) 1050 self.gitfetcher(url1, url2)
904 1051
905 @skipIfNoNetwork() 1052 @skipIfNoNetwork()
906 def test_gitfetch_badsrcrev(self): 1053 def test_gitfetch_badsrcrev(self):
907 # SRCREV is set but does not match rev= parameter 1054 # SRCREV is set but does not match rev= parameter
908 url1 = url2 = "git://git.openembedded.org/bitbake;rev=dead05b0b4ba0959fe0624d2a4885d7b70426da5" 1055 url1 = url2 = "git://git.openembedded.org/bitbake;rev=dead05b0b4ba0959fe0624d2a4885d7b70426da5;branch=master;protocol=https"
909 self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2) 1056 self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2)
910 1057
911 @skipIfNoNetwork() 1058 @skipIfNoNetwork()
912 def test_gitfetch_tagandrev(self): 1059 def test_gitfetch_tagandrev(self):
913 # SRCREV is set but does not match rev= parameter 1060 # SRCREV is set but does not match rev= parameter
914 url1 = url2 = "git://git.openembedded.org/bitbake;rev=270a05b0b4ba0959fe0624d2a4885d7b70426da5;tag=270a05b0b4ba0959fe0624d2a4885d7b70426da5" 1061 url1 = url2 = "git://git.openembedded.org/bitbake;rev=270a05b0b4ba0959fe0624d2a4885d7b70426da5;tag=270a05b0b4ba0959fe0624d2a4885d7b70426da5;protocol=https"
915 self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2) 1062 self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2)
916 1063
917 @skipIfNoNetwork() 1064 @skipIfNoNetwork()
@@ -920,7 +1067,7 @@ class FetcherNetworkTest(FetcherTest):
920 # `usehead=1' and instead fetch the specified SRCREV. See 1067 # `usehead=1' and instead fetch the specified SRCREV. See
921 # test_local_gitfetch_usehead() for a positive use of the usehead 1068 # test_local_gitfetch_usehead() for a positive use of the usehead
922 # feature. 1069 # feature.
923 url = "git://git.openembedded.org/bitbake;usehead=1" 1070 url = "git://git.openembedded.org/bitbake;usehead=1;branch=master;protocol=https"
924 self.assertRaises(bb.fetch.ParameterError, self.gitfetcher, url, url) 1071 self.assertRaises(bb.fetch.ParameterError, self.gitfetcher, url, url)
925 1072
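As the comment above spells out, usehead=1 only makes sense for local repositories, so the network git fetcher must reject it rather than silently ignore SRCREV. A sketch of the failure mode under test, again assuming a datastore `d` as in these tests:

    import bb.fetch

    url = "git://git.openembedded.org/bitbake;usehead=1;branch=master;protocol=https"
    try:
        bb.fetch.Fetch([url], d).download()
    except bb.fetch.ParameterError as exc:
        print("rejected as expected:", exc)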
926 @skipIfNoNetwork() 1073 @skipIfNoNetwork()
@@ -929,38 +1076,38 @@ class FetcherNetworkTest(FetcherTest):
929 # `usehead=1' and instead fetch the specified SRCREV. See 1076 # `usehead=1' and instead fetch the specified SRCREV. See
930 # test_local_gitfetch_usehead() for a positive use of the usehead 1077 # test_local_gitfetch_usehead() for a positive use of the usehead
931 # feature. 1078 # feature.
932 url = "git://git.openembedded.org/bitbake;usehead=1;name=newName" 1079 url = "git://git.openembedded.org/bitbake;usehead=1;name=newName;branch=master;protocol=https"
933 self.assertRaises(bb.fetch.ParameterError, self.gitfetcher, url, url) 1080 self.assertRaises(bb.fetch.ParameterError, self.gitfetcher, url, url)
934 1081
935 @skipIfNoNetwork() 1082 @skipIfNoNetwork()
936 def test_gitfetch_finds_local_tarball_for_mirrored_url_when_previous_downloaded_by_the_recipe_url(self): 1083 def test_gitfetch_finds_local_tarball_for_mirrored_url_when_previous_downloaded_by_the_recipe_url(self):
937 recipeurl = "git://git.openembedded.org/bitbake" 1084 recipeurl = "git://git.openembedded.org/bitbake;branch=master;protocol=https"
938 mirrorurl = "git://someserver.org/bitbake" 1085 mirrorurl = "git://someserver.org/bitbake;branch=master;protocol=https"
939 self.d.setVar("PREMIRRORS", "git://someserver.org/bitbake git://git.openembedded.org/bitbake \n") 1086 self.d.setVar("PREMIRRORS", "git://someserver.org/bitbake git://git.openembedded.org/bitbake")
940 self.gitfetcher(recipeurl, mirrorurl) 1087 self.gitfetcher(recipeurl, mirrorurl)
941 1088
942 @skipIfNoNetwork() 1089 @skipIfNoNetwork()
943 def test_gitfetch_finds_local_tarball_when_previous_downloaded_from_a_premirror(self): 1090 def test_gitfetch_finds_local_tarball_when_previous_downloaded_from_a_premirror(self):
944 recipeurl = "git://someserver.org/bitbake" 1091 recipeurl = "git://someserver.org/bitbake;branch=master;protocol=https"
945 self.d.setVar("PREMIRRORS", "git://someserver.org/bitbake git://git.openembedded.org/bitbake \n") 1092 self.d.setVar("PREMIRRORS", "git://someserver.org/bitbake git://git.openembedded.org/bitbake")
946 self.gitfetcher(recipeurl, recipeurl) 1093 self.gitfetcher(recipeurl, recipeurl)
947 1094
948 @skipIfNoNetwork() 1095 @skipIfNoNetwork()
949 def test_gitfetch_finds_local_repository_when_premirror_rewrites_the_recipe_url(self): 1096 def test_gitfetch_finds_local_repository_when_premirror_rewrites_the_recipe_url(self):
950 realurl = "git://git.openembedded.org/bitbake" 1097 realurl = "https://git.openembedded.org/bitbake"
951 recipeurl = "git://someserver.org/bitbake" 1098 recipeurl = "git://someserver.org/bitbake;protocol=https"
952 self.sourcedir = self.unpackdir.replace("unpacked", "sourcemirror.git") 1099 self.sourcedir = self.unpackdir.replace("unpacked", "sourcemirror.git")
953 os.chdir(self.tempdir) 1100 os.chdir(self.tempdir)
954 bb.process.run("git clone %s %s 2> /dev/null" % (realurl, self.sourcedir), shell=True) 1101 self.git(['clone', realurl, self.sourcedir], cwd=self.tempdir)
955 self.d.setVar("PREMIRRORS", "%s git://%s;protocol=file \n" % (recipeurl, self.sourcedir)) 1102 self.d.setVar("PREMIRRORS", "%s git://%s;protocol=file" % (recipeurl, self.sourcedir))
956 self.gitfetcher(recipeurl, recipeurl) 1103 self.gitfetcher(recipeurl, recipeurl)
957 1104
958 @skipIfNoNetwork() 1105 @skipIfNoNetwork()
959 def test_git_submodule(self): 1106 def test_git_submodule(self):
960 # URL with ssh submodules 1107 # URL with ssh submodules
961 url = "gitsm://git.yoctoproject.org/git-submodule-test;branch=ssh-gitsm-tests;rev=049da4a6cb198d7c0302e9e8b243a1443cb809a7" 1108 url = "gitsm://git.yoctoproject.org/git-submodule-test;branch=ssh-gitsm-tests;rev=049da4a6cb198d7c0302e9e8b243a1443cb809a7;branch=master;protocol=https"
962 # Original URL (comment this if you have ssh access to git.yoctoproject.org) 1109 # Original URL (comment this if you have ssh access to git.yoctoproject.org)
963 url = "gitsm://git.yoctoproject.org/git-submodule-test;branch=master;rev=a2885dd7d25380d23627e7544b7bbb55014b16ee" 1110 url = "gitsm://git.yoctoproject.org/git-submodule-test;branch=master;rev=a2885dd7d25380d23627e7544b7bbb55014b16ee;branch=master;protocol=https"
964 fetcher = bb.fetch.Fetch([url], self.d) 1111 fetcher = bb.fetch.Fetch([url], self.d)
965 fetcher.download() 1112 fetcher.download()
966 # Previous cwd has been deleted 1113 # Previous cwd has been deleted
@@ -977,10 +1124,29 @@ class FetcherNetworkTest(FetcherTest):
977 self.assertTrue(os.path.exists(os.path.join(repo_path, 'bitbake-gitsm-test1', 'bitbake')), msg='submodule of submodule missing') 1124 self.assertTrue(os.path.exists(os.path.join(repo_path, 'bitbake-gitsm-test1', 'bitbake')), msg='submodule of submodule missing')
978 1125
979 @skipIfNoNetwork() 1126 @skipIfNoNetwork()
1127 def test_git_submodule_restricted_network_premirrors(self):
1128 # This test ensures that premirrors are tried in a restricted network,
1129 # i.e. when BB_ALLOWED_NETWORKS does not contain the domain the url uses
1130 url = "gitsm://github.com/grpc/grpc.git;protocol=https;name=grpc;branch=v1.60.x;rev=0ef13a7555dbaadd4633399242524129eef5e231"
1131 # Create a download directory to be used as a premirror later
1132 tempdir = tempfile.mkdtemp(prefix="bitbake-fetch-")
1133 dl_premirror = os.path.join(tempdir, "download-premirror")
1134 os.mkdir(dl_premirror)
1135 self.d.setVar("DL_DIR", dl_premirror)
1136 fetcher = bb.fetch.Fetch([url], self.d)
1137 fetcher.download()
1138 # Now use the premirror in a restricted network
1139 self.d.setVar("DL_DIR", self.dldir)
1140 self.d.setVar("PREMIRRORS", "gitsm://.*/.* gitsm://%s/git2/MIRRORNAME;protocol=file" % dl_premirror)
1141 self.d.setVar("BB_ALLOWED_NETWORKS", "*.some.domain")
1142 fetcher = bb.fetch.Fetch([url], self.d)
1143 fetcher.download()
1144
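The crucial point in the test above is that BB_ALLOWED_NETWORKS blocks github.com while the premirror is a local file:// URL that needs no network access at all, with MIRRORNAME standing in for the mirror-encoded repository name. In sketch form, written with a bare datastore `d` but reusing the test's own `url` and `dl_premirror`:

    d.setVar("PREMIRRORS",
             "gitsm://.*/.* gitsm://%s/git2/MIRRORNAME;protocol=file" % dl_premirror)
    d.setVar("BB_ALLOWED_NETWORKS", "*.some.domain")  # github.com is NOT allowed
    bb.fetch.Fetch([url], d).download()               # still succeeds, via the premirror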
1145 @skipIfNoNetwork()
980 def test_git_submodule_dbus_broker(self): 1146 def test_git_submodule_dbus_broker(self):
981 # The following external repositories have shown failures in fetch and unpack operations 1147 # The following external repositories have shown failures in fetch and unpack operations
982 # We want to avoid regressions! 1148 # We want to avoid regressions!
983 url = "gitsm://github.com/bus1/dbus-broker;protocol=git;rev=fc874afa0992d0c75ec25acb43d344679f0ee7d2;branch=main" 1149 url = "gitsm://github.com/bus1/dbus-broker;protocol=https;rev=fc874afa0992d0c75ec25acb43d344679f0ee7d2;branch=main"
984 fetcher = bb.fetch.Fetch([url], self.d) 1150 fetcher = bb.fetch.Fetch([url], self.d)
985 fetcher.download() 1151 fetcher.download()
986 # Previous cwd has been deleted 1152 # Previous cwd has been deleted
@@ -996,7 +1162,7 @@ class FetcherNetworkTest(FetcherTest):
996 1162
997 @skipIfNoNetwork() 1163 @skipIfNoNetwork()
998 def test_git_submodule_CLI11(self): 1164 def test_git_submodule_CLI11(self):
999 url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=bd4dc911847d0cde7a6b41dfa626a85aab213baf" 1165 url = "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=bd4dc911847d0cde7a6b41dfa626a85aab213baf;branch=main"
1000 fetcher = bb.fetch.Fetch([url], self.d) 1166 fetcher = bb.fetch.Fetch([url], self.d)
1001 fetcher.download() 1167 fetcher.download()
1002 # Previous cwd has been deleted 1168 # Previous cwd has been deleted
@@ -1011,12 +1177,12 @@ class FetcherNetworkTest(FetcherTest):
1011 @skipIfNoNetwork() 1177 @skipIfNoNetwork()
1012 def test_git_submodule_update_CLI11(self): 1178 def test_git_submodule_update_CLI11(self):
1013 """ Prevent regression on update detection not finding missing submodule, or modules without needed commits """ 1179 """ Prevent regression on update detection not finding missing submodule, or modules without needed commits """
1014 url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=cf6a99fa69aaefe477cc52e3ef4a7d2d7fa40714" 1180 url = "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=cf6a99fa69aaefe477cc52e3ef4a7d2d7fa40714;branch=main"
1015 fetcher = bb.fetch.Fetch([url], self.d) 1181 fetcher = bb.fetch.Fetch([url], self.d)
1016 fetcher.download() 1182 fetcher.download()
1017 1183
1018 # CLI11 that pulls in a newer nlohmann-json 1184 # CLI11 that pulls in a newer nlohmann-json
1019 url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=49ac989a9527ee9bb496de9ded7b4872c2e0e5ca" 1185 url = "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=49ac989a9527ee9bb496de9ded7b4872c2e0e5ca;branch=main"
1020 fetcher = bb.fetch.Fetch([url], self.d) 1186 fetcher = bb.fetch.Fetch([url], self.d)
1021 fetcher.download() 1187 fetcher.download()
1022 # Previous cwd has been deleted 1188 # Previous cwd has been deleted
@@ -1030,7 +1196,7 @@ class FetcherNetworkTest(FetcherTest):
1030 1196
1031 @skipIfNoNetwork() 1197 @skipIfNoNetwork()
1032 def test_git_submodule_aktualizr(self): 1198 def test_git_submodule_aktualizr(self):
1033 url = "gitsm://github.com/advancedtelematic/aktualizr;branch=master;protocol=git;rev=d00d1a04cc2366d1a5f143b84b9f507f8bd32c44" 1199 url = "gitsm://github.com/advancedtelematic/aktualizr;branch=master;protocol=https;rev=d00d1a04cc2366d1a5f143b84b9f507f8bd32c44"
1034 fetcher = bb.fetch.Fetch([url], self.d) 1200 fetcher = bb.fetch.Fetch([url], self.d)
1035 fetcher.download() 1201 fetcher.download()
1036 # Previous cwd has been deleted 1202 # Previous cwd has been deleted
@@ -1050,7 +1216,7 @@ class FetcherNetworkTest(FetcherTest):
1050 """ Prevent regression on deeply nested submodules not being checked out properly, even though they were fetched. """ 1216 """ Prevent regression on deeply nested submodules not being checked out properly, even though they were fetched. """
1051 1217
1052 # This repository also has submodules where the module (name), path and url do not align 1218 # This repository also has submodules where the module (name), path and url do not align
1053 url = "gitsm://github.com/azure/iotedge.git;protocol=git;rev=d76e0316c6f324345d77c48a83ce836d09392699" 1219 url = "gitsm://github.com/azure/iotedge.git;protocol=https;rev=d76e0316c6f324345d77c48a83ce836d09392699;branch=main"
1054 fetcher = bb.fetch.Fetch([url], self.d) 1220 fetcher = bb.fetch.Fetch([url], self.d)
1055 fetcher.download() 1221 fetcher.download()
1056 # Previous cwd has been deleted 1222 # Previous cwd has been deleted
@@ -1073,6 +1239,15 @@ class FetcherNetworkTest(FetcherTest):
1073 self.assertTrue(os.path.exists(os.path.join(repo_path, 'edgelet/hsm-sys/azure-iot-hsm-c/deps/utpm/deps/c-utility/testtools/umock-c/deps/ctest/README.md')), msg='Missing submodule checkout') 1239 self.assertTrue(os.path.exists(os.path.join(repo_path, 'edgelet/hsm-sys/azure-iot-hsm-c/deps/utpm/deps/c-utility/testtools/umock-c/deps/ctest/README.md')), msg='Missing submodule checkout')
1074 self.assertTrue(os.path.exists(os.path.join(repo_path, 'edgelet/hsm-sys/azure-iot-hsm-c/deps/utpm/deps/c-utility/testtools/umock-c/deps/testrunner/readme.md')), msg='Missing submodule checkout') 1240 self.assertTrue(os.path.exists(os.path.join(repo_path, 'edgelet/hsm-sys/azure-iot-hsm-c/deps/utpm/deps/c-utility/testtools/umock-c/deps/testrunner/readme.md')), msg='Missing submodule checkout')
1075 1241
1242 @skipIfNoNetwork()
1243 def test_git_submodule_reference_to_parent(self):
1244 self.recipe_url = "gitsm://github.com/gflags/gflags.git;protocol=https;branch=master"
1245 self.d.setVar("SRCREV", "14e1138441bbbb584160cb1c0a0426ec1bac35f1")
1246 with Timeout(60):
1247 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
1248 with self.assertRaises(bb.fetch2.FetchError):
1249 fetcher.download()
1250
1076class SVNTest(FetcherTest): 1251class SVNTest(FetcherTest):
1077 def skipIfNoSvn(): 1252 def skipIfNoSvn():
1078 import shutil 1253 import shutil
@@ -1107,8 +1282,9 @@ class SVNTest(FetcherTest):
1107 cwd=repo_dir) 1282 cwd=repo_dir)
1108 1283
1109 bb.process.run("svn co %s svnfetch_co" % self.repo_url, cwd=self.tempdir) 1284 bb.process.run("svn co %s svnfetch_co" % self.repo_url, cwd=self.tempdir)
1110 # GitHub will emulate SVN. Use this to check if we're downloading... 1285 # GitHub won't emulate SVN anymore (see https://github.blog/2023-01-20-sunsetting-subversion-support/)
1111 bb.process.run("svn propset svn:externals 'bitbake svn://vcs.pcre.org/pcre2/code' .", 1286 # Use a still-accessible svn repo (trunk only, to keep the download small)
1287 bb.process.run("svn propset svn:externals 'bitbake https://svn.apache.org/repos/asf/serf/trunk' .",
1112 cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk')) 1288 cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk'))
1113 bb.process.run("svn commit --non-interactive -m 'Add external'", 1289 bb.process.run("svn commit --non-interactive -m 'Add external'",
1114 cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk')) 1290 cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk'))
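These fixture commands publish an svn:externals property so the two tests below can check both behaviours of the fetcher. Whether externals are followed is controlled per URL; a hedged sketch of the two forms (host and module are hypothetical, externals=allowed being the parameter the svn fetcher accepts for this):

    # Externals are ignored unless explicitly allowed:
    url_no_externals = "svn://some.server/repo;module=trunk;protocol=http;rev=2"
    url_externals = "svn://some.server/repo;module=trunk;protocol=http;externals=allowed;rev=2"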
@@ -1136,8 +1312,8 @@ class SVNTest(FetcherTest):
1136 1312
1137 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk')), msg="Missing trunk") 1313 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk')), msg="Missing trunk")
1138 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk', 'README.md')), msg="Missing contents") 1314 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk', 'README.md')), msg="Missing contents")
1139 self.assertFalse(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/trunk')), msg="External dir should NOT exist") 1315 self.assertFalse(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/protocols')), msg="External dir should NOT exist")
1140 self.assertFalse(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/trunk', 'README')), msg="External README should NOT exist") 1316 self.assertFalse(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/protocols', 'fcgi_buckets.h')), msg="External fcgi_buckets.h should NOT exist")
1141 1317
1142 @skipIfNoSvn() 1318 @skipIfNoSvn()
1143 def test_external_svn(self): 1319 def test_external_svn(self):
@@ -1150,49 +1326,49 @@ class SVNTest(FetcherTest):
1150 1326
1151 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk')), msg="Missing trunk") 1327 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk')), msg="Missing trunk")
1152 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk', 'README.md')), msg="Missing contents") 1328 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk', 'README.md')), msg="Missing contents")
1153 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/trunk')), msg="External dir should exist") 1329 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/protocols')), msg="External dir should exist")
1154 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/trunk', 'README')), msg="External README should exist") 1330 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/protocols', 'fcgi_buckets.h')), msg="External fcgi_buckets.h should exist")
1155 1331
1156class TrustedNetworksTest(FetcherTest): 1332class TrustedNetworksTest(FetcherTest):
1157 def test_trusted_network(self): 1333 def test_trusted_network(self):
1158 # Ensure trusted_network returns True when the host IS in the list. 1334 # Ensure trusted_network returns True when the host IS in the list.
1159 url = "git://Someserver.org/foo;rev=1" 1335 url = "git://Someserver.org/foo;rev=1;branch=master"
1160 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org someserver.org server2.org server3.org") 1336 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org someserver.org server2.org server3.org")
1161 self.assertTrue(bb.fetch.trusted_network(self.d, url)) 1337 self.assertTrue(bb.fetch.trusted_network(self.d, url))
1162 1338
1163 def test_wild_trusted_network(self): 1339 def test_wild_trusted_network(self):
1164 # Ensure trusted_network returns True when the *.host IS in the list. 1340 # Ensure trusted_network returns True when the *.host IS in the list.
1165 url = "git://Someserver.org/foo;rev=1" 1341 url = "git://Someserver.org/foo;rev=1;branch=master"
1166 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org") 1342 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org")
1167 self.assertTrue(bb.fetch.trusted_network(self.d, url)) 1343 self.assertTrue(bb.fetch.trusted_network(self.d, url))
1168 1344
1169 def test_prefix_wild_trusted_network(self): 1345 def test_prefix_wild_trusted_network(self):
1170 # Ensure trusted_network returns True when the prefix matches *.host. 1346 # Ensure trusted_network returns True when the prefix matches *.host.
1171 url = "git://git.Someserver.org/foo;rev=1" 1347 url = "git://git.Someserver.org/foo;rev=1;branch=master"
1172 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org") 1348 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org")
1173 self.assertTrue(bb.fetch.trusted_network(self.d, url)) 1349 self.assertTrue(bb.fetch.trusted_network(self.d, url))
1174 1350
1175 def test_two_prefix_wild_trusted_network(self): 1351 def test_two_prefix_wild_trusted_network(self):
1176 # Ensure trusted_network returns True when the prefix matches *.host. 1352 # Ensure trusted_network returns True when the prefix matches *.host.
1177 url = "git://something.git.Someserver.org/foo;rev=1" 1353 url = "git://something.git.Someserver.org/foo;rev=1;branch=master"
1178 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org") 1354 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org")
1179 self.assertTrue(bb.fetch.trusted_network(self.d, url)) 1355 self.assertTrue(bb.fetch.trusted_network(self.d, url))
1180 1356
1181 def test_port_trusted_network(self): 1357 def test_port_trusted_network(self):
1182 # Ensure trusted_network returns True, even if the url specifies a port. 1358 # Ensure trusted_network returns True, even if the url specifies a port.
1183 url = "git://someserver.org:8080/foo;rev=1" 1359 url = "git://someserver.org:8080/foo;rev=1;branch=master"
1184 self.d.setVar("BB_ALLOWED_NETWORKS", "someserver.org") 1360 self.d.setVar("BB_ALLOWED_NETWORKS", "someserver.org")
1185 self.assertTrue(bb.fetch.trusted_network(self.d, url)) 1361 self.assertTrue(bb.fetch.trusted_network(self.d, url))
1186 1362
1187 def test_untrusted_network(self): 1363 def test_untrusted_network(self):
1188 # Ensure trusted_network returns False when the host is NOT in the list. 1364 # Ensure trusted_network returns False when the host is NOT in the list.
1189 url = "git://someserver.org/foo;rev=1" 1365 url = "git://someserver.org/foo;rev=1;branch=master"
1190 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org server2.org server3.org") 1366 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org server2.org server3.org")
1191 self.assertFalse(bb.fetch.trusted_network(self.d, url)) 1367 self.assertFalse(bb.fetch.trusted_network(self.d, url))
1192 1368
1193 def test_wild_untrusted_network(self): 1369 def test_wild_untrusted_network(self):
1194 # Ensure trusted_network returns False when the host is NOT in the list. 1370 # Ensure trusted_network returns False when the host is NOT in the list.
1195 url = "git://*.someserver.org/foo;rev=1" 1371 url = "git://*.someserver.org/foo;rev=1;branch=master"
1196 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org server2.org server3.org") 1372 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org server2.org server3.org")
1197 self.assertFalse(bb.fetch.trusted_network(self.d, url)) 1373 self.assertFalse(bb.fetch.trusted_network(self.d, url))
1198 1374
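Taken together, these cases pin down trusted_network()'s matching rules: the host comparison is case-insensitive, any port is ignored, and a leading "*." entry in BB_ALLOWED_NETWORKS matches any subdomain prefix, while a wildcard in the URL itself never matches. A sketch, assuming a datastore `d`:

    import bb.fetch

    d.setVar("BB_ALLOWED_NETWORKS", "*.someserver.org")
    bb.fetch.trusted_network(d, "git://git.Someserver.org/foo;rev=1;branch=master")  # True
    bb.fetch.trusted_network(d, "git://otherserver.org/foo;rev=1;branch=master")     # False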
@@ -1202,14 +1378,17 @@ class URLHandle(unittest.TestCase):
1202 "http://www.google.com/index.html" : ('http', 'www.google.com', '/index.html', '', '', {}), 1378 "http://www.google.com/index.html" : ('http', 'www.google.com', '/index.html', '', '', {}),
1203 "cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'}), 1379 "cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'}),
1204 "cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', collections.OrderedDict([('tag', 'V0-99-81'), ('module', 'familiar/dist/ipkg')])), 1380 "cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', collections.OrderedDict([('tag', 'V0-99-81'), ('module', 'familiar/dist/ipkg')])),
1205 "git://git.openembedded.org/bitbake;branch=@foo" : ('git', 'git.openembedded.org', '/bitbake', '', '', {'branch': '@foo'}), 1381 "git://git.openembedded.org/bitbake;branch=@foo;protocol=https" : ('git', 'git.openembedded.org', '/bitbake', '', '', {'branch': '@foo', 'protocol' : 'https'}),
1206 "file://somelocation;someparam=1": ('file', '', 'somelocation', '', '', {'someparam': '1'}), 1382 "file://somelocation;someparam=1": ('file', '', 'somelocation', '', '', {'someparam': '1'}),
1383 "https://somesite.com/somerepo.git;user=anyUser:idtoken=1234" : ('https', 'somesite.com', '/somerepo.git', '', '', {'user': 'anyUser:idtoken=1234'}),
1384 r'git://s.o-me_ONE:!#$%^&*()-_={}[]\|:?,.<>~`@git.openembedded.org/bitbake;branch=main;protocol=https': ('git', 'git.openembedded.org', '/bitbake', 's.o-me_ONE', r'!#$%^&*()-_={}[]\|:?,.<>~`', {'branch': 'main', 'protocol' : 'https'}),
1207 } 1385 }
1208 # we require a pathname to encodeurl but users can still pass such urls to 1386 # we require a pathname to encodeurl but users can still pass such urls to
1209 # decodeurl and we need to handle them 1387 # decodeurl and we need to handle them
1210 decodedata = datatable.copy() 1388 decodedata = datatable.copy()
1211 decodedata.update({ 1389 decodedata.update({
1212 "http://somesite.net;someparam=1": ('http', 'somesite.net', '/', '', '', {'someparam': '1'}), 1390 "http://somesite.net;someparam=1": ('http', 'somesite.net', '/', '', '', {'someparam': '1'}),
1391 "npmsw://some.registry.url;package=@pkg;version=latest": ('npmsw', 'some.registry.url', '/', '', '', {'package': '@pkg', 'version': 'latest'}),
1213 }) 1392 })
1214 1393
1215 def test_decodeurl(self): 1394 def test_decodeurl(self):
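The datatable drives the round-trip under test: decodeurl() splits a URL into a (type, host, path, user, password, params) tuple and encodeurl() rebuilds the string. A sketch of the API, using an entry from the table above:

    from bb.fetch2 import decodeurl, encodeurl

    parts = decodeurl("git://git.openembedded.org/bitbake;branch=@foo;protocol=https")
    print(parts)
    # ('git', 'git.openembedded.org', '/bitbake', '', '', {'branch': '@foo', 'protocol': 'https'})
    print(encodeurl(parts))
    # git://git.openembedded.org/bitbake;branch=@foo;protocol=https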
@@ -1226,37 +1405,39 @@ class FetchLatestVersionTest(FetcherTest):
1226 1405
1227 test_git_uris = { 1406 test_git_uris = {
1228 # version pattern "X.Y.Z" 1407 # version pattern "X.Y.Z"
1229 ("mx-1.0", "git://github.com/clutter-project/mx.git;branch=mx-1.4", "9b1db6b8060bd00b121a692f942404a24ae2960f", "") 1408 ("mx-1.0", "git://github.com/clutter-project/mx.git;branch=mx-1.4;protocol=https", "9b1db6b8060bd00b121a692f942404a24ae2960f", "", "")
1230 : "1.99.4", 1409 : "1.99.4",
1231 # version pattern "vX.Y" 1410 # version pattern "vX.Y"
1232 # mirror of git.infradead.org since network issues interfered with testing 1411 # mirror of git.infradead.org since network issues interfered with testing
1233 ("mtd-utils", "git://git.yoctoproject.org/mtd-utils.git", "ca39eb1d98e736109c64ff9c1aa2a6ecca222d8f", "") 1412 ("mtd-utils", "git://git.yoctoproject.org/mtd-utils.git;branch=master;protocol=https", "ca39eb1d98e736109c64ff9c1aa2a6ecca222d8f", "", "")
1234 : "1.5.0", 1413 : "1.5.0",
1235 # version pattern "pkg_name-X.Y" 1414 # version pattern "pkg_name-X.Y"
1236 # mirror of git://anongit.freedesktop.org/git/xorg/proto/presentproto since network issues interfered with testing 1415 # mirror of git://anongit.freedesktop.org/git/xorg/proto/presentproto since network issues interfered with testing
1237 ("presentproto", "git://git.yoctoproject.org/bbfetchtests-presentproto", "24f3a56e541b0a9e6c6ee76081f441221a120ef9", "") 1416 ("presentproto", "git://git.yoctoproject.org/bbfetchtests-presentproto;branch=master;protocol=https", "24f3a56e541b0a9e6c6ee76081f441221a120ef9", "", "")
1238 : "1.0", 1417 : "1.0",
1239 # version pattern "pkg_name-vX.Y.Z" 1418 # version pattern "pkg_name-vX.Y.Z"
1240 ("dtc", "git://git.qemu.org/dtc.git", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "") 1419 ("dtc", "git://git.yoctoproject.org/bbfetchtests-dtc.git;branch=master;protocol=https", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "", "")
1241 : "1.4.0", 1420 : "1.4.0",
1242 # combination version pattern 1421 # combination version pattern
1243 ("sysprof", "git://gitlab.gnome.org/GNOME/sysprof.git;protocol=https", "cd44ee6644c3641507fb53b8a2a69137f2971219", "") 1422 ("sysprof", "git://gitlab.gnome.org/GNOME/sysprof.git;protocol=https;branch=master", "cd44ee6644c3641507fb53b8a2a69137f2971219", "", "")
1244 : "1.2.0", 1423 : "1.2.0",
1245 ("u-boot-mkimage", "git://git.denx.de/u-boot.git;branch=master;protocol=git", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "") 1424 ("u-boot-mkimage", "git://git.denx.de/u-boot.git;branch=master;protocol=git", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "", "")
1246 : "2014.01", 1425 : "2014.01",
1247 # version pattern "yyyymmdd" 1426 # version pattern "yyyymmdd"
1248 ("mobile-broadband-provider-info", "git://gitlab.gnome.org/GNOME/mobile-broadband-provider-info.git;protocol=https", "4ed19e11c2975105b71b956440acdb25d46a347d", "") 1427 ("mobile-broadband-provider-info", "git://gitlab.gnome.org/GNOME/mobile-broadband-provider-info.git;protocol=https;branch=master", "4ed19e11c2975105b71b956440acdb25d46a347d", "", "")
1249 : "20120614", 1428 : "20120614",
1250 # packages with a valid UPSTREAM_CHECK_GITTAGREGEX 1429 # packages with a valid UPSTREAM_CHECK_GITTAGREGEX
1251 # mirror of git://anongit.freedesktop.org/xorg/driver/xf86-video-omap since network issues interfered with testing 1430 # mirror of git://anongit.freedesktop.org/xorg/driver/xf86-video-omap since network issues interfered with testing
1252 ("xf86-video-omap", "git://git.yoctoproject.org/bbfetchtests-xf86-video-omap", "ae0394e687f1a77e966cf72f895da91840dffb8f", "(?P<pver>(\d+\.(\d\.?)*))") 1431 ("xf86-video-omap", "git://git.yoctoproject.org/bbfetchtests-xf86-video-omap;branch=master;protocol=https", "ae0394e687f1a77e966cf72f895da91840dffb8f", r"(?P<pver>(\d+\.(\d\.?)*))", "")
1253 : "0.4.3", 1432 : "0.4.3",
1254 ("build-appliance-image", "git://git.yoctoproject.org/poky", "b37dd451a52622d5b570183a81583cc34c2ff555", "(?P<pver>(([0-9][\.|_]?)+[0-9]))") 1433 ("build-appliance-image", "git://git.yoctoproject.org/poky;branch=master;protocol=https", "b37dd451a52622d5b570183a81583cc34c2ff555", r"(?P<pver>(([0-9][\.|_]?)+[0-9]))", "")
1255 : "11.0.0", 1434 : "11.0.0",
1256 ("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot", "cd437ecbd8986c894442f8fce1e0061e20f04dee", "chkconfig\-(?P<pver>((\d+[\.\-_]*)+))") 1435 ("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot;protocol=https", "cd437ecbd8986c894442f8fce1e0061e20f04dee", r"chkconfig\-(?P<pver>((\d+[\.\-_]*)+))", "")
1257 : "1.3.59", 1436 : "1.3.59",
1258 ("remake", "git://github.com/rocky/remake.git", "f05508e521987c8494c92d9c2871aec46307d51d", "(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))") 1437 ("remake", "git://github.com/rocky/remake.git;protocol=https;branch=master", "f05508e521987c8494c92d9c2871aec46307d51d", r"(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))", "")
1259 : "3.82+dbg0.9", 1438 : "3.82+dbg0.9",
1439 ("sysdig", "git://github.com/draios/sysdig.git;branch=dev;protocol=https", "4fb6288275f567f63515df0ff0a6518043ecfa9b", r"^(?P<pver>\d+(\.\d+)+)", "10.0.0")
1440 : "0.28.0",
1260 } 1441 }
1261 1442
1262 test_wget_uris = { 1443 test_wget_uris = {
@@ -1272,13 +1453,16 @@ class FetchLatestVersionTest(FetcherTest):
1272 # http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz 1453 # http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz
1273 ("cmake", "/files/v2.8/cmake-2.8.12.1.tar.gz", "", "") 1454 ("cmake", "/files/v2.8/cmake-2.8.12.1.tar.gz", "", "")
1274 : "2.8.12.1", 1455 : "2.8.12.1",
1456 # https://download.gnome.org/sources/libxml2/2.9/libxml2-2.9.14.tar.xz
1457 ("libxml2", "/software/libxml2/2.9/libxml2-2.9.14.tar.xz", "", "")
1458 : "2.10.3",
1275 # 1459 #
1276 # packages with versions only in current directory 1460 # packages with versions only in current directory
1277 # 1461 #
1278 # http://downloads.yoctoproject.org/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2 1462 # https://downloads.yoctoproject.org/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2
1279 ("eglic", "/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2", "", "") 1463 ("eglic", "/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2", "", "")
1280 : "2.19", 1464 : "2.19",
1281 # http://downloads.yoctoproject.org/releases/gnu-config/gnu-config-20120814.tar.bz2 1465 # https://downloads.yoctoproject.org/releases/gnu-config/gnu-config-20120814.tar.bz2
1282 ("gnu-config", "/releases/gnu-config/gnu-config-20120814.tar.bz2", "", "") 1466 ("gnu-config", "/releases/gnu-config/gnu-config-20120814.tar.bz2", "", "")
1283 : "20120814", 1467 : "20120814",
1284 # 1468 #
@@ -1295,12 +1479,18 @@ class FetchLatestVersionTest(FetcherTest):
1295 # 1479 #
1296 # http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2 1480 # http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2
1297 # https://github.com/apple/cups/releases 1481 # https://github.com/apple/cups/releases
1298 ("cups", "/software/1.7.2/cups-1.7.2-source.tar.bz2", "/apple/cups/releases", "(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz") 1482 ("cups", "/software/1.7.2/cups-1.7.2-source.tar.bz2", "/apple/cups/releases", r"(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz")
1299 : "2.0.0", 1483 : "2.0.0",
1300 # http://download.oracle.com/berkeley-db/db-5.3.21.tar.gz 1484 # http://download.oracle.com/berkeley-db/db-5.3.21.tar.gz
1301 # http://ftp.debian.org/debian/pool/main/d/db5.3/ 1485 # http://ftp.debian.org/debian/pool/main/d/db5.3/
1302 ("db", "/berkeley-db/db-5.3.21.tar.gz", "/debian/pool/main/d/db5.3/", "(?P<name>db5\.3_)(?P<pver>\d+(\.\d+)+).+\.orig\.tar\.xz") 1486 ("db", "/berkeley-db/db-5.3.21.tar.gz", "/debian/pool/main/d/db5.3/", r"(?P<name>db5\.3_)(?P<pver>\d+(\.\d+)+).+\.orig\.tar\.xz")
1303 : "5.3.10", 1487 : "5.3.10",
1488 #
1489 # packages where the tarball compression changed in the new version
1490 #
1491 # http://ftp.debian.org/debian/pool/main/m/minicom/minicom_2.7.1.orig.tar.gz
1492 ("minicom", "/debian/pool/main/m/minicom/minicom_2.7.1.orig.tar.gz", "", "")
1493 : "2.8",
1304 } 1494 }
1305 1495
1306 @skipIfNoNetwork() 1496 @skipIfNoNetwork()
@@ -1315,6 +1505,9 @@ class FetchLatestVersionTest(FetcherTest):
1315 self.assertTrue(verstring, msg="Could not find upstream version for %s" % k[0]) 1505 self.assertTrue(verstring, msg="Could not find upstream version for %s" % k[0])
1316 r = bb.utils.vercmp_string(v, verstring) 1506 r = bb.utils.vercmp_string(v, verstring)
1317 self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring)) 1507 self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))
1508 if k[4]:
1509 r = bb.utils.vercmp_string(verstring, k[4])
1510 self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], verstring, k[4]))
1318 1511
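The new fifth tuple element adds an optional upper bound on the version that may be reported: the sysdig entry expects at least 0.28.0 but must stay below 10.0.0, so unrelated larger tags cannot make the check pass vacuously. bb.utils.vercmp_string() follows strcmp conventions (negative, zero, or positive); a sketch:

    import bb.utils

    assert bb.utils.vercmp_string("0.28.0", "10.0.0") < 0  # found version stays below the cap
    assert bb.utils.vercmp_string("1.4.0", "1.4.0") == 0   # equal versions compare as zero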
1319 def test_wget_latest_versionstring(self): 1512 def test_wget_latest_versionstring(self):
1320 testdata = os.path.dirname(os.path.abspath(__file__)) + "/fetch-testdata" 1513 testdata = os.path.dirname(os.path.abspath(__file__)) + "/fetch-testdata"
@@ -1341,17 +1534,14 @@ class FetchLatestVersionTest(FetcherTest):
1341 1534
1342 1535
1343class FetchCheckStatusTest(FetcherTest): 1536class FetchCheckStatusTest(FetcherTest):
1344 test_wget_uris = ["http://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz", 1537 test_wget_uris = ["https://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz",
1345 "http://downloads.yoctoproject.org/releases/sato/sato-engine-0.2.tar.gz", 1538 "https://downloads.yoctoproject.org/releases/sato/sato-engine-0.2.tar.gz",
1346 "http://downloads.yoctoproject.org/releases/sato/sato-engine-0.3.tar.gz", 1539 "https://downloads.yoctoproject.org/releases/sato/sato-engine-0.3.tar.gz",
1347 "https://yoctoproject.org/", 1540 "https://yoctoproject.org/",
1348 "https://yoctoproject.org/documentation", 1541 "https://docs.yoctoproject.org",
1349 "http://downloads.yoctoproject.org/releases/opkg/opkg-0.1.7.tar.gz", 1542 "https://downloads.yoctoproject.org/releases/opkg/opkg-0.1.7.tar.gz",
1350 "http://downloads.yoctoproject.org/releases/opkg/opkg-0.3.0.tar.gz", 1543 "https://downloads.yoctoproject.org/releases/opkg/opkg-0.3.0.tar.gz",
1351 "ftp://sourceware.org/pub/libffi/libffi-1.20.tar.gz", 1544 "ftp://sourceware.org/pub/libffi/libffi-1.20.tar.gz",
1352 "http://ftp.gnu.org/gnu/autoconf/autoconf-2.60.tar.gz",
1353 "https://ftp.gnu.org/gnu/chess/gnuchess-5.08.tar.gz",
1354 "https://ftp.gnu.org/gnu/gmp/gmp-4.0.tar.gz",
1355 # GitHub releases are hosted on Amazon S3, which doesn't support HEAD 1545 # GitHub releases are hosted on Amazon S3, which doesn't support HEAD
1356 "https://github.com/kergoth/tslib/releases/download/1.1/tslib-1.1.tar.xz" 1546 "https://github.com/kergoth/tslib/releases/download/1.1/tslib-1.1.tar.xz"
1357 ] 1547 ]
@@ -1389,7 +1579,7 @@ class GitMakeShallowTest(FetcherTest):
1389 FetcherTest.setUp(self) 1579 FetcherTest.setUp(self)
1390 self.gitdir = os.path.join(self.tempdir, 'gitshallow') 1580 self.gitdir = os.path.join(self.tempdir, 'gitshallow')
1391 bb.utils.mkdirhier(self.gitdir) 1581 bb.utils.mkdirhier(self.gitdir)
1392 bb.process.run('git init', cwd=self.gitdir) 1582 self.git_init()
1393 1583
1394 def assertRefs(self, expected_refs): 1584 def assertRefs(self, expected_refs):
1395 actual_refs = self.git(['for-each-ref', '--format=%(refname)']).splitlines() 1585 actual_refs = self.git(['for-each-ref', '--format=%(refname)']).splitlines()
@@ -1403,13 +1593,6 @@ class GitMakeShallowTest(FetcherTest):
1403 actual_count = len(revs.splitlines()) 1593 actual_count = len(revs.splitlines())
1404 self.assertEqual(expected_count, actual_count, msg='Object count `%d` is not the expected `%d`' % (actual_count, expected_count)) 1594 self.assertEqual(expected_count, actual_count, msg='Object count `%d` is not the expected `%d`' % (actual_count, expected_count))
1405 1595
1406 def git(self, cmd):
1407 if isinstance(cmd, str):
1408 cmd = 'git ' + cmd
1409 else:
1410 cmd = ['git'] + cmd
1411 return bb.process.run(cmd, cwd=self.gitdir)[0]
1412
1413 def make_shallow(self, args=None): 1596 def make_shallow(self, args=None):
1414 if args is None: 1597 if args is None:
1415 args = ['HEAD'] 1598 args = ['HEAD']
@@ -1512,13 +1695,13 @@ class GitShallowTest(FetcherTest):
1512 self.srcdir = os.path.join(self.tempdir, 'gitsource') 1695 self.srcdir = os.path.join(self.tempdir, 'gitsource')
1513 1696
1514 bb.utils.mkdirhier(self.srcdir) 1697 bb.utils.mkdirhier(self.srcdir)
1515 self.git('init', cwd=self.srcdir) 1698 self.git_init(cwd=self.srcdir)
1516 self.d.setVar('WORKDIR', self.tempdir) 1699 self.d.setVar('WORKDIR', self.tempdir)
1517 self.d.setVar('S', self.gitdir) 1700 self.d.setVar('S', self.gitdir)
1518 self.d.delVar('PREMIRRORS') 1701 self.d.delVar('PREMIRRORS')
1519 self.d.delVar('MIRRORS') 1702 self.d.delVar('MIRRORS')
1520 1703
1521 uri = 'git://%s;protocol=file;subdir=${S}' % self.srcdir 1704 uri = 'git://%s;protocol=file;subdir=${S};branch=master' % self.srcdir
1522 self.d.setVar('SRC_URI', uri) 1705 self.d.setVar('SRC_URI', uri)
1523 self.d.setVar('SRCREV', '${AUTOREV}') 1706 self.d.setVar('SRCREV', '${AUTOREV}')
1524 self.d.setVar('AUTOREV', '${@bb.fetch2.get_autorev(d)}') 1707 self.d.setVar('AUTOREV', '${@bb.fetch2.get_autorev(d)}')
@@ -1526,6 +1709,7 @@ class GitShallowTest(FetcherTest):
1526 self.d.setVar('BB_GIT_SHALLOW', '1') 1709 self.d.setVar('BB_GIT_SHALLOW', '1')
1527 self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '0') 1710 self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '0')
1528 self.d.setVar('BB_GENERATE_SHALLOW_TARBALLS', '1') 1711 self.d.setVar('BB_GENERATE_SHALLOW_TARBALLS', '1')
1712 self.d.setVar("__BBSRCREV_SEEN", "1")
1529 1713
1530 def assertRefs(self, expected_refs, cwd=None): 1714 def assertRefs(self, expected_refs, cwd=None):
1531 if cwd is None: 1715 if cwd is None:
@@ -1543,15 +1727,6 @@ class GitShallowTest(FetcherTest):
1543 actual_count = len(revs.splitlines()) 1727 actual_count = len(revs.splitlines())
1544 self.assertEqual(expected_count, actual_count, msg='Object count `%d` is not the expected `%d`' % (actual_count, expected_count)) 1728 self.assertEqual(expected_count, actual_count, msg='Object count `%d` is not the expected `%d`' % (actual_count, expected_count))
1545 1729
1546 def git(self, cmd, cwd=None):
1547 if isinstance(cmd, str):
1548 cmd = 'git ' + cmd
1549 else:
1550 cmd = ['git'] + cmd
1551 if cwd is None:
1552 cwd = self.gitdir
1553 return bb.process.run(cmd, cwd=cwd)[0]
1554
1555 def add_empty_file(self, path, cwd=None, msg=None): 1730 def add_empty_file(self, path, cwd=None, msg=None):
1556 if msg is None: 1731 if msg is None:
1557 msg = path 1732 msg = path
@@ -1594,6 +1769,7 @@ class GitShallowTest(FetcherTest):
1594 1769
1595 # fetch and unpack, from the shallow tarball 1770 # fetch and unpack, from the shallow tarball
1596 bb.utils.remove(self.gitdir, recurse=True) 1771 bb.utils.remove(self.gitdir, recurse=True)
1772 bb.process.run('chmod u+w -R "%s"' % ud.clonedir)
1597 bb.utils.remove(ud.clonedir, recurse=True) 1773 bb.utils.remove(ud.clonedir, recurse=True)
1598 bb.utils.remove(ud.clonedir.replace('gitsource', 'gitsubmodule'), recurse=True) 1774 bb.utils.remove(ud.clonedir.replace('gitsource', 'gitsubmodule'), recurse=True)
1599 1775
@@ -1745,7 +1921,7 @@ class GitShallowTest(FetcherTest):
1745 1921
1746 smdir = os.path.join(self.tempdir, 'gitsubmodule') 1922 smdir = os.path.join(self.tempdir, 'gitsubmodule')
1747 bb.utils.mkdirhier(smdir) 1923 bb.utils.mkdirhier(smdir)
1748 self.git('init', cwd=smdir) 1924 self.git_init(cwd=smdir)
1749 # Make this look like it was cloned from a remote... 1925 # Make this look like it was cloned from a remote...
1750 self.git('config --add remote.origin.url "%s"' % smdir, cwd=smdir) 1926 self.git('config --add remote.origin.url "%s"' % smdir, cwd=smdir)
1751 self.git('config --add remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"', cwd=smdir) 1927 self.git('config --add remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"', cwd=smdir)
@@ -1753,11 +1929,11 @@ class GitShallowTest(FetcherTest):
1753 self.add_empty_file('bsub', cwd=smdir) 1929 self.add_empty_file('bsub', cwd=smdir)
1754 1930
1755 self.git('submodule init', cwd=self.srcdir) 1931 self.git('submodule init', cwd=self.srcdir)
1756 self.git('submodule add file://%s' % smdir, cwd=self.srcdir) 1932 self.git('-c protocol.file.allow=always submodule add file://%s' % smdir, cwd=self.srcdir)
1757 self.git('submodule update', cwd=self.srcdir) 1933 self.git('submodule update', cwd=self.srcdir)
1758 self.git('commit -m submodule -a', cwd=self.srcdir) 1934 self.git('commit -m submodule -a', cwd=self.srcdir)
1759 1935
1760 uri = 'gitsm://%s;protocol=file;subdir=${S}' % self.srcdir 1936 uri = 'gitsm://%s;protocol=file;subdir=${S};branch=master' % self.srcdir
1761 fetcher, ud = self.fetch_shallow(uri) 1937 fetcher, ud = self.fetch_shallow(uri)
1762 1938
1763 # Verify the main repository is shallow 1939 # Verify the main repository is shallow
@@ -1775,7 +1951,7 @@ class GitShallowTest(FetcherTest):
1775 1951
1776 smdir = os.path.join(self.tempdir, 'gitsubmodule') 1952 smdir = os.path.join(self.tempdir, 'gitsubmodule')
1777 bb.utils.mkdirhier(smdir) 1953 bb.utils.mkdirhier(smdir)
1778 self.git('init', cwd=smdir) 1954 self.git_init(cwd=smdir)
1779 # Make this look like it was cloned from a remote... 1955 # Make this look like it was cloned from a remote...
1780 self.git('config --add remote.origin.url "%s"' % smdir, cwd=smdir) 1956 self.git('config --add remote.origin.url "%s"' % smdir, cwd=smdir)
1781 self.git('config --add remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"', cwd=smdir) 1957 self.git('config --add remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"', cwd=smdir)
@@ -1783,7 +1959,7 @@ class GitShallowTest(FetcherTest):
1783 self.add_empty_file('bsub', cwd=smdir) 1959 self.add_empty_file('bsub', cwd=smdir)
1784 1960
1785 self.git('submodule init', cwd=self.srcdir) 1961 self.git('submodule init', cwd=self.srcdir)
1786 self.git('submodule add file://%s' % smdir, cwd=self.srcdir) 1962 self.git('-c protocol.file.allow=always submodule add file://%s' % smdir, cwd=self.srcdir)
1787 self.git('submodule update', cwd=self.srcdir) 1963 self.git('submodule update', cwd=self.srcdir)
1788 self.git('commit -m submodule -a', cwd=self.srcdir) 1964 self.git('commit -m submodule -a', cwd=self.srcdir)
1789 1965
@@ -1794,8 +1970,8 @@ class GitShallowTest(FetcherTest):
1794 1970
1795 # Set up the mirror 1971 # Set up the mirror
1796 mirrordir = os.path.join(self.tempdir, 'mirror') 1972 mirrordir = os.path.join(self.tempdir, 'mirror')
1797 os.rename(self.dldir, mirrordir) 1973 bb.utils.rename(self.dldir, mirrordir)
1798 self.d.setVar('PREMIRRORS', 'gitsm://.*/.* file://%s/\n' % mirrordir) 1974 self.d.setVar('PREMIRRORS', 'gitsm://.*/.* file://%s/' % mirrordir)
1799 1975
1800 # Fetch from the mirror 1976 # Fetch from the mirror
1801 bb.utils.remove(self.dldir, recurse=True) 1977 bb.utils.remove(self.dldir, recurse=True)
@@ -1818,10 +1994,10 @@ class GitShallowTest(FetcherTest):
1818 self.git('annex init', cwd=self.srcdir) 1994 self.git('annex init', cwd=self.srcdir)
1819 open(os.path.join(self.srcdir, 'c'), 'w').close() 1995 open(os.path.join(self.srcdir, 'c'), 'w').close()
1820 self.git('annex add c', cwd=self.srcdir) 1996 self.git('annex add c', cwd=self.srcdir)
1821 self.git('commit -m annex-c -a', cwd=self.srcdir) 1997 self.git('commit --author "Foo Bar <foo@bar>" -m annex-c -a', cwd=self.srcdir)
1822 bb.process.run('chmod u+w -R %s' % os.path.join(self.srcdir, '.git', 'annex')) 1998 bb.process.run('chmod u+w -R %s' % self.srcdir)
1823 1999
1824 uri = 'gitannex://%s;protocol=file;subdir=${S}' % self.srcdir 2000 uri = 'gitannex://%s;protocol=file;subdir=${S};branch=master' % self.srcdir
1825 fetcher, ud = self.fetch_shallow(uri) 2001 fetcher, ud = self.fetch_shallow(uri)
1826 2002
1827 self.assertRevCount(1) 2003 self.assertRevCount(1)
@@ -1910,9 +2086,9 @@ class GitShallowTest(FetcherTest):
1910 # Set up the mirror 2086 # Set up the mirror
1911 mirrordir = os.path.join(self.tempdir, 'mirror') 2087 mirrordir = os.path.join(self.tempdir, 'mirror')
1912 bb.utils.mkdirhier(mirrordir) 2088 bb.utils.mkdirhier(mirrordir)
1913 self.d.setVar('PREMIRRORS', 'git://.*/.* file://%s/\n' % mirrordir) 2089 self.d.setVar('PREMIRRORS', 'git://.*/.* file://%s/' % mirrordir)
1914 2090
1915 os.rename(os.path.join(self.dldir, mirrortarball), 2091 bb.utils.rename(os.path.join(self.dldir, mirrortarball),
1916 os.path.join(mirrordir, mirrortarball)) 2092 os.path.join(mirrordir, mirrortarball))
1917 2093
1918 # Fetch from the mirror 2094 # Fetch from the mirror
@@ -2033,7 +2209,7 @@ class GitShallowTest(FetcherTest):
2033 2209
2034 @skipIfNoNetwork() 2210 @skipIfNoNetwork()
2035 def test_bitbake(self): 2211 def test_bitbake(self):
2036 self.git('remote add --mirror=fetch origin git://github.com/openembedded/bitbake', cwd=self.srcdir) 2212 self.git('remote add --mirror=fetch origin https://github.com/openembedded/bitbake', cwd=self.srcdir)
2037 self.git('config core.bare true', cwd=self.srcdir) 2213 self.git('config core.bare true', cwd=self.srcdir)
2038 self.git('fetch', cwd=self.srcdir) 2214 self.git('fetch', cwd=self.srcdir)
2039 2215
@@ -2068,7 +2244,7 @@ class GitShallowTest(FetcherTest):
2068 self.d.setVar('SRCREV', 'e5939ff608b95cdd4d0ab0e1935781ab9a276ac0') 2244 self.d.setVar('SRCREV', 'e5939ff608b95cdd4d0ab0e1935781ab9a276ac0')
2069 self.d.setVar('BB_GIT_SHALLOW', '1') 2245 self.d.setVar('BB_GIT_SHALLOW', '1')
2070 self.d.setVar('BB_GENERATE_SHALLOW_TARBALLS', '1') 2246 self.d.setVar('BB_GENERATE_SHALLOW_TARBALLS', '1')
2071 fetcher = bb.fetch.Fetch(["git://git.yoctoproject.org/fstests"], self.d) 2247 fetcher = bb.fetch.Fetch(["git://git.yoctoproject.org/fstests;branch=master;protocol=https"], self.d)
2072 fetcher.download() 2248 fetcher.download()
2073 2249
2074 bb.utils.remove(self.dldir + "/*.tar.gz") 2250 bb.utils.remove(self.dldir + "/*.tar.gz")
@@ -2078,12 +2254,18 @@ class GitShallowTest(FetcherTest):
2078 self.assertIn("fstests.doap", dir) 2254 self.assertIn("fstests.doap", dir)
2079 2255
2080class GitLfsTest(FetcherTest): 2256class GitLfsTest(FetcherTest):
2257 def skipIfNoGitLFS():
2258 import shutil
2259 if not shutil.which('git-lfs'):
2260 return unittest.skip('git-lfs not installed')
2261 return lambda f: f
2262
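skipIfNoGitLFS() uses the same conditional-skip idiom as SVNTest.skipIfNoSvn(): evaluated once at class-definition time, it returns either unittest.skip(...) or an identity decorator. The idiom generalises to any required host tool; a sketch (skipUnlessTool is a hypothetical helper, not part of this file):

    import shutil
    import unittest

    def skipUnlessTool(tool):
        # Return a decorator that skips the test if `tool` is not on PATH.
        if not shutil.which(tool):
            return unittest.skip('%s not installed' % tool)
        return lambda f: f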
2081 def setUp(self): 2263 def setUp(self):
2082 FetcherTest.setUp(self) 2264 FetcherTest.setUp(self)
2083 2265
2084 self.gitdir = os.path.join(self.tempdir, 'git') 2266 self.gitdir = os.path.join(self.tempdir, 'git')
2085 self.srcdir = os.path.join(self.tempdir, 'gitsource') 2267 self.srcdir = os.path.join(self.tempdir, 'gitsource')
2086 2268
2087 self.d.setVar('WORKDIR', self.tempdir) 2269 self.d.setVar('WORKDIR', self.tempdir)
2088 self.d.setVar('S', self.gitdir) 2270 self.d.setVar('S', self.gitdir)
2089 self.d.delVar('PREMIRRORS') 2271 self.d.delVar('PREMIRRORS')
@@ -2091,22 +2273,18 @@ class GitLfsTest(FetcherTest):
2091 2273
2092 self.d.setVar('SRCREV', '${AUTOREV}') 2274 self.d.setVar('SRCREV', '${AUTOREV}')
2093 self.d.setVar('AUTOREV', '${@bb.fetch2.get_autorev(d)}') 2275 self.d.setVar('AUTOREV', '${@bb.fetch2.get_autorev(d)}')
2276 self.d.setVar("__BBSRCREV_SEEN", "1")
2094 2277
2095 bb.utils.mkdirhier(self.srcdir) 2278 bb.utils.mkdirhier(self.srcdir)
2096 self.git('init', cwd=self.srcdir) 2279 self.git_init(cwd=self.srcdir)
2097 with open(os.path.join(self.srcdir, '.gitattributes'), 'wt') as attrs: 2280 self.commit_file('.gitattributes', '*.mp3 filter=lfs -text')
2098 attrs.write('*.mp3 filter=lfs -text')
2099 self.git(['add', '.gitattributes'], cwd=self.srcdir)
2100 self.git(['commit', '-m', "attributes", '.gitattributes'], cwd=self.srcdir)
2101 2281
2102 def git(self, cmd, cwd=None):
2103 if isinstance(cmd, str):
2104 cmd = 'git ' + cmd
2105 else:
2106 cmd = ['git'] + cmd
2107 if cwd is None:
2108 cwd = self.gitdir
2109 return bb.process.run(cmd, cwd=cwd)[0]
2282 def commit_file(self, filename, content):
2283 with open(os.path.join(self.srcdir, filename), "w") as f:
2284 f.write(content)
2285 self.git(["add", filename], cwd=self.srcdir)
2286 self.git(["commit", "-m", "Change"], cwd=self.srcdir)
2287 return self.git(["rev-parse", "HEAD"], cwd=self.srcdir).strip()
2110 2288
2111 def fetch(self, uri=None, download=True): 2289 def fetch(self, uri=None, download=True):
2112 uris = self.d.getVar('SRC_URI').split() 2290 uris = self.d.getVar('SRC_URI').split()
@@ -2119,65 +2297,158 @@ class GitLfsTest(FetcherTest):
2119 ud = fetcher.ud[uri] 2297 ud = fetcher.ud[uri]
2120 return fetcher, ud 2298 return fetcher, ud
2121 2299
+    def get_real_git_lfs_file(self):
+        self.d.setVar('PATH', os.environ.get('PATH'))
+        fetcher, ud = self.fetch()
+        fetcher.unpack(self.d.getVar('WORKDIR'))
+        unpacked_lfs_file = os.path.join(self.d.getVar('WORKDIR'), 'git', "Cat_poster_1.jpg")
+        return unpacked_lfs_file
+
+    @skipIfNoGitLFS()
+    def test_fetch_lfs_on_srcrev_change(self):
+        """Test if fetch downloads missing LFS objects when a different revision within an existing repository is requested"""
+        self.git(["lfs", "install", "--local"], cwd=self.srcdir)
+
+        @contextlib.contextmanager
+        def hide_upstream_repository():
+            """Hide the upstream repository to make sure that git lfs cannot pull from it"""
+            temp_name = self.srcdir + ".bak"
+            os.rename(self.srcdir, temp_name)
+            try:
+                yield
+            finally:
+                os.rename(temp_name, self.srcdir)
+
+        def fetch_and_verify(revision, filename, content):
+            self.d.setVar('SRCREV', revision)
+            fetcher, ud = self.fetch()
+
+            with hide_upstream_repository():
+                workdir = self.d.getVar('WORKDIR')
+                fetcher.unpack(workdir)
+
+                with open(os.path.join(workdir, "git", filename)) as f:
+                    self.assertEqual(f.read(), content)
+
+        commit_1 = self.commit_file("a.mp3", "version 1")
+        commit_2 = self.commit_file("a.mp3", "version 2")
+
+        self.d.setVar('SRC_URI', "git://%s;protocol=file;lfs=1;branch=master" % self.srcdir)
+
+        # Seed the local download folder by fetching the latest commit and verifying that the LFS contents are
+        # available even when the upstream repository disappears.
+        fetch_and_verify(commit_2, "a.mp3", "version 2")
+        # Verify that even when an older revision is fetched, the needed LFS objects are fetched into the download
+        # folder.
+        fetch_and_verify(commit_1, "a.mp3", "version 1")
+
+    @skipIfNoGitLFS()
+    @skipIfNoNetwork()
+    def test_real_git_lfs_repo_succeeds_without_lfs_param(self):
+        self.d.setVar('SRC_URI', "git://gitlab.com/gitlab-examples/lfs.git;protocol=https;branch=master")
+        f = self.get_real_git_lfs_file()
+        self.assertTrue(os.path.exists(f))
+        self.assertEqual("c0baab607a97839c9a328b4310713307", bb.utils.md5_file(f))
+
+    @skipIfNoGitLFS()
+    @skipIfNoNetwork()
+    def test_real_git_lfs_repo_succeeds(self):
+        self.d.setVar('SRC_URI', "git://gitlab.com/gitlab-examples/lfs.git;protocol=https;branch=master;lfs=1")
+        f = self.get_real_git_lfs_file()
+        self.assertTrue(os.path.exists(f))
+        self.assertEqual("c0baab607a97839c9a328b4310713307", bb.utils.md5_file(f))
+
+    @skipIfNoGitLFS()
+    @skipIfNoNetwork()
+    def test_real_git_lfs_repo_skips(self):
+        self.d.setVar('SRC_URI', "git://gitlab.com/gitlab-examples/lfs.git;protocol=https;branch=master;lfs=0")
+        f = self.get_real_git_lfs_file()
+        # This is the actual non-smudged placeholder file on the repo if git-lfs does not run
+        lfs_file = (
+            'version https://git-lfs.github.com/spec/v1\n'
+            'oid sha256:34be66b1a39a1955b46a12588df9d5f6fc1da790e05cf01f3c7422f4bbbdc26b\n'
+            'size 11423554\n'
+        )
+
+        with open(f) as fh:
+            self.assertEqual(lfs_file, fh.read())
+
+    @skipIfNoGitLFS()
     def test_lfs_enabled(self):
         import shutil
 
-        uri = 'git://%s;protocol=file;subdir=${S};lfs=1' % self.srcdir
+        uri = 'git://%s;protocol=file;lfs=1;branch=master' % self.srcdir
         self.d.setVar('SRC_URI', uri)
 
-        # Careful: suppress initial attempt at downloading until
-        # we know whether git-lfs is installed.
-        fetcher, ud = self.fetch(uri=None, download=False)
-        self.assertIsNotNone(ud.method._find_git_lfs)
-
-        # If git-lfs can be found, the unpack should be successful. Only
-        # attempt this with the real live copy of git-lfs installed.
-        if ud.method._find_git_lfs(self.d):
-            fetcher.download()
-            shutil.rmtree(self.gitdir, ignore_errors=True)
-            fetcher.unpack(self.d.getVar('WORKDIR'))
-
-        # If git-lfs cannot be found, the unpack should throw an error
-        with self.assertRaises(bb.fetch2.FetchError):
-            fetcher.download()
-            ud.method._find_git_lfs = lambda d: False
-            shutil.rmtree(self.gitdir, ignore_errors=True)
-            fetcher.unpack(self.d.getVar('WORKDIR'))
+        # With git-lfs installed, test that we can fetch and unpack
+        fetcher, ud = self.fetch()
+        shutil.rmtree(self.gitdir, ignore_errors=True)
+        fetcher.unpack(self.d.getVar('WORKDIR'))
 
+    @skipIfNoGitLFS()
     def test_lfs_disabled(self):
         import shutil
 
-        uri = 'git://%s;protocol=file;subdir=${S};lfs=0' % self.srcdir
+        uri = 'git://%s;protocol=file;lfs=0;branch=master' % self.srcdir
         self.d.setVar('SRC_URI', uri)
 
-        # In contrast to test_lfs_enabled(), allow the implicit download
-        # done by self.fetch() to occur here. The point of this test case
-        # is to verify that the fetcher can survive even if the source
+        # Verify that the fetcher can survive even if the source
         # repository has Git LFS usage configured.
         fetcher, ud = self.fetch()
-        self.assertIsNotNone(ud.method._find_git_lfs)
-
-        # If git-lfs can be found, the unpack should be successful. A
-        # live copy of git-lfs is not required for this case, so
-        # unconditionally forge its presence.
-        ud.method._find_git_lfs = lambda d: True
-        shutil.rmtree(self.gitdir, ignore_errors=True)
         fetcher.unpack(self.d.getVar('WORKDIR'))
 
-        # If git-lfs cannot be found, the unpack should be successful
-        ud.method._find_git_lfs = lambda d: False
-        shutil.rmtree(self.gitdir, ignore_errors=True)
-        fetcher.unpack(self.d.getVar('WORKDIR'))
+    def test_lfs_enabled_not_installed(self):
+        import shutil
+
+        uri = 'git://%s;protocol=file;lfs=1;branch=master' % self.srcdir
+        self.d.setVar('SRC_URI', uri)
+
+        # Careful: suppress initial attempt at downloading
+        fetcher, ud = self.fetch(uri=None, download=False)
+
+        # Artificially assert that git-lfs is not installed, so
+        # we can verify a failure to unpack in its absence.
+        old_find_git_lfs = ud.method._find_git_lfs
+        try:
+            # If git-lfs cannot be found, the unpack should throw an error
+            with self.assertRaises(bb.fetch2.FetchError):
+                fetcher.download()
+                ud.method._find_git_lfs = lambda d: False
+                shutil.rmtree(self.gitdir, ignore_errors=True)
+                fetcher.unpack(self.d.getVar('WORKDIR'))
+        finally:
+            ud.method._find_git_lfs = old_find_git_lfs
+
+    def test_lfs_disabled_not_installed(self):
+        import shutil
+
+        uri = 'git://%s;protocol=file;lfs=0;branch=master' % self.srcdir
+        self.d.setVar('SRC_URI', uri)
+
+        # Careful: suppress initial attempt at downloading
+        fetcher, ud = self.fetch(uri=None, download=False)
+
+        # Artificially assert that git-lfs is not installed, so
+        # we can verify a failure to unpack in its absence.
+        old_find_git_lfs = ud.method._find_git_lfs
+        try:
+            # Even if git-lfs cannot be found, the unpack should be successful
+            fetcher.download()
+            ud.method._find_git_lfs = lambda d: False
+            shutil.rmtree(self.gitdir, ignore_errors=True)
+            fetcher.unpack(self.d.getVar('WORKDIR'))
+        finally:
+            ud.method._find_git_lfs = old_find_git_lfs
 
 class GitURLWithSpacesTest(FetcherTest):
     test_git_urls = {
-        "git://tfs-example.org:22/tfs/example%20path/example.git" : {
-            'url': 'git://tfs-example.org:22/tfs/example%20path/example.git',
+        "git://tfs-example.org:22/tfs/example%20path/example.git;branch=master" : {
+            'url': 'git://tfs-example.org:22/tfs/example%20path/example.git;branch=master',
             'gitsrcname': 'tfs-example.org.22.tfs.example_path.example.git',
             'path': '/tfs/example path/example.git'
         },
-        "git://tfs-example.org:22/tfs/example%20path/example%20repo.git" : {
-            'url': 'git://tfs-example.org:22/tfs/example%20path/example%20repo.git',
+        "git://tfs-example.org:22/tfs/example%20path/example%20repo.git;branch=master" : {
+            'url': 'git://tfs-example.org:22/tfs/example%20path/example%20repo.git;branch=master',
             'gitsrcname': 'tfs-example.org.22.tfs.example_path.example_repo.git',
             'path': '/tfs/example path/example repo.git'
         }
@@ -2201,11 +2472,129 @@ class GitURLWithSpacesTest(FetcherTest):
         self.assertEqual(ud.clonedir, os.path.join(self.dldir, "git2", ref['gitsrcname']))
         self.assertEqual(ud.fullmirror, os.path.join(self.dldir, "git2_" + ref['gitsrcname'] + '.tar.gz'))
 
+class CrateTest(FetcherTest):
+    @skipIfNoNetwork()
+    def test_crate_url(self):
+
+        uri = "crate://crates.io/glob/0.2.11"
+        self.d.setVar('SRC_URI', uri)
+
+        uris = self.d.getVar('SRC_URI').split()
+        d = self.d
+
+        fetcher = bb.fetch2.Fetch(uris, self.d)
+        ud = fetcher.ud[fetcher.urls[0]]
+
+        self.assertIn("name", ud.parm)
+        self.assertEqual(ud.parm["name"], "glob-0.2.11")
+        self.assertIn("downloadfilename", ud.parm)
+        self.assertEqual(ud.parm["downloadfilename"], "glob-0.2.11.crate")
+
+        fetcher.download()
+        fetcher.unpack(self.tempdir)
+        self.assertEqual(sorted(os.listdir(self.tempdir)), ['cargo_home', 'download', 'unpacked'])
+        self.assertEqual(sorted(os.listdir(self.tempdir + "/download")), ['glob-0.2.11.crate', 'glob-0.2.11.crate.done'])
+        self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/glob-0.2.11/.cargo-checksum.json"))
+        self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/glob-0.2.11/src/lib.rs"))
+
+    @skipIfNoNetwork()
+    def test_crate_url_matching_recipe(self):
+
+        self.d.setVar('BP', 'glob-0.2.11')
+
+        uri = "crate://crates.io/glob/0.2.11"
+        self.d.setVar('SRC_URI', uri)
+
+        uris = self.d.getVar('SRC_URI').split()
+        d = self.d
+
+        fetcher = bb.fetch2.Fetch(uris, self.d)
+        ud = fetcher.ud[fetcher.urls[0]]
+
+        self.assertIn("name", ud.parm)
+        self.assertEqual(ud.parm["name"], "glob-0.2.11")
+        self.assertIn("downloadfilename", ud.parm)
+        self.assertEqual(ud.parm["downloadfilename"], "glob-0.2.11.crate")
+
+        fetcher.download()
+        fetcher.unpack(self.tempdir)
+        self.assertEqual(sorted(os.listdir(self.tempdir)), ['download', 'glob-0.2.11', 'unpacked'])
+        self.assertEqual(sorted(os.listdir(self.tempdir + "/download")), ['glob-0.2.11.crate', 'glob-0.2.11.crate.done'])
+        self.assertTrue(os.path.exists(self.tempdir + "/glob-0.2.11/src/lib.rs"))
+
+    @skipIfNoNetwork()
+    def test_crate_url_params(self):
+
+        uri = "crate://crates.io/aho-corasick/0.7.20;name=aho-corasick-renamed"
+        self.d.setVar('SRC_URI', uri)
+
+        uris = self.d.getVar('SRC_URI').split()
+        d = self.d
+
+        fetcher = bb.fetch2.Fetch(uris, self.d)
+        ud = fetcher.ud[fetcher.urls[0]]
+
+        self.assertIn("name", ud.parm)
+        self.assertEqual(ud.parm["name"], "aho-corasick-renamed")
+        self.assertIn("downloadfilename", ud.parm)
+        self.assertEqual(ud.parm["downloadfilename"], "aho-corasick-0.7.20.crate")
+
+        fetcher.download()
+        fetcher.unpack(self.tempdir)
+        self.assertEqual(sorted(os.listdir(self.tempdir)), ['cargo_home', 'download', 'unpacked'])
+        self.assertEqual(sorted(os.listdir(self.tempdir + "/download")), ['aho-corasick-0.7.20.crate', 'aho-corasick-0.7.20.crate.done'])
+        self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/aho-corasick-0.7.20/.cargo-checksum.json"))
+        self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/aho-corasick-0.7.20/src/lib.rs"))
+
+    @skipIfNoNetwork()
+    def test_crate_url_multi(self):
+
+        uri = "crate://crates.io/glob/0.2.11 crate://crates.io/time/0.1.35"
+        self.d.setVar('SRC_URI', uri)
+
+        uris = self.d.getVar('SRC_URI').split()
+        d = self.d
+
+        fetcher = bb.fetch2.Fetch(uris, self.d)
+        ud = fetcher.ud[fetcher.urls[0]]
+
+        self.assertIn("name", ud.parm)
+        self.assertEqual(ud.parm["name"], "glob-0.2.11")
+        self.assertIn("downloadfilename", ud.parm)
+        self.assertEqual(ud.parm["downloadfilename"], "glob-0.2.11.crate")
+
+        ud = fetcher.ud[fetcher.urls[1]]
+        self.assertIn("name", ud.parm)
+        self.assertEqual(ud.parm["name"], "time-0.1.35")
+        self.assertIn("downloadfilename", ud.parm)
+        self.assertEqual(ud.parm["downloadfilename"], "time-0.1.35.crate")
+
+        fetcher.download()
+        fetcher.unpack(self.tempdir)
+        self.assertEqual(sorted(os.listdir(self.tempdir)), ['cargo_home', 'download', 'unpacked'])
+        self.assertEqual(sorted(os.listdir(self.tempdir + "/download")), ['glob-0.2.11.crate', 'glob-0.2.11.crate.done', 'time-0.1.35.crate', 'time-0.1.35.crate.done'])
+        self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/glob-0.2.11/.cargo-checksum.json"))
+        self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/glob-0.2.11/src/lib.rs"))
+        self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/time-0.1.35/.cargo-checksum.json"))
+        self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/time-0.1.35/src/lib.rs"))
+
+    @skipIfNoNetwork()
+    def test_crate_incorrect_cksum(self):
+        uri = "crate://crates.io/aho-corasick/0.7.20"
+        self.d.setVar('SRC_URI', uri)
+        self.d.setVarFlag("SRC_URI", "aho-corasick-0.7.20.sha256sum", hashlib.sha256("Invalid".encode("utf-8")).hexdigest())
+
+        uris = self.d.getVar('SRC_URI').split()
+
+        fetcher = bb.fetch2.Fetch(uris, self.d)
+        with self.assertRaisesRegex(bb.fetch2.FetchError, "Fetcher failure for URL"):
+            fetcher.download()
+
 class NPMTest(FetcherTest):
     def skipIfNoNpm():
         import shutil
         if not shutil.which('npm'):
-            return unittest.skip('npm not installed, tests being skipped')
+            return unittest.skip('npm not installed')
         return lambda f: f
 
     @skipIfNoNpm()
@@ -2250,11 +2639,42 @@ class NPMTest(FetcherTest):
         ud = fetcher.ud[fetcher.urls[0]]
         fetcher.download()
         self.assertTrue(os.path.exists(ud.localpath))
+
+        # Setup the mirror by renaming the download directory
+        mirrordir = os.path.join(self.tempdir, 'mirror')
+        bb.utils.rename(self.dldir, mirrordir)
+        os.mkdir(self.dldir)
+
+        # Configure the premirror to be used
+        self.d.setVar('PREMIRRORS', 'https?$://.*/.* file://%s/npm2' % mirrordir)
+        self.d.setVar('BB_FETCH_PREMIRRORONLY', '1')
+
+        # Fetch again
+        self.assertFalse(os.path.exists(ud.localpath))
+        # The npm fetcher doesn't handle that the .resolved file disappears
+        # while the fetcher object exists, which it does when we rename the
+        # download directory to "mirror" above. Thus we need a new fetcher to go
+        # with the now empty download directory.
+        fetcher = bb.fetch.Fetch([url], self.d)
+        ud = fetcher.ud[fetcher.urls[0]]
+        fetcher.download()
+        self.assertTrue(os.path.exists(ud.localpath))
+
+    @skipIfNoNpm()
+    @skipIfNoNetwork()
+    def test_npm_premirrors_with_specified_filename(self):
+        url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0'
+        # Fetch once to get a tarball
+        fetcher = bb.fetch.Fetch([url], self.d)
+        ud = fetcher.ud[fetcher.urls[0]]
+        fetcher.download()
+        self.assertTrue(os.path.exists(ud.localpath))
         # Setup the mirror
         mirrordir = os.path.join(self.tempdir, 'mirror')
         bb.utils.mkdirhier(mirrordir)
-        os.replace(ud.localpath, os.path.join(mirrordir, os.path.basename(ud.localpath)))
-        self.d.setVar('PREMIRRORS', 'https?$://.*/.* file://%s/\n' % mirrordir)
+        mirrorfilename = os.path.join(mirrordir, os.path.basename(ud.localpath))
+        os.replace(ud.localpath, mirrorfilename)
+        self.d.setVar('PREMIRRORS', 'https?$://.*/.* file://%s' % mirrorfilename)
         self.d.setVar('BB_FETCH_PREMIRRORONLY', '1')
         # Fetch again
         self.assertFalse(os.path.exists(ud.localpath))
@@ -2274,7 +2694,7 @@ class NPMTest(FetcherTest):
         mirrordir = os.path.join(self.tempdir, 'mirror')
         bb.utils.mkdirhier(mirrordir)
         os.replace(ud.localpath, os.path.join(mirrordir, os.path.basename(ud.localpath)))
-        self.d.setVar('MIRRORS', 'https?$://.*/.* file://%s/\n' % mirrordir)
+        self.d.setVar('MIRRORS', 'https?$://.*/.* file://%s/' % mirrordir)
         # Update the resolved url to an invalid url
         with open(ud.resolvefile, 'r') as f:
             url = f.read()
@@ -2293,7 +2713,7 @@ class NPMTest(FetcherTest):
         url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0;destsuffix=foo/bar;downloadfilename=foo-bar.tgz'
         fetcher = bb.fetch.Fetch([url], self.d)
         fetcher.download()
-        self.assertTrue(os.path.exists(os.path.join(self.dldir, 'foo-bar.tgz')))
+        self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'foo-bar.tgz')))
         fetcher.unpack(self.unpackdir)
         unpackdir = os.path.join(self.unpackdir, 'foo', 'bar')
         self.assertTrue(os.path.exists(os.path.join(unpackdir, 'package.json')))
@@ -2323,7 +2743,7 @@ class NPMTest(FetcherTest):
     @skipIfNoNpm()
     @skipIfNoNetwork()
     def test_npm_registry_alternate(self):
-        url = 'npm://registry.freajs.org;package=@savoirfairelinux/node-server-example;version=1.0.0'
+        url = 'npm://skimdb.npmjs.com;package=@savoirfairelinux/node-server-example;version=1.0.0'
         fetcher = bb.fetch.Fetch([url], self.d)
         fetcher.download()
         fetcher.unpack(self.unpackdir)
@@ -2433,6 +2853,45 @@ class NPMTest(FetcherTest):
 
     @skipIfNoNpm()
     @skipIfNoNetwork()
+    def test_npmsw_git(self):
+        swfile = self.create_shrinkwrap_file({
+            'dependencies': {
+                'cookie': {
+                    'version': 'github:jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09',
+                    'from': 'github:jshttp/cookie.git'
+                }
+            }
+        })
+        fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d)
+        fetcher.download()
+        self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'github.com.jshttp.cookie.git')))
+
+        swfile = self.create_shrinkwrap_file({
+            'dependencies': {
+                'cookie': {
+                    'version': 'jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09',
+                    'from': 'jshttp/cookie.git'
+                }
+            }
+        })
+        fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d)
+        fetcher.download()
+        self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'github.com.jshttp.cookie.git')))
+
+        swfile = self.create_shrinkwrap_file({
+            'dependencies': {
+                'nodejs': {
+                    'version': 'gitlab:gitlab-examples/nodejs.git#892a1f16725e56cc3a2cb0d677be42935c8fc262',
+                    'from': 'gitlab:gitlab-examples/nodejs'
+                }
+            }
+        })
+        fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d)
+        fetcher.download()
+        self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'gitlab.com.gitlab-examples.nodejs.git')))
+
+    @skipIfNoNpm()
+    @skipIfNoNetwork()
     def test_npmsw_dev(self):
         swfile = self.create_shrinkwrap_file({
             'dependencies': {
@@ -2590,7 +3049,7 @@ class NPMTest(FetcherTest):
         mirrordir = os.path.join(self.tempdir, 'mirror')
         bb.utils.mkdirhier(mirrordir)
         os.replace(ud.localpath, os.path.join(mirrordir, os.path.basename(ud.localpath)))
-        self.d.setVar('PREMIRRORS', 'https?$://.*/.* file://%s/\n' % mirrordir)
+        self.d.setVar('PREMIRRORS', 'https?$://.*/.* file://%s/' % mirrordir)
         self.d.setVar('BB_FETCH_PREMIRRORONLY', '1')
         # Fetch again
         self.assertFalse(os.path.exists(ud.localpath))
@@ -2619,7 +3078,7 @@ class NPMTest(FetcherTest):
         mirrordir = os.path.join(self.tempdir, 'mirror')
         bb.utils.mkdirhier(mirrordir)
         os.replace(ud.localpath, os.path.join(mirrordir, os.path.basename(ud.localpath)))
-        self.d.setVar('MIRRORS', 'https?$://.*/.* file://%s/\n' % mirrordir)
+        self.d.setVar('MIRRORS', 'https?$://.*/.* file://%s/' % mirrordir)
         # Fetch again with invalid url
         self.assertFalse(os.path.exists(ud.localpath))
         swfile = self.create_shrinkwrap_file({
@@ -2634,3 +3093,273 @@ class NPMTest(FetcherTest):
         fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d)
         fetcher.download()
         self.assertTrue(os.path.exists(ud.localpath))
+
+class GitSharedTest(FetcherTest):
+    def setUp(self):
+        super(GitSharedTest, self).setUp()
+        self.recipe_url = "git://git.openembedded.org/bitbake;branch=master;protocol=https"
+        self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40')
+        self.d.setVar("__BBSRCREV_SEEN", "1")
+
+    @skipIfNoNetwork()
+    def test_shared_unpack(self):
+        fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+
+        fetcher.download()
+        fetcher.unpack(self.unpackdir)
+        alt = os.path.join(self.unpackdir, 'git/.git/objects/info/alternates')
+        self.assertTrue(os.path.exists(alt))
+
+    @skipIfNoNetwork()
+    def test_noshared_unpack(self):
+        self.d.setVar('BB_GIT_NOSHARED', '1')
+        self.unpackdir += '_noshared'
+        fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+
+        fetcher.download()
+        fetcher.unpack(self.unpackdir)
+        alt = os.path.join(self.unpackdir, 'git/.git/objects/info/alternates')
+        self.assertFalse(os.path.exists(alt))
+
+
+class FetchPremirroronlyLocalTest(FetcherTest):
+
+    def setUp(self):
+        super(FetchPremirroronlyLocalTest, self).setUp()
+        self.mirrordir = os.path.join(self.tempdir, "mirrors")
+        os.mkdir(self.mirrordir)
+        self.reponame = "bitbake"
+        self.gitdir = os.path.join(self.tempdir, "git", self.reponame)
+        self.recipe_url = "git://git.fake.repo/bitbake;branch=master;protocol=https"
+        self.d.setVar("BB_FETCH_PREMIRRORONLY", "1")
+        self.d.setVar("BB_NO_NETWORK", "1")
+        self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n")
+        self.mirrorname = "git2_git.fake.repo.bitbake.tar.gz"
+        self.mirrorfile = os.path.join(self.mirrordir, self.mirrorname)
+        self.testfilename = "bitbake-fetch.test"
+
+    def make_git_repo(self):
+        recipeurl = "git:/git.fake.repo/bitbake"
+        os.makedirs(self.gitdir)
+        self.git_init(cwd=self.gitdir)
+        for i in range(0):
+            self.git_new_commit()
+        bb.process.run('tar -czvf {} .'.format(os.path.join(self.mirrordir, self.mirrorname)), cwd=self.gitdir)
+
+    def git_new_commit(self):
+        import random
+        os.unlink(os.path.join(self.mirrordir, self.mirrorname))
+        branch = self.git("branch --show-current", self.gitdir).split()
+        with open(os.path.join(self.gitdir, self.testfilename), "w") as testfile:
+            testfile.write("File {} from branch {}; Useless random data {}".format(self.testfilename, branch, random.random()))
+        self.git("add {}".format(self.testfilename), self.gitdir)
+        self.git("commit -a -m \"This random commit {} in branch {}. I'm useless.\"".format(random.random(), branch), self.gitdir)
+        bb.process.run('tar -czvf {} .'.format(os.path.join(self.mirrordir, self.mirrorname)), cwd=self.gitdir)
+        return self.git("rev-parse HEAD", self.gitdir).strip()
+
+    def git_new_branch(self, name):
+        self.git_new_commit()
+        head = self.git("rev-parse HEAD", self.gitdir).strip()
+        self.git("checkout -b {}".format(name), self.gitdir)
+        newrev = self.git_new_commit()
+        self.git("checkout {}".format(head), self.gitdir)
+        return newrev
+
+    def test_mirror_multiple_fetches(self):
+        self.make_git_repo()
+        self.d.setVar("SRCREV", self.git_new_commit())
+        fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+        fetcher.download()
+        fetcher.unpack(self.unpackdir)
+        ## New commit in premirror. It's not in the download_dir
+        self.d.setVar("SRCREV", self.git_new_commit())
+        fetcher2 = bb.fetch.Fetch([self.recipe_url], self.d)
+        fetcher2.download()
+        fetcher2.unpack(self.unpackdir)
+        ## New commit in premirror. It's not in the download_dir
+        self.d.setVar("SRCREV", self.git_new_commit())
+        fetcher3 = bb.fetch.Fetch([self.recipe_url], self.d)
+        fetcher3.download()
+        fetcher3.unpack(self.unpackdir)
+
+
+    def test_mirror_commit_nonexistent(self):
+        self.make_git_repo()
+        self.d.setVar("SRCREV", "0"*40)
+        fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+        with self.assertRaises(bb.fetch2.NetworkAccess):
+            fetcher.download()
+
+    def test_mirror_commit_exists(self):
+        self.make_git_repo()
+        self.d.setVar("SRCREV", self.git_new_commit())
+        fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+        fetcher.download()
+        fetcher.unpack(self.unpackdir)
+
+    def test_mirror_tarball_nonexistent(self):
+        self.d.setVar("SRCREV", "0"*40)
+        fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+        with self.assertRaises(bb.fetch2.NetworkAccess):
+            fetcher.download()
+
+    def test_mirror_tarball_multiple_branches(self):
+        """
+        Test if PREMIRRORS can handle multiple name/branch pairs correctly;
+        both branches have the required revisions.
+        """
+        self.make_git_repo()
+        branch1rev = self.git_new_branch("testbranch1")
+        branch2rev = self.git_new_branch("testbranch2")
+        self.recipe_url = "git://git.fake.repo/bitbake;branch=testbranch1,testbranch2;protocol=https;name=branch1,branch2"
+        self.d.setVar("SRCREV_branch1", branch1rev)
+        self.d.setVar("SRCREV_branch2", branch2rev)
+        fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+        self.assertTrue(os.path.exists(self.mirrorfile), "Mirror file doesn't exist")
+        fetcher.download()
+        fetcher.unpack(os.path.join(self.tempdir, "unpacked"))
+        unpacked = os.path.join(self.tempdir, "unpacked", "git", self.testfilename)
+        self.assertTrue(os.path.exists(unpacked), "Repo has not been unpacked properly!")
+        with open(unpacked, 'r') as f:
+            content = f.read()
+            ## We expect to see testbranch1 in the file, not master, not testbranch2
+            self.assertTrue(content.find("testbranch1") != -1, "Wrong branch has been checked out!")
+
+    def test_mirror_tarball_multiple_branches_nobranch(self):
+        """
+        Test if PREMIRRORS can handle multiple name/branch pairs correctly;
+        an unbalanced name/branch list raises ParameterError.
+        """
+        self.make_git_repo()
+        branch1rev = self.git_new_branch("testbranch1")
+        branch2rev = self.git_new_branch("testbranch2")
+        self.recipe_url = "git://git.fake.repo/bitbake;branch=testbranch1;protocol=https;name=branch1,branch2"
+        self.d.setVar("SRCREV_branch1", branch1rev)
+        self.d.setVar("SRCREV_branch2", branch2rev)
+        with self.assertRaises(bb.fetch2.ParameterError):
+            fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+
+    def test_mirror_tarball_multiple_branches_norev(self):
+        """
+        Test if PREMIRRORS can handle multiple name/branch pairs correctly;
+        one of the branches specifies a nonexistent SRCREV.
+        """
+        self.make_git_repo()
+        branch1rev = self.git_new_branch("testbranch1")
+        branch2rev = self.git_new_branch("testbranch2")
+        self.recipe_url = "git://git.fake.repo/bitbake;branch=testbranch1,testbranch2;protocol=https;name=branch1,branch2"
+        self.d.setVar("SRCREV_branch1", branch1rev)
+        self.d.setVar("SRCREV_branch2", "0"*40)
+        fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+        self.assertTrue(os.path.exists(self.mirrorfile), "Mirror file doesn't exist")
+        with self.assertRaises(bb.fetch2.NetworkAccess):
+            fetcher.download()
+
+
+class FetchPremirroronlyNetworkTest(FetcherTest):
+
+    def setUp(self):
+        super(FetchPremirroronlyNetworkTest, self).setUp()
+        self.mirrordir = os.path.join(self.tempdir, "mirrors")
+        os.mkdir(self.mirrordir)
+        self.reponame = "fstests"
+        self.clonedir = os.path.join(self.tempdir, "git")
+        self.gitdir = os.path.join(self.tempdir, "git", "{}.git".format(self.reponame))
+        self.recipe_url = "git://git.yoctoproject.org/fstests;protocol=https"
+        self.d.setVar("BB_FETCH_PREMIRRORONLY", "1")
+        self.d.setVar("BB_NO_NETWORK", "0")
+        self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n")
+
+    def make_git_repo(self):
+        import shutil
+        self.mirrorname = "git2_git.yoctoproject.org.fstests.tar.gz"
+        os.makedirs(self.clonedir)
+        self.git("clone --bare --shallow-since=\"01.01.2013\" {}".format(self.recipe_url), self.clonedir)
+        bb.process.run('tar -czvf {} .'.format(os.path.join(self.mirrordir, self.mirrorname)), cwd=self.gitdir)
+        shutil.rmtree(self.clonedir)
+
+    @skipIfNoNetwork()
+    def test_mirror_tarball_updated(self):
+        self.make_git_repo()
+        ## Upstream commit is in the mirror
+        self.d.setVar("SRCREV", "49d65d53c2bf558ae6e9185af0f3af7b79d255ec")
+        fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+        fetcher.download()
+
+    @skipIfNoNetwork()
+    def test_mirror_tarball_outdated(self):
+        self.make_git_repo()
+        ## Upstream commit not in the mirror
+        self.d.setVar("SRCREV", "15413486df1f5a5b5af699b6f3ba5f0984e52a9f")
+        fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+        with self.assertRaises(bb.fetch2.NetworkAccess):
+            fetcher.download()
+
+class FetchPremirroronlyMercurialTest(FetcherTest):
+    """Test premirrors with mercurial repos;
+    the test also covers a basic hg:// clone (see fetch_and_create_tarball).
+    """
+    def skipIfNoHg():
+        import shutil
+        if not shutil.which('hg'):
+            return unittest.skip('Mercurial not installed')
+        return lambda f: f
+
+    def setUp(self):
+        super(FetchPremirroronlyMercurialTest, self).setUp()
+        self.mirrordir = os.path.join(self.tempdir, "mirrors")
+        os.mkdir(self.mirrordir)
+        self.reponame = "libgnt"
+        self.clonedir = os.path.join(self.tempdir, "hg")
+        self.recipe_url = "hg://keep.imfreedom.org/libgnt;module=libgnt"
+        self.d.setVar("SRCREV", "53e8b422faaf")
+        self.mirrorname = "hg_libgnt_keep.imfreedom.org_.libgnt.tar.gz"
+
+    def fetch_and_create_tarball(self):
+        """
+        Ask bitbake to download the repo and prepare a mirror tarball for us
+        """
+        self.d.setVar("BB_GENERATE_MIRROR_TARBALLS", "1")
+        fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+        fetcher.download()
+        mirrorfile = os.path.join(self.d.getVar("DL_DIR"), self.mirrorname)
+        self.assertTrue(os.path.exists(mirrorfile), "Mirror tarball {} has not been created".format(mirrorfile))
+        ## Move the tarball to the mirror directory
+        os.rename(mirrorfile, os.path.join(self.mirrordir, self.mirrorname))
+        self.d.setVar("BB_GENERATE_MIRROR_TARBALLS", "0")
+
+
+    @skipIfNoNetwork()
+    @skipIfNoHg()
+    def test_premirror_mercurial(self):
+        self.fetch_and_create_tarball()
+        self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n")
+        self.d.setVar("BB_FETCH_PREMIRRORONLY", "1")
+        self.d.setVar("BB_NO_NETWORK", "1")
+        fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+        fetcher.download()
+
+class FetchPremirroronlyBrokenTarball(FetcherTest):
+
+    def setUp(self):
+        super(FetchPremirroronlyBrokenTarball, self).setUp()
+        self.mirrordir = os.path.join(self.tempdir, "mirrors")
+        os.mkdir(self.mirrordir)
+        self.reponame = "bitbake"
+        self.gitdir = os.path.join(self.tempdir, "git", self.reponame)
+        self.recipe_url = "git://git.fake.repo/bitbake;protocol=https"
+        self.d.setVar("BB_FETCH_PREMIRRORONLY", "1")
+        self.d.setVar("BB_NO_NETWORK", "1")
+        self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n")
+        self.mirrorname = "git2_git.fake.repo.bitbake.tar.gz"
+        with open(os.path.join(self.mirrordir, self.mirrorname), 'w') as targz:
+            targz.write("This is not a tar.gz file!")
+
+    def test_mirror_broken_download(self):
+        import sys
+        self.d.setVar("SRCREV", "0"*40)
+        fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
+        with self.assertRaises(bb.fetch2.FetchError), self.assertLogs() as logs:
+            fetcher.download()
+        output = "".join(logs.output)
+        self.assertFalse(" not a git repository (or any parent up to mount point /)" in output)
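
The conditional skip decorators introduced across these fetcher tests (skipIfNoGitLFS, skipIfNoNpm, skipIfNoHg) all follow the same pattern: the helper runs once while the class body is evaluated and returns either unittest.skip(...) or an identity decorator. A minimal standalone sketch of that pattern (the skipIfMissing and ExampleTest names are illustrative, not part of the patch):

import shutil
import unittest

def skipIfMissing(tool):
    # Evaluated once at class-definition time: skip if 'tool' is not on PATH.
    if not shutil.which(tool):
        return unittest.skip('{} not installed'.format(tool))
    return lambda f: f  # no-op decorator when the tool is available

class ExampleTest(unittest.TestCase):
    @skipIfMissing('git-lfs')
    def test_needs_git_lfs(self):
        self.assertIsNotNone(shutil.which('git-lfs'))
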
diff --git a/bitbake/lib/bb/tests/parse.py b/bitbake/lib/bb/tests/parse.py
index 9e21e18425..72d1962e7e 100644
--- a/bitbake/lib/bb/tests/parse.py
+++ b/bitbake/lib/bb/tests/parse.py
@@ -98,8 +98,8 @@ exportD = "d"
 
 
     overridetest = """
-RRECOMMENDS_${PN} = "a"
-RRECOMMENDS_${PN}_libc = "b"
+RRECOMMENDS:${PN} = "a"
+RRECOMMENDS:${PN}:libc = "b"
 OVERRIDES = "libc:${PN}"
 PN = "gtk+"
 """
@@ -110,16 +110,16 @@ PN = "gtk+"
         self.assertEqual(d.getVar("RRECOMMENDS"), "b")
         bb.data.expandKeys(d)
         self.assertEqual(d.getVar("RRECOMMENDS"), "b")
-        d.setVar("RRECOMMENDS_gtk+", "c")
+        d.setVar("RRECOMMENDS:gtk+", "c")
         self.assertEqual(d.getVar("RRECOMMENDS"), "c")
 
     overridetest2 = """
 EXTRA_OECONF = ""
-EXTRA_OECONF_class-target = "b"
-EXTRA_OECONF_append = " c"
+EXTRA_OECONF:class-target = "b"
+EXTRA_OECONF:append = " c"
 """
 
-    def test_parse_overrides(self):
+    def test_parse_overrides2(self):
         f = self.parsehelper(self.overridetest2)
         d = bb.parse.handle(f.name, self.d)['']
         d.appendVar("EXTRA_OECONF", " d")
@@ -128,7 +128,7 @@ EXTRA_OECONF_append = " c"
 
     overridetest3 = """
 DESCRIPTION = "A"
-DESCRIPTION_${PN}-dev = "${DESCRIPTION} B"
+DESCRIPTION:${PN}-dev = "${DESCRIPTION} B"
 PN = "bc"
 """
 
@@ -136,15 +136,15 @@ PN = "bc"
         f = self.parsehelper(self.overridetest3)
         d = bb.parse.handle(f.name, self.d)['']
         bb.data.expandKeys(d)
-        self.assertEqual(d.getVar("DESCRIPTION_bc-dev"), "A B")
+        self.assertEqual(d.getVar("DESCRIPTION:bc-dev"), "A B")
         d.setVar("DESCRIPTION", "E")
-        d.setVar("DESCRIPTION_bc-dev", "C D")
+        d.setVar("DESCRIPTION:bc-dev", "C D")
         d.setVar("OVERRIDES", "bc-dev")
         self.assertEqual(d.getVar("DESCRIPTION"), "C D")
 
 
     classextend = """
-VAR_var_override1 = "B"
+VAR_var:override1 = "B"
 EXTRA = ":override1"
 OVERRIDES = "nothing${EXTRA}"
 
@@ -164,6 +164,7 @@ python () {
     # become unset/disappear.
     #
     def test_parse_classextend_contamination(self):
+        self.d.setVar("__bbclasstype", "recipe")
         cls = self.parsehelper(self.classextend_bbclass, suffix=".bbclass")
         #clsname = os.path.basename(cls.name).replace(".bbclass", "")
         self.classextend = self.classextend.replace("###CLASS###", cls.name)
@@ -185,12 +186,158 @@ deltask ${EMPTYVAR}
185""" 186"""
186 def test_parse_addtask_deltask(self): 187 def test_parse_addtask_deltask(self):
187 import sys 188 import sys
188 f = self.parsehelper(self.addtask_deltask) 189
190 with self.assertLogs() as logs:
191 f = self.parsehelper(self.addtask_deltask)
192 d = bb.parse.handle(f.name, self.d)['']
193
194 output = "".join(logs.output)
195 self.assertTrue("addtask contained multiple 'before' keywords" in output)
196 self.assertTrue("addtask contained multiple 'after' keywords" in output)
197 self.assertTrue('addtask ignored: " do_patch"' in output)
198 #self.assertTrue('dependent task do_foo for do_patch does not exist' in output)
199
200 broken_multiline_comment = """
201# First line of comment \\
202# Second line of comment \\
203
204"""
205 def test_parse_broken_multiline_comment(self):
206 f = self.parsehelper(self.broken_multiline_comment)
207 with self.assertRaises(bb.BBHandledException):
208 d = bb.parse.handle(f.name, self.d)['']
209
210
211 comment_in_var = """
212VAR = " \\
213 SOMEVAL \\
214# some comment \\
215 SOMEOTHERVAL \\
216"
217"""
218 def test_parse_comment_in_var(self):
219 f = self.parsehelper(self.comment_in_var)
220 with self.assertRaises(bb.BBHandledException):
221 d = bb.parse.handle(f.name, self.d)['']
222
223
224 at_sign_in_var_flag = """
225A[flag@.service] = "nonet"
226B[flag@.target] = "ntb"
227C[f] = "flag"
228
229unset A[flag@.service]
230"""
231 def test_parse_at_sign_in_var_flag(self):
232 f = self.parsehelper(self.at_sign_in_var_flag)
189 d = bb.parse.handle(f.name, self.d)[''] 233 d = bb.parse.handle(f.name, self.d)['']
234 self.assertEqual(d.getVar("A"), None)
235 self.assertEqual(d.getVar("B"), None)
236 self.assertEqual(d.getVarFlag("A","flag@.service"), None)
237 self.assertEqual(d.getVarFlag("B","flag@.target"), "ntb")
238 self.assertEqual(d.getVarFlag("C","f"), "flag")
239
240 def test_parse_invalid_at_sign_in_var_flag(self):
241 invalid_at_sign = self.at_sign_in_var_flag.replace("B[f", "B[@f")
242 f = self.parsehelper(invalid_at_sign)
243 with self.assertRaises(bb.parse.ParseError):
244 d = bb.parse.handle(f.name, self.d)['']
245
246 export_function_recipe = """
247inherit someclass
248"""
249
250 export_function_recipe2 = """
251inherit someclass
252
253do_compile () {
254 false
255}
256
257python do_compilepython () {
258 bb.note("Something else")
259}
260
261"""
262 export_function_class = """
263someclass_do_compile() {
264 true
265}
266
267python someclass_do_compilepython () {
268 bb.note("Something")
269}
270
271EXPORT_FUNCTIONS do_compile do_compilepython
272"""
273
274 export_function_class2 = """
275secondclass_do_compile() {
276 true
277}
278
279python secondclass_do_compilepython () {
280 bb.note("Something")
281}
282
283EXPORT_FUNCTIONS do_compile do_compilepython
284"""
190 285
191 stdout = sys.stdout.getvalue() 286 def test_parse_export_functions(self):
192 self.assertTrue("addtask contained multiple 'before' keywords" in stdout) 287 def check_function_flags(d):
193 self.assertTrue("addtask contained multiple 'after' keywords" in stdout) 288 self.assertEqual(d.getVarFlag("do_compile", "func"), 1)
194 self.assertTrue('addtask ignored: " do_patch"' in stdout) 289 self.assertEqual(d.getVarFlag("do_compilepython", "func"), 1)
195 #self.assertTrue('dependent task do_foo for do_patch does not exist' in stdout) 290 self.assertEqual(d.getVarFlag("do_compile", "python"), None)
291 self.assertEqual(d.getVarFlag("do_compilepython", "python"), "1")
292
293 with tempfile.TemporaryDirectory() as tempdir:
294 self.d.setVar("__bbclasstype", "recipe")
295 recipename = tempdir + "/recipe.bb"
296 os.makedirs(tempdir + "/classes")
297 with open(tempdir + "/classes/someclass.bbclass", "w") as f:
298 f.write(self.export_function_class)
299 f.flush()
300 with open(tempdir + "/classes/secondclass.bbclass", "w") as f:
301 f.write(self.export_function_class2)
302 f.flush()
303
304 with open(recipename, "w") as f:
305 f.write(self.export_function_recipe)
306 f.flush()
307 os.chdir(tempdir)
308 d = bb.parse.handle(recipename, bb.data.createCopy(self.d))['']
309 self.assertIn("someclass_do_compile", d.getVar("do_compile"))
310 self.assertIn("someclass_do_compilepython", d.getVar("do_compilepython"))
311 check_function_flags(d)
312
313 recipename2 = tempdir + "/recipe2.bb"
314 with open(recipename2, "w") as f:
315 f.write(self.export_function_recipe2)
316 f.flush()
317
318 d = bb.parse.handle(recipename2, bb.data.createCopy(self.d))['']
319 self.assertNotIn("someclass_do_compile", d.getVar("do_compile"))
320 self.assertNotIn("someclass_do_compilepython", d.getVar("do_compilepython"))
321 self.assertIn("false", d.getVar("do_compile"))
322 self.assertIn("else", d.getVar("do_compilepython"))
323 check_function_flags(d)
324
325 with open(recipename, "a+") as f:
326 f.write("\ninherit secondclass\n")
327 f.flush()
328 with open(recipename2, "a+") as f:
329 f.write("\ninherit secondclass\n")
330 f.flush()
331
332 d = bb.parse.handle(recipename, bb.data.createCopy(self.d))['']
333 self.assertIn("secondclass_do_compile", d.getVar("do_compile"))
334 self.assertIn("secondclass_do_compilepython", d.getVar("do_compilepython"))
335 check_function_flags(d)
336
337 d = bb.parse.handle(recipename2, bb.data.createCopy(self.d))['']
338 self.assertNotIn("someclass_do_compile", d.getVar("do_compile"))
339 self.assertNotIn("someclass_do_compilepython", d.getVar("do_compilepython"))
340 self.assertIn("false", d.getVar("do_compile"))
341 self.assertIn("else", d.getVar("do_compilepython"))
342 check_function_flags(d)
196 343
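
The parse tests above track bitbake's migration from the old '_' override syntax to ':' (RRECOMMENDS:${PN}, EXTRA_OECONF:append, and so on). The same behaviour can be exercised directly against a datastore; a rough sketch, assuming bitbake's lib/ directory is on sys.path (the variable values are illustrative):

from bb.data_smart import DataSmart

d = DataSmart()
d.setVar("OVERRIDES", "libc")
d.setVar("RRECOMMENDS", "a")
d.setVar("RRECOMMENDS:libc", "b")   # ':' introduces an override; the old syntax was '_libc'
print(d.getVar("RRECOMMENDS"))      # -> "b", the active override wins

d.setVar("EXTRA_OECONF", "")
d.setVar("EXTRA_OECONF:append", " c")
print(d.getVar("EXTRA_OECONF"))     # -> " c", appends are applied when the value is read
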
diff --git a/bitbake/lib/bb/tests/runqueue-tests/conf/bitbake.conf b/bitbake/lib/bb/tests/runqueue-tests/conf/bitbake.conf
index efebf001a9..05d7fd07dd 100644
--- a/bitbake/lib/bb/tests/runqueue-tests/conf/bitbake.conf
+++ b/bitbake/lib/bb/tests/runqueue-tests/conf/bitbake.conf
@@ -12,6 +12,6 @@ STAMP = "${TMPDIR}/stamps/${PN}"
 T = "${TMPDIR}/workdir/${PN}/temp"
 BB_NUMBER_THREADS = "4"
 
-BB_HASHBASE_WHITELIST = "BB_CURRENT_MC BB_HASHSERVE TMPDIR TOPDIR SLOWTASKS SSTATEVALID FILE"
+BB_BASEHASH_IGNORE_VARS = "BB_CURRENT_MC BB_HASHSERVE TMPDIR TOPDIR SLOWTASKS SSTATEVALID FILE BB_CURRENTTASK"
 
 include conf/multiconfig/${BB_CURRENT_MC}.conf
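
BB_BASEHASH_IGNORE_VARS (formerly BB_HASHBASE_WHITELIST) lists variables that are excluded when a task's base hash is computed, so changing them does not invalidate the task. A conceptual sketch of that idea in Python, not bitbake's actual implementation:

import hashlib

def basehash(taskdata, ignore_vars):
    # Hash only the variables that are not in the ignore set.
    relevant = {k: v for k, v in sorted(taskdata.items()) if k not in ignore_vars}
    return hashlib.sha256(repr(relevant).encode("utf-8")).hexdigest()

ignore = {"BB_CURRENT_MC", "BB_HASHSERVE", "TMPDIR", "TOPDIR",
          "SLOWTASKS", "SSTATEVALID", "FILE", "BB_CURRENTTASK"}
# TMPDIR is ignored, so the two hashes are identical:
assert basehash({"CFLAGS": "-O2", "TMPDIR": "/tmp/a"}, ignore) == \
       basehash({"CFLAGS": "-O2", "TMPDIR": "/tmp/b"}, ignore)
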
diff --git a/bitbake/lib/bb/tests/runqueue.py b/bitbake/lib/bb/tests/runqueue.py
index 3d51779d6c..cc87e8d6a8 100644
--- a/bitbake/lib/bb/tests/runqueue.py
+++ b/bitbake/lib/bb/tests/runqueue.py
@@ -29,13 +29,14 @@ class RunQueueTests(unittest.TestCase):
     def run_bitbakecmd(self, cmd, builddir, sstatevalid="", slowtasks="", extraenv=None, cleanup=False):
         env = os.environ.copy()
         env["BBPATH"] = os.path.realpath(os.path.join(os.path.dirname(__file__), "runqueue-tests"))
-        env["BB_ENV_EXTRAWHITE"] = "SSTATEVALID SLOWTASKS"
+        env["BB_ENV_PASSTHROUGH_ADDITIONS"] = "SSTATEVALID SLOWTASKS TOPDIR"
         env["SSTATEVALID"] = sstatevalid
         env["SLOWTASKS"] = slowtasks
+        env["TOPDIR"] = builddir
         if extraenv:
             for k in extraenv:
                 env[k] = extraenv[k]
-                env["BB_ENV_EXTRAWHITE"] = env["BB_ENV_EXTRAWHITE"] + " " + k
+                env["BB_ENV_PASSTHROUGH_ADDITIONS"] = env["BB_ENV_PASSTHROUGH_ADDITIONS"] + " " + k
         try:
             output = subprocess.check_output(cmd, env=env, stderr=subprocess.STDOUT,universal_newlines=True, cwd=builddir)
             print(output)
@@ -58,6 +59,8 @@ class RunQueueTests(unittest.TestCase):
             expected = ['a1:' + x for x in self.alltasks]
             self.assertEqual(set(tasks), set(expected))
 
+            self.shutdown(tempdir)
+
     def test_single_setscenevalid(self):
         with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
63 cmd = ["bitbake", "a1"] 66 cmd = ["bitbake", "a1"]
@@ -68,6 +71,8 @@ class RunQueueTests(unittest.TestCase):
                         'a1:populate_sysroot', 'a1:build']
             self.assertEqual(set(tasks), set(expected))
 
+            self.shutdown(tempdir)
+
     def test_intermediate_setscenevalid(self):
         with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
             cmd = ["bitbake", "a1"]
@@ -77,6 +82,8 @@ class RunQueueTests(unittest.TestCase):
                         'a1:populate_sysroot_setscene', 'a1:build']
             self.assertEqual(set(tasks), set(expected))
 
+            self.shutdown(tempdir)
+
     def test_intermediate_notcovered(self):
         with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
             cmd = ["bitbake", "a1"]
@@ -86,6 +93,8 @@ class RunQueueTests(unittest.TestCase):
                         'a1:package_qa_setscene', 'a1:build', 'a1:populate_sysroot_setscene']
             self.assertEqual(set(tasks), set(expected))
 
+            self.shutdown(tempdir)
+
     def test_all_setscenevalid(self):
         with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
             cmd = ["bitbake", "a1"]
@@ -95,6 +104,8 @@ class RunQueueTests(unittest.TestCase):
                         'a1:package_qa_setscene', 'a1:build', 'a1:populate_sysroot_setscene']
             self.assertEqual(set(tasks), set(expected))
 
+            self.shutdown(tempdir)
+
     def test_no_settasks(self):
         with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
             cmd = ["bitbake", "a1", "-c", "patch"]
@@ -103,6 +114,8 @@ class RunQueueTests(unittest.TestCase):
             expected = ['a1:fetch', 'a1:unpack', 'a1:patch']
             self.assertEqual(set(tasks), set(expected))
 
+            self.shutdown(tempdir)
+
     def test_mix_covered_notcovered(self):
         with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
             cmd = ["bitbake", "a1:do_patch", "a1:do_populate_sysroot"]
@@ -111,6 +124,7 @@ class RunQueueTests(unittest.TestCase):
             expected = ['a1:fetch', 'a1:unpack', 'a1:patch', 'a1:populate_sysroot_setscene']
             self.assertEqual(set(tasks), set(expected))
 
+            self.shutdown(tempdir)
 
     # Test targets with intermediate setscene tasks alongside a target with no intermediate setscene tasks
     def test_mixed_direct_tasks_setscene_tasks(self):
@@ -122,6 +136,8 @@ class RunQueueTests(unittest.TestCase):
                         'a1:package_qa_setscene', 'a1:build', 'a1:populate_sysroot_setscene']
             self.assertEqual(set(tasks), set(expected))
 
+            self.shutdown(tempdir)
+
     # This test slows down the execution of do_package_setscene until after other real tasks have
     # started running which tests for a bug where tasks were being lost from the buildable list of real
     # tasks if they weren't in tasks_covered or tasks_notcovered
@@ -136,12 +152,14 @@ class RunQueueTests(unittest.TestCase):
                         'a1:populate_sysroot', 'a1:build']
             self.assertEqual(set(tasks), set(expected))
 
-    def test_setscenewhitelist(self):
+            self.shutdown(tempdir)
+
+    def test_setscene_ignore_tasks(self):
         with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
             cmd = ["bitbake", "a1"]
             extraenv = {
                 "BB_SETSCENE_ENFORCE" : "1",
-                "BB_SETSCENE_ENFORCE_WHITELIST" : "a1:do_package_write_rpm a1:do_build"
+                "BB_SETSCENE_ENFORCE_IGNORE_TASKS" : "a1:do_package_write_rpm a1:do_build"
             }
             sstatevalid = "a1:do_package a1:do_package_qa a1:do_packagedata a1:do_package_write_ipk a1:do_populate_lic a1:do_populate_sysroot"
             tasks = self.run_bitbakecmd(cmd, tempdir, sstatevalid, extraenv=extraenv)
@@ -149,6 +167,8 @@ class RunQueueTests(unittest.TestCase):
                         'a1:populate_sysroot_setscene', 'a1:package_setscene']
             self.assertEqual(set(tasks), set(expected))
 
+            self.shutdown(tempdir)
+
     # Tests for problems with dependencies between setscene tasks
     def test_no_setscenevalid_harddeps(self):
         with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
@@ -162,6 +182,8 @@ class RunQueueTests(unittest.TestCase):
                         'd1:populate_sysroot', 'd1:build']
             self.assertEqual(set(tasks), set(expected))
 
+            self.shutdown(tempdir)
+
     def test_no_setscenevalid_withdeps(self):
         with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
             cmd = ["bitbake", "b1"]
@@ -172,6 +194,8 @@ class RunQueueTests(unittest.TestCase):
             expected.remove('a1:package_qa')
             self.assertEqual(set(tasks), set(expected))
 
+            self.shutdown(tempdir)
+
     def test_single_a1_setscenevalid_withdeps(self):
         with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
             cmd = ["bitbake", "b1"]
@@ -182,6 +206,8 @@ class RunQueueTests(unittest.TestCase):
                         'a1:populate_sysroot'] + ['b1:' + x for x in self.alltasks]
             self.assertEqual(set(tasks), set(expected))
 
+            self.shutdown(tempdir)
+
     def test_single_b1_setscenevalid_withdeps(self):
         with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
             cmd = ["bitbake", "b1"]
@@ -193,6 +219,8 @@ class RunQueueTests(unittest.TestCase):
             expected.remove('b1:package')
             self.assertEqual(set(tasks), set(expected))
 
+            self.shutdown(tempdir)
+
     def test_intermediate_setscenevalid_withdeps(self):
         with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
             cmd = ["bitbake", "b1"]
@@ -203,6 +231,8 @@ class RunQueueTests(unittest.TestCase):
             expected.remove('b1:package')
             self.assertEqual(set(tasks), set(expected))
 
+            self.shutdown(tempdir)
+
     def test_all_setscenevalid_withdeps(self):
         with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
             cmd = ["bitbake", "b1"]
@@ -213,6 +243,8 @@ class RunQueueTests(unittest.TestCase):
                         'b1:packagedata_setscene', 'b1:package_qa_setscene', 'b1:populate_sysroot_setscene']
             self.assertEqual(set(tasks), set(expected))
 
+            self.shutdown(tempdir)
+
     def test_multiconfig_setscene_optimise(self):
         with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
             extraenv = {
@@ -232,6 +264,8 @@ class RunQueueTests(unittest.TestCase):
                 expected.remove(x)
             self.assertEqual(set(tasks), set(expected))
 
+            self.shutdown(tempdir)
+
     def test_multiconfig_bbmask(self):
         # This test validates that multiconfigs can independently mask off
         # recipes they do not want with BBMASK. It works by having recipes
@@ -248,11 +282,13 @@ class RunQueueTests(unittest.TestCase):
             cmd = ["bitbake", "mc:mc-1:fails-mc2", "mc:mc_2:fails-mc1"]
             self.run_bitbakecmd(cmd, tempdir, "", extraenv=extraenv)
 
+            self.shutdown(tempdir)
+
     def test_multiconfig_mcdepends(self):
         with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
             extraenv = {
                 "BBMULTICONFIG" : "mc-1 mc_2",
-                "BB_SIGNATURE_HANDLER" : "TestMulticonfigDepends",
+                "BB_SIGNATURE_HANDLER" : "basichash",
                 "EXTRA_BBFILES": "${COREBASE}/recipes/fails-mc/*.bb",
             }
             tasks = self.run_bitbakecmd(["bitbake", "mc:mc-1:f1"], tempdir, "", extraenv=extraenv, cleanup=True)
@@ -278,7 +314,8 @@ class RunQueueTests(unittest.TestCase):
278 ["mc_2:a1:%s" % t for t in rerun_tasks] 314 ["mc_2:a1:%s" % t for t in rerun_tasks]
279 self.assertEqual(set(tasks), set(expected)) 315 self.assertEqual(set(tasks), set(expected))
280 316
281 @unittest.skipIf(sys.version_info < (3, 5, 0), 'Python 3.5 or later required') 317 self.shutdown(tempdir)
318
282 def test_hashserv_single(self): 319 def test_hashserv_single(self):
283 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 320 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
284 extraenv = { 321 extraenv = {
@@ -304,7 +341,6 @@ class RunQueueTests(unittest.TestCase):
304 341
305 self.shutdown(tempdir) 342 self.shutdown(tempdir)
306 343
307 @unittest.skipIf(sys.version_info < (3, 5, 0), 'Python 3.5 or later required')
308 def test_hashserv_double(self): 344 def test_hashserv_double(self):
309 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 345 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
310 extraenv = { 346 extraenv = {
@@ -329,7 +365,6 @@ class RunQueueTests(unittest.TestCase):
329 365
330 self.shutdown(tempdir) 366 self.shutdown(tempdir)
331 367
332 @unittest.skipIf(sys.version_info < (3, 5, 0), 'Python 3.5 or later required')
333 def test_hashserv_multiple_setscene(self): 368 def test_hashserv_multiple_setscene(self):
334 # Runs e1:do_package_setscene twice 369 # Runs e1:do_package_setscene twice
335 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 370 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
@@ -361,7 +396,6 @@ class RunQueueTests(unittest.TestCase):
361 396
362 def shutdown(self, tempdir): 397 def shutdown(self, tempdir):
363 # Wait for the hashserve socket to disappear else we'll see races with the tempdir cleanup 398 # Wait for the hashserve socket to disappear else we'll see races with the tempdir cleanup
364 while os.path.exists(tempdir + "/hashserve.sock"): 399 while (os.path.exists(tempdir + "/hashserve.sock") or os.path.exists(tempdir + "/cache/hashserv.db-wal") or os.path.exists(tempdir + "/bitbake.lock")):
365 time.sleep(0.5) 400 time.sleep(0.5)
366 401
367
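Each test in the hunks above now ends with self.shutdown(tempdir) before the TemporaryDirectory context exits, and shutdown() itself waits on three leftover files rather than just the hash server socket. A minimal standalone sketch of that polling pattern, with the file names copied from the hunk above (the helper name is illustrative):

    import os
    import time

    def wait_for_teardown(tempdir, poll=0.5):
        # Poll until the hash equivalence socket, the sqlite WAL file and
        # the bitbake lock are gone; otherwise the TemporaryDirectory
        # cleanup races with the server still writing into it.
        leftovers = ["/hashserve.sock", "/cache/hashserv.db-wal", "/bitbake.lock"]
        while any(os.path.exists(tempdir + p) for p in leftovers):
            time.sleep(poll)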
diff --git a/bitbake/lib/bb/tests/siggen.py b/bitbake/lib/bb/tests/siggen.py
index c21ab4e4fb..0dc67e6cc2 100644
--- a/bitbake/lib/bb/tests/siggen.py
+++ b/bitbake/lib/bb/tests/siggen.py
@@ -17,75 +17,12 @@ import bb.siggen
17 17
18class SiggenTest(unittest.TestCase): 18class SiggenTest(unittest.TestCase):
19 19
20 def test_clean_basepath_simple_target_basepath(self): 20 def test_build_pnid(self):
21 basepath = '/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask' 21 tests = {
22 expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask' 22 ('', 'helloworld', 'do_sometask') : 'helloworld:do_sometask',
23 ('XX', 'helloworld', 'do_sometask') : 'mc:XX:helloworld:do_sometask',
24 }
23 25
24 actual_cleaned = bb.siggen.clean_basepath(basepath) 26 for t in tests:
27 self.assertEqual(bb.siggen.build_pnid(*t), tests[t])
25 28
26 self.assertEqual(actual_cleaned, expected_cleaned)
27
28 def test_clean_basepath_basic_virtual_basepath(self):
29 basepath = 'virtual:something:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask'
30 expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:virtual:something'
31
32 actual_cleaned = bb.siggen.clean_basepath(basepath)
33
34 self.assertEqual(actual_cleaned, expected_cleaned)
35
36 def test_clean_basepath_mc_basepath(self):
37 basepath = 'mc:somemachine:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask'
38 expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:mc:somemachine'
39
40 actual_cleaned = bb.siggen.clean_basepath(basepath)
41
42 self.assertEqual(actual_cleaned, expected_cleaned)
43
44 def test_clean_basepath_virtual_long_prefix_basepath(self):
45 basepath = 'virtual:something:A:B:C:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask'
46 expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:virtual:something:A:B:C'
47
48 actual_cleaned = bb.siggen.clean_basepath(basepath)
49
50 self.assertEqual(actual_cleaned, expected_cleaned)
51
52 def test_clean_basepath_mc_virtual_basepath(self):
53 basepath = 'mc:somemachine:virtual:something:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask'
54 expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:virtual:something:mc:somemachine'
55
56 actual_cleaned = bb.siggen.clean_basepath(basepath)
57
58 self.assertEqual(actual_cleaned, expected_cleaned)
59
60 def test_clean_basepath_mc_virtual_long_prefix_basepath(self):
61 basepath = 'mc:X:virtual:something:C:B:A:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask'
62 expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:virtual:something:C:B:A:mc:X'
63
64 actual_cleaned = bb.siggen.clean_basepath(basepath)
65
66 self.assertEqual(actual_cleaned, expected_cleaned)
67
68
69 # def test_clean_basepath_performance(self):
70 # input_basepaths = [
71 # 'mc:X:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
72 # 'mc:X:virtual:something:C:B:A:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
73 # 'virtual:something:C:B:A:/different/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
74 # 'virtual:something:A:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
75 # '/this/is/most/common/input/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
76 # '/and/should/be/tested/with/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
77 # '/more/weight/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
78 # ]
79
80 # time_start = time.time()
81
82 # i = 2000000
83 # while i >= 0:
84 # for basepath in input_basepaths:
85 # bb.siggen.clean_basepath(basepath)
86 # i -= 1
87
88 # elapsed = time.time() - time_start
89 # print('{} ({}s)'.format(self.id(), round(elapsed, 3)))
90
91 # self.assertTrue(False)
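The table-driven test above pins down the expected output of bb.siggen.build_pnid for the plain and multiconfig cases. A sketch of a function satisfying exactly those two rows, reconstructed from the test alone rather than from the real implementation:

    def build_pnid(mc, pn, taskname):
        # An empty multiconfig yields "pn:task"; a non-empty one is
        # prefixed with "mc:<name>:".
        if mc:
            return "mc:%s:%s:%s" % (mc, pn, taskname)
        return "%s:%s" % (pn, taskname)

    assert build_pnid('', 'helloworld', 'do_sometask') == 'helloworld:do_sometask'
    assert build_pnid('XX', 'helloworld', 'do_sometask') == 'mc:XX:helloworld:do_sometask'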
diff --git a/bitbake/lib/bb/tests/utils.py b/bitbake/lib/bb/tests/utils.py
index a7ff33db52..c363f62d7d 100644
--- a/bitbake/lib/bb/tests/utils.py
+++ b/bitbake/lib/bb/tests/utils.py
@@ -418,7 +418,7 @@ MULTILINE = " stuff \\
418 ['MULTILINE'], 418 ['MULTILINE'],
419 handle_var) 419 handle_var)
420 420
421 testvalue = re.sub('\s+', ' ', value_in_callback.strip()) 421 testvalue = re.sub(r'\s+', ' ', value_in_callback.strip())
422 self.assertEqual(expected_value, testvalue) 422 self.assertEqual(expected_value, testvalue)
423 423
424class EditBbLayersConf(unittest.TestCase): 424class EditBbLayersConf(unittest.TestCase):
@@ -666,3 +666,21 @@ class GetReferencedVars(unittest.TestCase):
666 666
667 layers = [{"SRC_URI"}, {"QT_GIT", "QT_MODULE", "QT_MODULE_BRANCH_PARAM", "QT_GIT_PROTOCOL"}, {"QT_GIT_PROJECT", "QT_MODULE_BRANCH", "BPN"}, {"PN", "SPECIAL_PKGSUFFIX"}] 667 layers = [{"SRC_URI"}, {"QT_GIT", "QT_MODULE", "QT_MODULE_BRANCH_PARAM", "QT_GIT_PROTOCOL"}, {"QT_GIT_PROJECT", "QT_MODULE_BRANCH", "BPN"}, {"PN", "SPECIAL_PKGSUFFIX"}]
668 self.check_referenced("${SRC_URI}", layers) 668 self.check_referenced("${SRC_URI}", layers)
669
670
671class EnvironmentTests(unittest.TestCase):
672 def test_environment(self):
673 os.environ["A"] = "this is A"
674 self.assertIn("A", os.environ)
675 self.assertEqual(os.environ["A"], "this is A")
676 self.assertNotIn("B", os.environ)
677
678 with bb.utils.environment(B="this is B"):
679 self.assertIn("A", os.environ)
680 self.assertEqual(os.environ["A"], "this is A")
681 self.assertIn("B", os.environ)
682 self.assertEqual(os.environ["B"], "this is B")
683
684 self.assertIn("A", os.environ)
685 self.assertEqual(os.environ["A"], "this is A")
686 self.assertNotIn("B", os.environ)
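The new EnvironmentTests exercise bb.utils.environment as a context manager that sets variables on entry and restores the previous state on exit. One way to implement a manager with that contract, as a sketch against the test above (the real bb.utils.environment may differ in detail):

    import contextlib
    import os

    @contextlib.contextmanager
    def environment(**envvars):
        # Remember the prior values, apply the new ones, and restore
        # (or remove) them when the block exits, even on exceptions.
        backup = {k: os.environ.get(k) for k in envvars}
        try:
            os.environ.update(envvars)
            yield
        finally:
            for k, v in backup.items():
                if v is None:
                    os.environ.pop(k, None)
                else:
                    os.environ[k] = v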
diff --git a/bitbake/lib/bb/tinfoil.py b/bitbake/lib/bb/tinfoil.py
index 763c329810..dcd3910cc4 100644
--- a/bitbake/lib/bb/tinfoil.py
+++ b/bitbake/lib/bb/tinfoil.py
@@ -10,6 +10,7 @@
10import logging 10import logging
11import os 11import os
12import sys 12import sys
13import time
13import atexit 14import atexit
14import re 15import re
15from collections import OrderedDict, defaultdict 16from collections import OrderedDict, defaultdict
@@ -52,6 +53,10 @@ class TinfoilDataStoreConnectorVarHistory:
52 def remoteCommand(self, cmd, *args, **kwargs): 53 def remoteCommand(self, cmd, *args, **kwargs):
53 return self.tinfoil.run_command('dataStoreConnectorVarHistCmd', self.dsindex, cmd, args, kwargs) 54 return self.tinfoil.run_command('dataStoreConnectorVarHistCmd', self.dsindex, cmd, args, kwargs)
54 55
56 def emit(self, var, oval, val, o, d):
57 ret = self.tinfoil.run_command('dataStoreConnectorVarHistCmdEmit', self.dsindex, var, oval, val, d.dsindex)
58 o.write(ret)
59
55 def __getattr__(self, name): 60 def __getattr__(self, name):
56 if not hasattr(bb.data_smart.VariableHistory, name): 61 if not hasattr(bb.data_smart.VariableHistory, name):
57 raise AttributeError("VariableHistory has no such method %s" % name) 62 raise AttributeError("VariableHistory has no such method %s" % name)
@@ -320,11 +325,11 @@ class Tinfoil:
320 self.recipes_parsed = False 325 self.recipes_parsed = False
321 self.quiet = 0 326 self.quiet = 0
322 self.oldhandlers = self.logger.handlers[:] 327 self.oldhandlers = self.logger.handlers[:]
328 self.localhandlers = []
323 if setup_logging: 329 if setup_logging:
324 # This is the *client-side* logger, nothing to do with 330 # This is the *client-side* logger, nothing to do with
325 # logging messages from the server 331 # logging messages from the server
326 bb.msg.logger_create('BitBake', output) 332 bb.msg.logger_create('BitBake', output)
327 self.localhandlers = []
328 for handler in self.logger.handlers: 333 for handler in self.logger.handlers:
329 if handler not in self.oldhandlers: 334 if handler not in self.oldhandlers:
330 self.localhandlers.append(handler) 335 self.localhandlers.append(handler)
@@ -440,11 +445,17 @@ class Tinfoil:
440 to initialise Tinfoil and use it with config_only=True first and 445 to initialise Tinfoil and use it with config_only=True first and
441 then conditionally call this function to parse recipes later. 446 then conditionally call this function to parse recipes later.
442 """ 447 """
443 config_params = TinfoilConfigParameters(config_only=False) 448 config_params = TinfoilConfigParameters(config_only=False, quiet=self.quiet)
444 self.run_actions(config_params) 449 self.run_actions(config_params)
445 self.recipes_parsed = True 450 self.recipes_parsed = True
446 451
447 def run_command(self, command, *params): 452 def modified_files(self):
453 """
454 Notify the server it needs to revalidate its caches since the client has modified files
455 """
456 self.run_command("revalidateCaches")
457
458 def run_command(self, command, *params, handle_events=True):
448 """ 459 """
449 Run a command on the server (as implemented in bb.command). 460 Run a command on the server (as implemented in bb.command).
450 Note that there are two types of command - synchronous and 461 Note that there are two types of command - synchronous and
@@ -464,7 +475,7 @@ class Tinfoil:
464 try: 475 try:
465 result = self.server_connection.connection.runCommand(commandline) 476 result = self.server_connection.connection.runCommand(commandline)
466 finally: 477 finally:
467 while True: 478 while handle_events:
468 event = self.wait_event() 479 event = self.wait_event()
469 if not event: 480 if not event:
470 break 481 break
@@ -489,7 +500,7 @@ class Tinfoil:
489 Wait for an event from the server for the specified time. 500 Wait for an event from the server for the specified time.
490 A timeout of 0 means don't wait if there are no events in the queue. 501 A timeout of 0 means don't wait if there are no events in the queue.
491 Returns the next event in the queue or None if the timeout was 502 Returns the next event in the queue or None if the timeout was
492 reached. Note that in order to recieve any events you will 503 reached. Note that in order to receive any events you will
493 first need to set the internal event mask using set_event_mask() 504 first need to set the internal event mask using set_event_mask()
494 (otherwise whatever event mask the UI set up will be in effect). 505 (otherwise whatever event mask the UI set up will be in effect).
495 """ 506 """
@@ -725,6 +736,7 @@ class Tinfoil:
725 736
726 ret = self.run_command('buildTargets', targets, task) 737 ret = self.run_command('buildTargets', targets, task)
727 if handle_events: 738 if handle_events:
739 lastevent = time.time()
728 result = False 740 result = False
729 # Borrowed from knotty, instead somewhat hackily we use the helper 741 # Borrowed from knotty, instead somewhat hackily we use the helper
730 # as the object to store "shutdown" on 742 # as the object to store "shutdown" on
@@ -737,6 +749,7 @@ class Tinfoil:
737 try: 749 try:
738 event = self.wait_event(0.25) 750 event = self.wait_event(0.25)
739 if event: 751 if event:
752 lastevent = time.time()
740 if event_callback and event_callback(event): 753 if event_callback and event_callback(event):
741 continue 754 continue
742 if helper.eventHandler(event): 755 if helper.eventHandler(event):
@@ -757,7 +770,7 @@ class Tinfoil:
757 if parseprogress: 770 if parseprogress:
758 parseprogress.update(event.progress) 771 parseprogress.update(event.progress)
759 else: 772 else:
760 bb.warn("Got ProcessProgress event for someting that never started?") 773 bb.warn("Got ProcessProgress event for something that never started?")
761 continue 774 continue
762 if isinstance(event, bb.event.ProcessFinished): 775 if isinstance(event, bb.event.ProcessFinished):
763 if self.quiet > 1: 776 if self.quiet > 1:
@@ -769,7 +782,7 @@ class Tinfoil:
769 if isinstance(event, bb.command.CommandCompleted): 782 if isinstance(event, bb.command.CommandCompleted):
770 result = True 783 result = True
771 break 784 break
772 if isinstance(event, bb.command.CommandFailed): 785 if isinstance(event, (bb.command.CommandFailed, bb.command.CommandExit)):
773 self.logger.error(str(event)) 786 self.logger.error(str(event))
774 result = False 787 result = False
775 break 788 break
@@ -781,10 +794,13 @@ class Tinfoil:
781 self.logger.error(str(event)) 794 self.logger.error(str(event))
782 result = False 795 result = False
783 break 796 break
784
785 elif helper.shutdown > 1: 797 elif helper.shutdown > 1:
786 break 798 break
787 termfilter.updateFooter() 799 termfilter.updateFooter()
800 if time.time() > (lastevent + (3*60)):
801 if not self.run_command('ping', handle_events=False):
802 print("\nUnable to ping server and no events, closing down...\n")
803 return False
788 except KeyboardInterrupt: 804 except KeyboardInterrupt:
789 termfilter.clearFooter() 805 termfilter.clearFooter()
790 if helper.shutdown == 1: 806 if helper.shutdown == 1:
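Taken together, the tinfoil changes add a liveness check to build_targets(): the event loop timestamps the last event seen and, after three minutes of silence, pings the server with handle_events=False so the probe does not drain the event queue. A condensed sketch of that loop, where tinfoil and handle are assumed stand-ins and the timeout matches the 3*60 in the hunk above:

    import time

    def wait_with_keepalive(tinfoil, handle, idle_timeout=3 * 60):
        lastevent = time.time()
        while True:
            event = tinfoil.wait_event(0.25)
            if event:
                lastevent = time.time()
                if handle(event):          # assumed to return True when the build is done
                    return True
                continue
            if time.time() > lastevent + idle_timeout:
                # No events for a while; check the server is still alive.
                if not tinfoil.run_command('ping', handle_events=False):
                    return False
                lastevent = time.time()    # sketch-only: avoid re-pinging every loop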
diff --git a/bitbake/lib/bb/ui/buildinfohelper.py b/bitbake/lib/bb/ui/buildinfohelper.py
index 43aa592842..8b212b7803 100644
--- a/bitbake/lib/bb/ui/buildinfohelper.py
+++ b/bitbake/lib/bb/ui/buildinfohelper.py
@@ -45,7 +45,7 @@ from pprint import pformat
45import logging 45import logging
46from datetime import datetime, timedelta 46from datetime import datetime, timedelta
47 47
48from django.db import transaction, connection 48from django.db import transaction
49 49
50 50
51# pylint: disable=invalid-name 51# pylint: disable=invalid-name
@@ -227,6 +227,12 @@ class ORMWrapper(object):
227 build.completed_on = timezone.now() 227 build.completed_on = timezone.now()
228 build.outcome = outcome 228 build.outcome = outcome
229 build.save() 229 build.save()
230
231 # We force a sync point here to force the outcome status commit,
232 # which resolves a race condition with the build completion takedown
233 transaction.set_autocommit(True)
234 transaction.set_autocommit(False)
235
230 signal_runbuilds() 236 signal_runbuilds()
231 237
232 def update_target_set_license_manifest(self, target, license_manifest_path): 238 def update_target_set_license_manifest(self, target, license_manifest_path):
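The pair of set_autocommit() calls above is a deliberate idiom: turning autocommit on commits the open transaction, and turning it back off starts a fresh one, so the pair acts as a manual sync point while Toaster stays in manual-commit mode. As a tiny illustrative helper, assuming Django's transaction API (which this file already imports):

    from django.db import transaction

    def commit_sync_point():
        # Commit whatever is pending, then resume manual transactions.
        transaction.set_autocommit(True)
        transaction.set_autocommit(False)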
@@ -483,14 +489,14 @@ class ORMWrapper(object):
483 489
484 # we already created the root directory, so ignore any 490 # we already created the root directory, so ignore any
485 # entry for it 491 # entry for it
486 if len(path) == 0: 492 if not path:
487 continue 493 continue
488 494
489 parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1]) 495 parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
490 if len(parent_path) == 0: 496 if not parent_path:
491 parent_path = "/" 497 parent_path = "/"
492 parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY) 498 parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
493 tf_obj = Target_File.objects.create( 499 Target_File.objects.create(
494 target = target_obj, 500 target = target_obj,
495 path = path, 501 path = path,
496 size = size, 502 size = size,
@@ -555,7 +561,7 @@ class ORMWrapper(object):
555 561
556 parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY) 562 parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
557 563
558 tf_obj = Target_File.objects.create( 564 Target_File.objects.create(
559 target = target_obj, 565 target = target_obj,
560 path = path, 566 path = path,
561 size = size, 567 size = size,
@@ -571,7 +577,7 @@ class ORMWrapper(object):
571 assert isinstance(build_obj, Build) 577 assert isinstance(build_obj, Build)
572 assert isinstance(target_obj, Target) 578 assert isinstance(target_obj, Target)
573 579
574 errormsg = "" 580 errormsg = []
575 for p in packagedict: 581 for p in packagedict:
576 # Search name switches round the installed name vs package name 582 # Search name switches round the installed name vs package name
577 # by default installed name == package name 583 # by default installed name == package name
@@ -633,10 +639,10 @@ class ORMWrapper(object):
633 packagefile_objects.append(Package_File( package = packagedict[p]['object'], 639 packagefile_objects.append(Package_File( package = packagedict[p]['object'],
634 path = targetpath, 640 path = targetpath,
635 size = targetfilesize)) 641 size = targetfilesize))
636 if len(packagefile_objects): 642 if packagefile_objects:
637 Package_File.objects.bulk_create(packagefile_objects) 643 Package_File.objects.bulk_create(packagefile_objects)
638 except KeyError as e: 644 except KeyError as e:
639 errormsg += " stpi: Key error, package %s key %s \n" % ( p, e ) 645 errormsg.append(" stpi: Key error, package %s key %s \n" % (p, e))
640 646
641 # save disk installed size 647 # save disk installed size
642 packagedict[p]['object'].installed_size = packagedict[p]['size'] 648 packagedict[p]['object'].installed_size = packagedict[p]['size']
@@ -673,13 +679,13 @@ class ORMWrapper(object):
673 logger.warning("Could not add dependency to the package %s " 679 logger.warning("Could not add dependency to the package %s "
674 "because %s is an unknown package", p, px) 680 "because %s is an unknown package", p, px)
675 681
676 if len(packagedeps_objs) > 0: 682 if packagedeps_objs:
677 Package_Dependency.objects.bulk_create(packagedeps_objs) 683 Package_Dependency.objects.bulk_create(packagedeps_objs)
678 else: 684 else:
679 logger.info("No package dependencies created") 685 logger.info("No package dependencies created")
680 686
681 if len(errormsg) > 0: 687 if errormsg:
682 logger.warning("buildinfohelper: target_package_info could not identify recipes: \n%s", errormsg) 688 logger.warning("buildinfohelper: target_package_info could not identify recipes: \n%s", "".join(errormsg))
683 689
684 def save_target_image_file_information(self, target_obj, file_name, file_size): 690 def save_target_image_file_information(self, target_obj, file_name, file_size):
685 Target_Image_File.objects.create(target=target_obj, 691 Target_Image_File.objects.create(target=target_obj,
@@ -767,7 +773,7 @@ class ORMWrapper(object):
767 packagefile_objects.append(Package_File( package = bp_object, 773 packagefile_objects.append(Package_File( package = bp_object,
768 path = path, 774 path = path,
769 size = package_info['FILES_INFO'][path] )) 775 size = package_info['FILES_INFO'][path] ))
770 if len(packagefile_objects): 776 if packagefile_objects:
771 Package_File.objects.bulk_create(packagefile_objects) 777 Package_File.objects.bulk_create(packagefile_objects)
772 778
773 def _po_byname(p): 779 def _po_byname(p):
@@ -809,7 +815,7 @@ class ORMWrapper(object):
809 packagedeps_objs.append(Package_Dependency( package = bp_object, 815 packagedeps_objs.append(Package_Dependency( package = bp_object,
810 depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS)) 816 depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS))
811 817
812 if len(packagedeps_objs) > 0: 818 if packagedeps_objs:
813 Package_Dependency.objects.bulk_create(packagedeps_objs) 819 Package_Dependency.objects.bulk_create(packagedeps_objs)
814 820
815 return bp_object 821 return bp_object
@@ -826,7 +832,7 @@ class ORMWrapper(object):
826 desc = vardump[root_var]['doc'] 832 desc = vardump[root_var]['doc']
827 if desc is None: 833 if desc is None:
828 desc = '' 834 desc = ''
829 if len(desc): 835 if desc:
830 HelpText.objects.get_or_create(build=build_obj, 836 HelpText.objects.get_or_create(build=build_obj,
831 area=HelpText.VARIABLE, 837 area=HelpText.VARIABLE,
832 key=k, text=desc) 838 key=k, text=desc)
@@ -846,7 +852,7 @@ class ORMWrapper(object):
846 file_name = vh['file'], 852 file_name = vh['file'],
847 line_number = vh['line'], 853 line_number = vh['line'],
848 operation = vh['op'])) 854 operation = vh['op']))
849 if len(varhist_objects): 855 if varhist_objects:
850 VariableHistory.objects.bulk_create(varhist_objects) 856 VariableHistory.objects.bulk_create(varhist_objects)
851 857
852 858
@@ -893,9 +899,6 @@ class BuildInfoHelper(object):
893 self.task_order = 0 899 self.task_order = 0
894 self.autocommit_step = 1 900 self.autocommit_step = 1
895 self.server = server 901 self.server = server
896 # we use manual transactions if the database doesn't autocommit on us
897 if not connection.features.autocommits_when_autocommit_is_off:
898 transaction.set_autocommit(False)
899 self.orm_wrapper = ORMWrapper() 902 self.orm_wrapper = ORMWrapper()
900 self.has_build_history = has_build_history 903 self.has_build_history = has_build_history
901 self.tmp_dir = self.server.runCommand(["getVariable", "TMPDIR"])[0] 904 self.tmp_dir = self.server.runCommand(["getVariable", "TMPDIR"])[0]
@@ -1059,27 +1062,6 @@ class BuildInfoHelper(object):
1059 1062
1060 return recipe_info 1063 return recipe_info
1061 1064
1062 def _get_path_information(self, task_object):
1063 self._ensure_build()
1064
1065 assert isinstance(task_object, Task)
1066 build_stats_format = "{tmpdir}/buildstats/{buildname}/{package}/"
1067 build_stats_path = []
1068
1069 for t in self.internal_state['targets']:
1070 buildname = self.internal_state['build'].build_name
1071 pe, pv = task_object.recipe.version.split(":",1)
1072 if len(pe) > 0:
1073 package = task_object.recipe.name + "-" + pe + "_" + pv
1074 else:
1075 package = task_object.recipe.name + "-" + pv
1076
1077 build_stats_path.append(build_stats_format.format(tmpdir=self.tmp_dir,
1078 buildname=buildname,
1079 package=package))
1080
1081 return build_stats_path
1082
1083 1065
1084 ################################ 1066 ################################
1085 ## external available methods to store information 1067 ## external available methods to store information
@@ -1313,12 +1295,11 @@ class BuildInfoHelper(object):
1313 task_information['outcome'] = Task.OUTCOME_FAILED 1295 task_information['outcome'] = Task.OUTCOME_FAILED
1314 del self.internal_state['taskdata'][identifier] 1296 del self.internal_state['taskdata'][identifier]
1315 1297
1316 if not connection.features.autocommits_when_autocommit_is_off: 1298 # we force a sync point here, to get the progress bar to show
1317 # we force a sync point here, to get the progress bar to show 1299 if self.autocommit_step % 3 == 0:
1318 if self.autocommit_step % 3 == 0: 1300 transaction.set_autocommit(True)
1319 transaction.set_autocommit(True) 1301 transaction.set_autocommit(False)
1320 transaction.set_autocommit(False) 1302 self.autocommit_step += 1
1321 self.autocommit_step += 1
1322 1303
1323 self.orm_wrapper.get_update_task_object(task_information, True) # must exist 1304 self.orm_wrapper.get_update_task_object(task_information, True) # must exist
1324 1305
@@ -1404,7 +1385,7 @@ class BuildInfoHelper(object):
1404 assert 'pn' in event._depgraph 1385 assert 'pn' in event._depgraph
1405 assert 'tdepends' in event._depgraph 1386 assert 'tdepends' in event._depgraph
1406 1387
1407 errormsg = "" 1388 errormsg = []
1408 1389
1409 # save layer version priorities 1390 # save layer version priorities
1410 if 'layer-priorities' in event._depgraph.keys(): 1391 if 'layer-priorities' in event._depgraph.keys():
@@ -1496,7 +1477,7 @@ class BuildInfoHelper(object):
1496 elif dep in self.internal_state['recipes']: 1477 elif dep in self.internal_state['recipes']:
1497 dependency = self.internal_state['recipes'][dep] 1478 dependency = self.internal_state['recipes'][dep]
1498 else: 1479 else:
1499 errormsg += " stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, dep) 1480 errormsg.append(" stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, dep))
1500 continue 1481 continue
1501 recipe_dep = Recipe_Dependency(recipe=target, 1482 recipe_dep = Recipe_Dependency(recipe=target,
1502 depends_on=dependency, 1483 depends_on=dependency,
@@ -1537,8 +1518,8 @@ class BuildInfoHelper(object):
1537 taskdeps_objects.append(Task_Dependency( task = target, depends_on = dep )) 1518 taskdeps_objects.append(Task_Dependency( task = target, depends_on = dep ))
1538 Task_Dependency.objects.bulk_create(taskdeps_objects) 1519 Task_Dependency.objects.bulk_create(taskdeps_objects)
1539 1520
1540 if len(errormsg) > 0: 1521 if errormsg:
1541 logger.warning("buildinfohelper: dependency info could not identify recipes: \n%s", errormsg) 1522 logger.warning("buildinfohelper: dependency info could not identify recipes: \n%s", "".join(errormsg))
1542 1523
1543 1524
1544 def store_build_package_information(self, event): 1525 def store_build_package_information(self, event):
@@ -1618,7 +1599,7 @@ class BuildInfoHelper(object):
1618 1599
1619 if 'backlog' in self.internal_state: 1600 if 'backlog' in self.internal_state:
1620 # if we have a backlog of events, do our best to save them here 1601 # if we have a backlog of events, do our best to save them here
1621 if len(self.internal_state['backlog']): 1602 if self.internal_state['backlog']:
1622 tempevent = self.internal_state['backlog'].pop() 1603 tempevent = self.internal_state['backlog'].pop()
1623 logger.debug("buildinfohelper: Saving stored event %s " 1604 logger.debug("buildinfohelper: Saving stored event %s "
1624 % tempevent) 1605 % tempevent)
@@ -1765,7 +1746,6 @@ class BuildInfoHelper(object):
1765 1746
1766 buildname = self.server.runCommand(['getVariable', 'BUILDNAME'])[0] 1747 buildname = self.server.runCommand(['getVariable', 'BUILDNAME'])[0]
1767 machine = self.server.runCommand(['getVariable', 'MACHINE'])[0] 1748 machine = self.server.runCommand(['getVariable', 'MACHINE'])[0]
1768 image_name = self.server.runCommand(['getVariable', 'IMAGE_NAME'])[0]
1769 1749
1770 # location of the manifest files for this build; 1750 # location of the manifest files for this build;
1771 # note that this file is only produced if an image is produced 1751 # note that this file is only produced if an image is produced
@@ -1786,6 +1766,18 @@ class BuildInfoHelper(object):
1786 # filter out anything which isn't an image target 1766 # filter out anything which isn't an image target
1787 image_targets = [target for target in targets if target.is_image] 1767 image_targets = [target for target in targets if target.is_image]
1788 1768
1769 if len(image_targets) > 0:
1770 #if there are image targets retrieve image_name
1771 image_name = self.server.runCommand(['getVariable', 'IMAGE_NAME'])[0]
1772 if not image_name:
1773 #When build target is an image and image_name is not found as an environment variable
1774 logger.info("IMAGE_NAME not found, extracting from bitbake command")
1775 cmd = self.server.runCommand(['getVariable','BB_CMDLINE'])[0]
1776 #filter out tokens that are command line options
1777 cmd = [token for token in cmd if not token.startswith('-')]
1778 image_name = cmd[1].split(':', 1)[0] # remove everything after : in image name
1779 logger.info("IMAGE_NAME found as : %s " % image_name)
1780
1789 for image_target in image_targets: 1781 for image_target in image_targets:
1790 # this is set to True if we find at least one file relating to 1782 # this is set to True if we find at least one file relating to
1791 # this target; if this remains False after the scan, we copy the 1783 # this target; if this remains False after the scan, we copy the
@@ -1990,8 +1982,6 @@ class BuildInfoHelper(object):
1990 # Do not skip command line build events 1982 # Do not skip command line build events
1991 self.store_log_event(tempevent,False) 1983 self.store_log_event(tempevent,False)
1992 1984
1993 if not connection.features.autocommits_when_autocommit_is_off:
1994 transaction.set_autocommit(True)
1995 1985
1996 # unset the brbe; this is to prevent subsequent command-line builds 1986 # unset the brbe; this is to prevent subsequent command-line builds
1997 # being incorrectly attached to the previous Toaster-triggered build; 1987 # being incorrectly attached to the previous Toaster-triggered build;
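One of the hunks above adds a fallback for IMAGE_NAME: when the variable is unset, it is recovered from BB_CMDLINE by dropping option tokens and stripping any task suffix from the first target. A hypothetical helper showing just that string handling:

    def image_name_from_cmdline(cmd):
        # Drop "-k"-style option tokens, then take the first target and
        # remove everything after ":" (e.g. ":do_build").
        cmd = [token for token in cmd if not token.startswith('-')]
        return cmd[1].split(':', 1)[0]

    # image_name_from_cmdline(["bitbake", "-k", "core-image-minimal:do_build"])
    # -> "core-image-minimal"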
diff --git a/bitbake/lib/bb/ui/eventreplay.py b/bitbake/lib/bb/ui/eventreplay.py
new file mode 100644
index 0000000000..d62ecbfa56
--- /dev/null
+++ b/bitbake/lib/bb/ui/eventreplay.py
@@ -0,0 +1,86 @@
1#!/usr/bin/env python3
2#
3# SPDX-License-Identifier: GPL-2.0-only
4#
5# This file re-uses code spread throughout other Bitbake source files.
6# As such, all other copyrights belong to their own right holders.
7#
8
9
10import os
11import sys
12import json
13import pickle
14import codecs
15
16
17class EventPlayer:
18 """Emulate a connection to a bitbake server."""
19
20 def __init__(self, eventfile, variables):
21 self.eventfile = eventfile
22 self.variables = variables
23 self.eventmask = []
24
25 def waitEvent(self, _timeout):
26 """Read event from the file."""
27 line = self.eventfile.readline().strip()
28 if not line:
29 return
30 try:
31 decodedline = json.loads(line)
32 if 'allvariables' in decodedline:
33 self.variables = decodedline['allvariables']
34 return
35 if not 'vars' in decodedline:
36 raise ValueError
37 event_str = decodedline['vars'].encode('utf-8')
38 event = pickle.loads(codecs.decode(event_str, 'base64'))
39 event_name = "%s.%s" % (event.__module__, event.__class__.__name__)
40 if event_name not in self.eventmask:
41 return
42 return event
43 except ValueError as err:
44 print("Failed loading ", line)
45 raise err
46
47 def runCommand(self, command_line):
48 """Emulate running a command on the server."""
49 name = command_line[0]
50
51 if name == "getVariable":
52 var_name = command_line[1]
53 variable = self.variables.get(var_name)
54 if variable:
55 return variable['v'], None
56 return None, "Missing variable %s" % var_name
57
58 elif name == "getAllKeysWithFlags":
59 dump = {}
60 flaglist = command_line[1]
61 for key, val in self.variables.items():
62 try:
63 if not key.startswith("__"):
64 dump[key] = {
65 'v': val['v'],
66 'history' : val['history'],
67 }
68 for flag in flaglist:
69 dump[key][flag] = val[flag]
70 except Exception as err:
71 print(err)
72 return (dump, None)
73
74 elif name == 'setEventMask':
75 self.eventmask = command_line[-1]
76 return True, None
77
78 else:
79 raise Exception("Command %s not implemented" % command_line[0])
80
81 def getEventHandle(self):
82 """
83 This method is called by toasterui.
84 The return value is passed to self.runCommand but not used there.
85 """
86 pass
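EventPlayer stands in for a server connection when replaying a recorded event log (one JSON object per line, with pickled events under the 'vars' key). A minimal driving loop under those assumptions; the log file name is hypothetical, and in practice toasterui is what drives this object:

    # Assumes bitbake's lib directory is on sys.path.
    from bb.ui.eventreplay import EventPlayer

    with open("bitbake_eventlog.json") as f:
        player = EventPlayer(f, variables={})
        player.runCommand(["setEventMask", None, 0, [], ["bb.build.TaskStarted"]])
        for _ in range(10000):
            # waitEvent() returns None at EOF, for the variable dump and
            # for masked-out events, so keep polling rather than breaking.
            event = player.waitEvent(0)
            if event:
                print(event)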
diff --git a/bitbake/lib/bb/ui/knotty.py b/bitbake/lib/bb/ui/knotty.py
index 0efa614dfc..f86999bb09 100644
--- a/bitbake/lib/bb/ui/knotty.py
+++ b/bitbake/lib/bb/ui/knotty.py
@@ -21,10 +21,11 @@ import fcntl
21import struct 21import struct
22import copy 22import copy
23import atexit 23import atexit
24from itertools import groupby
24 25
25from bb.ui import uihelper 26from bb.ui import uihelper
26 27
27featureSet = [bb.cooker.CookerFeatures.SEND_SANITYEVENTS] 28featureSet = [bb.cooker.CookerFeatures.SEND_SANITYEVENTS, bb.cooker.CookerFeatures.BASEDATASTORE_TRACKING]
28 29
29logger = logging.getLogger("BitBake") 30logger = logging.getLogger("BitBake")
30interactive = sys.stdout.isatty() 31interactive = sys.stdout.isatty()
@@ -178,7 +179,7 @@ class TerminalFilter(object):
178 new[3] = new[3] & ~termios.ECHO 179 new[3] = new[3] & ~termios.ECHO
179 termios.tcsetattr(fd, termios.TCSADRAIN, new) 180 termios.tcsetattr(fd, termios.TCSADRAIN, new)
180 curses.setupterm() 181 curses.setupterm()
181 if curses.tigetnum("colors") > 2: 182 if curses.tigetnum("colors") > 2 and os.environ.get('NO_COLOR', '') == '':
182 for h in handlers: 183 for h in handlers:
183 try: 184 try:
184 h.formatter.enable_color() 185 h.formatter.enable_color()
@@ -227,7 +228,9 @@ class TerminalFilter(object):
227 228
228 def keepAlive(self, t): 229 def keepAlive(self, t):
229 if not self.cuu: 230 if not self.cuu:
230 print("Bitbake still alive (%ds)" % t) 231 print("Bitbake still alive (no events for %ds). Active tasks:" % t)
232 for t in self.helper.running_tasks:
233 print(t)
231 sys.stdout.flush() 234 sys.stdout.flush()
232 235
233 def updateFooter(self): 236 def updateFooter(self):
@@ -249,58 +252,68 @@ class TerminalFilter(object):
249 return 252 return
250 tasks = [] 253 tasks = []
251 for t in runningpids: 254 for t in runningpids:
255 start_time = activetasks[t].get("starttime", None)
256 if start_time:
257 msg = "%s - %s (pid %s)" % (activetasks[t]["title"], self.elapsed(currenttime - start_time), activetasks[t]["pid"])
258 else:
259 msg = "%s (pid %s)" % (activetasks[t]["title"], activetasks[t]["pid"])
252 progress = activetasks[t].get("progress", None) 260 progress = activetasks[t].get("progress", None)
253 if progress is not None: 261 if progress is not None:
254 pbar = activetasks[t].get("progressbar", None) 262 pbar = activetasks[t].get("progressbar", None)
255 rate = activetasks[t].get("rate", None) 263 rate = activetasks[t].get("rate", None)
256 start_time = activetasks[t].get("starttime", None)
257 if not pbar or pbar.bouncing != (progress < 0): 264 if not pbar or pbar.bouncing != (progress < 0):
258 if progress < 0: 265 if progress < 0:
259 pbar = BBProgress("0: %s (pid %s)" % (activetasks[t]["title"], activetasks[t]["pid"]), 100, widgets=[' ', progressbar.BouncingSlider(), ''], extrapos=3, resize_handler=self.sigwinch_handle) 266 pbar = BBProgress("0: %s" % msg, 100, widgets=[' ', progressbar.BouncingSlider(), ''], extrapos=3, resize_handler=self.sigwinch_handle)
260 pbar.bouncing = True 267 pbar.bouncing = True
261 else: 268 else:
262 pbar = BBProgress("0: %s (pid %s)" % (activetasks[t]["title"], activetasks[t]["pid"]), 100, widgets=[' ', progressbar.Percentage(), ' ', progressbar.Bar(), ''], extrapos=5, resize_handler=self.sigwinch_handle) 269 pbar = BBProgress("0: %s" % msg, 100, widgets=[' ', progressbar.Percentage(), ' ', progressbar.Bar(), ''], extrapos=5, resize_handler=self.sigwinch_handle)
263 pbar.bouncing = False 270 pbar.bouncing = False
264 activetasks[t]["progressbar"] = pbar 271 activetasks[t]["progressbar"] = pbar
265 tasks.append((pbar, progress, rate, start_time)) 272 tasks.append((pbar, msg, progress, rate, start_time))
266 else: 273 else:
267 start_time = activetasks[t].get("starttime", None) 274 tasks.append(msg)
268 if start_time:
269 tasks.append("%s - %s (pid %s)" % (activetasks[t]["title"], self.elapsed(currenttime - start_time), activetasks[t]["pid"]))
270 else:
271 tasks.append("%s (pid %s)" % (activetasks[t]["title"], activetasks[t]["pid"]))
272 275
273 if self.main.shutdown: 276 if self.main.shutdown:
274 content = "Waiting for %s running tasks to finish:" % len(activetasks) 277 content = pluralise("Waiting for %s running task to finish",
278 "Waiting for %s running tasks to finish", len(activetasks))
279 if not self.quiet:
280 content += ':'
275 print(content) 281 print(content)
276 else: 282 else:
283 scene_tasks = "%s of %s" % (self.helper.setscene_current, self.helper.setscene_total)
284 cur_tasks = "%s of %s" % (self.helper.tasknumber_current, self.helper.tasknumber_total)
285
286 content = ''
287 if not self.quiet:
288 msg = "Setscene tasks: %s" % scene_tasks
289 content += msg + "\n"
290 print(msg)
291
277 if self.quiet: 292 if self.quiet:
278 content = "Running tasks (%s of %s)" % (self.helper.tasknumber_current, self.helper.tasknumber_total) 293 msg = "Running tasks (%s, %s)" % (scene_tasks, cur_tasks)
279 elif not len(activetasks): 294 elif not len(activetasks):
280 content = "No currently running tasks (%s of %s)" % (self.helper.tasknumber_current, self.helper.tasknumber_total) 295 msg = "No currently running tasks (%s)" % cur_tasks
281 else: 296 else:
282 content = "Currently %2s running tasks (%s of %s)" % (len(activetasks), self.helper.tasknumber_current, self.helper.tasknumber_total) 297 msg = "Currently %2s running tasks (%s)" % (len(activetasks), cur_tasks)
283 maxtask = self.helper.tasknumber_total 298 maxtask = self.helper.tasknumber_total
284 if not self.main_progress or self.main_progress.maxval != maxtask: 299 if not self.main_progress or self.main_progress.maxval != maxtask:
285 widgets = [' ', progressbar.Percentage(), ' ', progressbar.Bar()] 300 widgets = [' ', progressbar.Percentage(), ' ', progressbar.Bar()]
286 self.main_progress = BBProgress("Running tasks", maxtask, widgets=widgets, resize_handler=self.sigwinch_handle) 301 self.main_progress = BBProgress("Running tasks", maxtask, widgets=widgets, resize_handler=self.sigwinch_handle)
287 self.main_progress.start(False) 302 self.main_progress.start(False)
288 self.main_progress.setmessage(content) 303 self.main_progress.setmessage(msg)
289 progress = self.helper.tasknumber_current - 1 304 progress = max(0, self.helper.tasknumber_current - 1)
290 if progress < 0: 305 content += self.main_progress.update(progress)
291 progress = 0
292 content = self.main_progress.update(progress)
293 print('') 306 print('')
294 lines = 1 + int(len(content) / (self.columns + 1)) 307 lines = self.getlines(content)
295 if self.quiet == 0: 308 if not self.quiet:
296 for tasknum, task in enumerate(tasks[:(self.rows - 2)]): 309 for tasknum, task in enumerate(tasks[:(self.rows - 1 - lines)]):
297 if isinstance(task, tuple): 310 if isinstance(task, tuple):
298 pbar, progress, rate, start_time = task 311 pbar, msg, progress, rate, start_time = task
299 if not pbar.start_time: 312 if not pbar.start_time:
300 pbar.start(False) 313 pbar.start(False)
301 if start_time: 314 if start_time:
302 pbar.start_time = start_time 315 pbar.start_time = start_time
303 pbar.setmessage('%s:%s' % (tasknum, pbar.msg.split(':', 1)[1])) 316 pbar.setmessage('%s: %s' % (tasknum, msg))
304 pbar.setextra(rate) 317 pbar.setextra(rate)
305 if progress > -1: 318 if progress > -1:
306 content = pbar.update(progress) 319 content = pbar.update(progress)
@@ -310,11 +323,17 @@ class TerminalFilter(object):
310 else: 323 else:
311 content = "%s: %s" % (tasknum, task) 324 content = "%s: %s" % (tasknum, task)
312 print(content) 325 print(content)
313 lines = lines + 1 + int(len(content) / (self.columns + 1)) 326 lines = lines + self.getlines(content)
314 self.footer_present = lines 327 self.footer_present = lines
315 self.lastpids = runningpids[:] 328 self.lastpids = runningpids[:]
316 self.lastcount = self.helper.tasknumber_current 329 self.lastcount = self.helper.tasknumber_current
317 330
331 def getlines(self, content):
332 lines = 0
333 for line in content.split("\n"):
334 lines = lines + 1 + int(len(line) / (self.columns + 1))
335 return lines
336
318 def finish(self): 337 def finish(self):
319 if self.stdinbackup: 338 if self.stdinbackup:
320 fd = sys.stdin.fileno() 339 fd = sys.stdin.fileno()
@@ -401,6 +420,11 @@ def main(server, eventHandler, params, tf = TerminalFilter):
401 except bb.BBHandledException: 420 except bb.BBHandledException:
402 drain_events_errorhandling(eventHandler) 421 drain_events_errorhandling(eventHandler)
403 return 1 422 return 1
423 except Exception as e:
424 # bitbake-server comms failure
425 early_logger = bb.msg.logger_create('bitbake', sys.stdout)
426 early_logger.fatal("Attempting to set server environment: %s", e)
427 return 1
404 428
405 if params.options.quiet == 0: 429 if params.options.quiet == 0:
406 console_loglevel = loglevel 430 console_loglevel = loglevel
@@ -539,6 +563,13 @@ def main(server, eventHandler, params, tf = TerminalFilter):
539 except OSError: 563 except OSError:
540 pass 564 pass
541 565
566 # Add the logging domains specified by the user on the command line
567 for (domainarg, iterator) in groupby(params.debug_domains):
568 dlevel = len(tuple(iterator))
569 l = logconfig["loggers"].setdefault("BitBake.%s" % domainarg, {})
570 l["level"] = logging.DEBUG - dlevel + 1
571 l.setdefault("handlers", []).extend(["BitBake.verbconsole"])
572
542 conf = bb.msg.setLoggingConfig(logconfig, logconfigfile) 573 conf = bb.msg.setLoggingConfig(logconfig, logconfigfile)
543 574
544 if sys.stdin.isatty() and sys.stdout.isatty(): 575 if sys.stdin.isatty() and sys.stdout.isatty():
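The groupby() hunk turns repeated -D options into per-domain log levels: params.debug_domains holds one entry per -D occurrence, so consecutive duplicates collapse into a count, and each extra -D lowers the level one step below DEBUG. A small demonstration of that arithmetic (the domain names are made up):

    import logging
    from itertools import groupby

    debug_domains = ["Fetcher", "Fetcher", "Parsing"]   # e.g. -DFetcher -DFetcher -DParsing
    for domainarg, iterator in groupby(debug_domains):
        dlevel = len(tuple(iterator))
        print("BitBake.%s -> level %d" % (domainarg, logging.DEBUG - dlevel + 1))
    # BitBake.Fetcher -> level 9
    # BitBake.Parsing -> level 10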
@@ -559,7 +590,12 @@ def main(server, eventHandler, params, tf = TerminalFilter):
559 return 590 return
560 591
561 llevel, debug_domains = bb.msg.constructLogOptions() 592 llevel, debug_domains = bb.msg.constructLogOptions()
562 server.runCommand(["setEventMask", server.getEventHandle(), llevel, debug_domains, _evt_list]) 593 try:
594 server.runCommand(["setEventMask", server.getEventHandle(), llevel, debug_domains, _evt_list])
595 except (BrokenPipeError, EOFError) as e:
596 # bitbake-server comms failure
597 logger.fatal("Attempting to set event mask: %s", e)
598 return 1
563 599
564 # The logging_tree module is *extremely* helpful in debugging logging 600 # The logging_tree module is *extremely* helpful in debugging logging
565 # domains. Uncomment here to dump the logging tree when bitbake starts 601 # domains. Uncomment here to dump the logging tree when bitbake starts
@@ -568,7 +604,11 @@ def main(server, eventHandler, params, tf = TerminalFilter):
568 604
569 universe = False 605 universe = False
570 if not params.observe_only: 606 if not params.observe_only:
571 params.updateFromServer(server) 607 try:
608 params.updateFromServer(server)
609 except Exception as e:
610 logger.fatal("Fetching command line: %s", e)
611 return 1
572 cmdline = params.parseActions() 612 cmdline = params.parseActions()
573 if not cmdline: 613 if not cmdline:
574 print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.") 614 print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
@@ -579,7 +619,12 @@ def main(server, eventHandler, params, tf = TerminalFilter):
579 if cmdline['action'][0] == "buildTargets" and "universe" in cmdline['action'][1]: 619 if cmdline['action'][0] == "buildTargets" and "universe" in cmdline['action'][1]:
580 universe = True 620 universe = True
581 621
582 ret, error = server.runCommand(cmdline['action']) 622 try:
623 ret, error = server.runCommand(cmdline['action'])
624 except (BrokenPipeError, EOFError) as e:
625 # bitbake-server comms failure
626 logger.fatal("Command '{}' failed: %s".format(cmdline), e)
627 return 1
583 if error: 628 if error:
584 logger.error("Command '%s' failed: %s" % (cmdline, error)) 629 logger.error("Command '%s' failed: %s" % (cmdline, error))
585 return 1 630 return 1
@@ -597,26 +642,40 @@ def main(server, eventHandler, params, tf = TerminalFilter):
597 warnings = 0 642 warnings = 0
598 taskfailures = [] 643 taskfailures = []
599 644
600 printinterval = 5000 645 printintervaldelta = 10 * 60 # 10 minutes
601 lastprint = time.time() 646 printinterval = printintervaldelta
647 pinginterval = 1 * 60 # 1 minute
648 lastevent = lastprint = time.time()
602 649
603 termfilter = tf(main, helper, console_handlers, params.options.quiet) 650 termfilter = tf(main, helper, console_handlers, params.options.quiet)
604 atexit.register(termfilter.finish) 651 atexit.register(termfilter.finish)
605 652
606 while True: 653 # shutdown levels
654 # 0 - normal operation
655 # 1 - no new task execution, let current running tasks finish
656 # 2 - interrupting currently executing tasks
657 # 3 - we're done, exit
658 while main.shutdown < 3:
607 try: 659 try:
608 if (lastprint + printinterval) <= time.time(): 660 if (lastprint + printinterval) <= time.time():
609 termfilter.keepAlive(printinterval) 661 termfilter.keepAlive(printinterval)
610 printinterval += 5000 662 printinterval += printintervaldelta
611 event = eventHandler.waitEvent(0) 663 event = eventHandler.waitEvent(0)
612 if event is None: 664 if event is None:
613 if main.shutdown > 1: 665 if (lastevent + pinginterval) <= time.time():
614 break 666 ret, error = server.runCommand(["ping"])
667 if error or not ret:
668 termfilter.clearFooter()
669 print("No reply after pinging server (%s, %s), exiting." % (str(error), str(ret)))
670 return_value = 3
671 main.shutdown = 3
672 lastevent = time.time()
615 if not parseprogress: 673 if not parseprogress:
616 termfilter.updateFooter() 674 termfilter.updateFooter()
617 event = eventHandler.waitEvent(0.25) 675 event = eventHandler.waitEvent(0.25)
618 if event is None: 676 if event is None:
619 continue 677 continue
678 lastevent = time.time()
620 helper.eventHandler(event) 679 helper.eventHandler(event)
621 if isinstance(event, bb.runqueue.runQueueExitWait): 680 if isinstance(event, bb.runqueue.runQueueExitWait):
622 if not main.shutdown: 681 if not main.shutdown:
@@ -638,8 +697,8 @@ def main(server, eventHandler, params, tf = TerminalFilter):
638 697
639 if isinstance(event, logging.LogRecord): 698 if isinstance(event, logging.LogRecord):
640 lastprint = time.time() 699 lastprint = time.time()
641 printinterval = 5000 700 printinterval = printintervaldelta
642 if event.levelno >= bb.msg.BBLogFormatter.ERROR: 701 if event.levelno >= bb.msg.BBLogFormatter.ERRORONCE:
643 errors = errors + 1 702 errors = errors + 1
644 return_value = 1 703 return_value = 1
645 elif event.levelno == bb.msg.BBLogFormatter.WARNING: 704 elif event.levelno == bb.msg.BBLogFormatter.WARNING:
@@ -653,10 +712,10 @@ def main(server, eventHandler, params, tf = TerminalFilter):
653 continue 712 continue
654 713
655 # Prefix task messages with recipe/task 714 # Prefix task messages with recipe/task
656 if event.taskpid in helper.pidmap and event.levelno != bb.msg.BBLogFormatter.PLAIN: 715 if event.taskpid in helper.pidmap and event.levelno not in [bb.msg.BBLogFormatter.PLAIN, bb.msg.BBLogFormatter.WARNONCE, bb.msg.BBLogFormatter.ERRORONCE]:
657 taskinfo = helper.running_tasks[helper.pidmap[event.taskpid]] 716 taskinfo = helper.running_tasks[helper.pidmap[event.taskpid]]
658 event.msg = taskinfo['title'] + ': ' + event.msg 717 event.msg = taskinfo['title'] + ': ' + event.msg
659 if hasattr(event, 'fn'): 718 if hasattr(event, 'fn') and event.levelno not in [bb.msg.BBLogFormatter.WARNONCE, bb.msg.BBLogFormatter.ERRORONCE]:
660 event.msg = event.fn + ': ' + event.msg 719 event.msg = event.fn + ': ' + event.msg
661 logging.getLogger(event.name).handle(event) 720 logging.getLogger(event.name).handle(event)
662 continue 721 continue
@@ -721,15 +780,15 @@ def main(server, eventHandler, params, tf = TerminalFilter):
721 if event.error: 780 if event.error:
722 errors = errors + 1 781 errors = errors + 1
723 logger.error(str(event)) 782 logger.error(str(event))
724 main.shutdown = 2 783 main.shutdown = 3
725 continue 784 continue
726 if isinstance(event, bb.command.CommandExit): 785 if isinstance(event, bb.command.CommandExit):
727 if not return_value: 786 if not return_value:
728 return_value = event.exitcode 787 return_value = event.exitcode
729 main.shutdown = 2 788 main.shutdown = 3
730 continue 789 continue
731 if isinstance(event, (bb.command.CommandCompleted, bb.cooker.CookerExit)): 790 if isinstance(event, (bb.command.CommandCompleted, bb.cooker.CookerExit)):
732 main.shutdown = 2 791 main.shutdown = 3
733 continue 792 continue
734 if isinstance(event, bb.event.MultipleProviders): 793 if isinstance(event, bb.event.MultipleProviders):
735 logger.info(str(event)) 794 logger.info(str(event))
@@ -745,7 +804,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
745 continue 804 continue
746 805
747 if isinstance(event, bb.runqueue.sceneQueueTaskStarted): 806 if isinstance(event, bb.runqueue.sceneQueueTaskStarted):
748 logger.info("Running setscene task %d of %d (%s)" % (event.stats.completed + event.stats.active + event.stats.failed + 1, event.stats.total, event.taskstring)) 807 logger.info("Running setscene task %d of %d (%s)" % (event.stats.setscene_covered + event.stats.setscene_active + event.stats.setscene_notcovered + 1, event.stats.setscene_total, event.taskstring))
749 continue 808 continue
750 809
751 if isinstance(event, bb.runqueue.runQueueTaskStarted): 810 if isinstance(event, bb.runqueue.runQueueTaskStarted):
@@ -814,15 +873,26 @@ def main(server, eventHandler, params, tf = TerminalFilter):
814 873
815 logger.error("Unknown event: %s", event) 874 logger.error("Unknown event: %s", event)
816 875
876 except (BrokenPipeError, EOFError) as e:
877 # bitbake-server comms failure, don't attempt further comms and exit
878 logger.fatal("Executing event: %s", e)
879 return_value = 1
880 errors = errors + 1
881 main.shutdown = 3
817 except EnvironmentError as ioerror: 882 except EnvironmentError as ioerror:
818 termfilter.clearFooter() 883 termfilter.clearFooter()
819 # ignore interrupted io 884 # ignore interrupted io
820 if ioerror.args[0] == 4: 885 if ioerror.args[0] == 4:
821 continue 886 continue
822 sys.stderr.write(str(ioerror)) 887 sys.stderr.write(str(ioerror))
823 if not params.observe_only:
824 _, error = server.runCommand(["stateForceShutdown"])
825 main.shutdown = 2 888 main.shutdown = 2
889 if not params.observe_only:
890 try:
891 _, error = server.runCommand(["stateForceShutdown"])
892 except (BrokenPipeError, EOFError) as e:
893 # bitbake-server comms failure, don't attempt further comms and exit
894 logger.fatal("Unable to force shutdown: %s", e)
895 main.shutdown = 3
826 except KeyboardInterrupt: 896 except KeyboardInterrupt:
827 termfilter.clearFooter() 897 termfilter.clearFooter()
828 if params.observe_only: 898 if params.observe_only:
@@ -831,9 +901,13 @@ def main(server, eventHandler, params, tf = TerminalFilter):
831 901
832 def state_force_shutdown(): 902 def state_force_shutdown():
833 print("\nSecond Keyboard Interrupt, stopping...\n") 903 print("\nSecond Keyboard Interrupt, stopping...\n")
834 _, error = server.runCommand(["stateForceShutdown"]) 904 try:
835 if error: 905 _, error = server.runCommand(["stateForceShutdown"])
836 logger.error("Unable to cleanly stop: %s" % error) 906 if error:
907 logger.error("Unable to cleanly stop: %s" % error)
908 except (BrokenPipeError, EOFError) as e:
909 # bitbake-server comms failure
910 logger.fatal("Unable to cleanly stop: %s", e)
837 911
838 if not params.observe_only and main.shutdown == 1: 912 if not params.observe_only and main.shutdown == 1:
839 state_force_shutdown() 913 state_force_shutdown()
@@ -846,17 +920,24 @@ def main(server, eventHandler, params, tf = TerminalFilter):
846 _, error = server.runCommand(["stateShutdown"]) 920 _, error = server.runCommand(["stateShutdown"])
847 if error: 921 if error:
848 logger.error("Unable to cleanly shutdown: %s" % error) 922 logger.error("Unable to cleanly shutdown: %s" % error)
923 except (BrokenPipeError, EOFError) as e:
924 # bitbake-server comms failure
925 logger.fatal("Unable to cleanly shutdown: %s", e)
849 except KeyboardInterrupt: 926 except KeyboardInterrupt:
850 state_force_shutdown() 927 state_force_shutdown()
851 928
852 main.shutdown = main.shutdown + 1 929 main.shutdown = main.shutdown + 1
853 pass
854 except Exception as e: 930 except Exception as e:
855 import traceback 931 import traceback
856 sys.stderr.write(traceback.format_exc()) 932 sys.stderr.write(traceback.format_exc())
857 if not params.observe_only:
858 _, error = server.runCommand(["stateForceShutdown"])
859 main.shutdown = 2 933 main.shutdown = 2
934 if not params.observe_only:
935 try:
936 _, error = server.runCommand(["stateForceShutdown"])
937 except (BrokenPipeError, EOFError) as e:
938 # bitbake-server comms failure, don't attempt further comms and exit
939 logger.fatal("Unable to force shutdown: %s", e)
940 main.shutdown = 3
860 return_value = 1 941 return_value = 1
861 try: 942 try:
862 termfilter.clearFooter() 943 termfilter.clearFooter()
@@ -867,11 +948,11 @@ def main(server, eventHandler, params, tf = TerminalFilter):
867 for failure in taskfailures: 948 for failure in taskfailures:
868 summary += "\n %s" % failure 949 summary += "\n %s" % failure
869 if warnings: 950 if warnings:
870 summary += pluralise("\nSummary: There was %s WARNING message shown.", 951 summary += pluralise("\nSummary: There was %s WARNING message.",
871 "\nSummary: There were %s WARNING messages shown.", warnings) 952 "\nSummary: There were %s WARNING messages.", warnings)
872 if return_value and errors: 953 if return_value and errors:
873 summary += pluralise("\nSummary: There was %s ERROR message shown, returning a non-zero exit code.", 954 summary += pluralise("\nSummary: There was %s ERROR message, returning a non-zero exit code.",
874 "\nSummary: There were %s ERROR messages shown, returning a non-zero exit code.", errors) 955 "\nSummary: There were %s ERROR messages, returning a non-zero exit code.", errors)
875 if summary and params.options.quiet == 0: 956 if summary and params.options.quiet == 0:
876 print(summary) 957 print(summary)
877 958
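The reworded summary lines rely on the pluralise() helper that knotty already uses for its footer messages. Its contract, sketched from the call sites above rather than copied from the actual implementation:

    def pluralise(singular, plural, qty):
        # Both format strings take the count; pick one by quantity.
        if qty == 1:
            return singular % qty
        return plural % qty

    print(pluralise("\nSummary: There was %s WARNING message.",
                    "\nSummary: There were %s WARNING messages.", 3))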
diff --git a/bitbake/lib/bb/ui/ncurses.py b/bitbake/lib/bb/ui/ncurses.py
index cf1c876a51..18a706547a 100644
--- a/bitbake/lib/bb/ui/ncurses.py
+++ b/bitbake/lib/bb/ui/ncurses.py
@@ -227,6 +227,9 @@ class NCursesUI:
227 shutdown = 0 227 shutdown = 0
228 228
229 try: 229 try:
230 if not params.observe_only:
231 params.updateToServer(server, os.environ.copy())
232
230 params.updateFromServer(server) 233 params.updateFromServer(server)
231 cmdline = params.parseActions() 234 cmdline = params.parseActions()
232 if not cmdline: 235 if not cmdline:
diff --git a/bitbake/lib/bb/ui/taskexp.py b/bitbake/lib/bb/ui/taskexp.py
index 2b246710ca..bedfd69b09 100644
--- a/bitbake/lib/bb/ui/taskexp.py
+++ b/bitbake/lib/bb/ui/taskexp.py
@@ -8,6 +8,7 @@
8# 8#
9 9
10import sys 10import sys
11import traceback
11 12
12try: 13try:
13 import gi 14 import gi
@@ -176,7 +177,7 @@ class gtkthread(threading.Thread):
176 quit = threading.Event() 177 quit = threading.Event()
177 def __init__(self, shutdown): 178 def __init__(self, shutdown):
178 threading.Thread.__init__(self) 179 threading.Thread.__init__(self)
179 self.setDaemon(True) 180 self.daemon = True
180 self.shutdown = shutdown 181 self.shutdown = shutdown
181 if not Gtk.init_check()[0]: 182 if not Gtk.init_check()[0]:
182 sys.stderr.write("Gtk+ init failed. Make sure DISPLAY variable is set.\n") 183 sys.stderr.write("Gtk+ init failed. Make sure DISPLAY variable is set.\n")
@@ -196,6 +197,7 @@ def main(server, eventHandler, params):
196 gtkgui.start() 197 gtkgui.start()
197 198
198 try: 199 try:
200 params.updateToServer(server, os.environ.copy())
199 params.updateFromServer(server) 201 params.updateFromServer(server)
200 cmdline = params.parseActions() 202 cmdline = params.parseActions()
201 if not cmdline: 203 if not cmdline:
@@ -218,6 +220,9 @@ def main(server, eventHandler, params):
218 except client.Fault as x: 220 except client.Fault as x:
219 print("XMLRPC Fault getting commandline:\n %s" % x) 221 print("XMLRPC Fault getting commandline:\n %s" % x)
220 return 222 return
223 except Exception as e:
224 print("Exception in startup:\n %s" % traceback.format_exc())
225 return
221 226
222 if gtkthread.quit.isSet(): 227 if gtkthread.quit.isSet():
223 return 228 return
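
The 'self.setDaemon(True)' to 'self.daemon = True' conversions here and in uievent.py below track Python's deprecation of the camelCase threading aliases: Thread.setDaemon() has been deprecated since Python 3.10 in favour of the 'daemon' attribute (Event.isSet() is similarly superseded by is_set()). A standalone sketch:

    import threading

    class Worker(threading.Thread):
        def __init__(self):
            super().__init__()
            # Preferred spelling; setDaemon(True) warns on Python 3.10+
            self.daemon = True

        def run(self):
            pass  # background work goes here

    w = Worker()
    w.start()  # a daemon thread will not keep the interpreter alive on exit
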
diff --git a/bitbake/lib/bb/ui/taskexp_ncurses.py b/bitbake/lib/bb/ui/taskexp_ncurses.py
new file mode 100755
index 0000000000..ea94a4987f
--- /dev/null
+++ b/bitbake/lib/bb/ui/taskexp_ncurses.py
@@ -0,0 +1,1511 @@
1#
2# BitBake Graphical ncurses-based Dependency Explorer
3# * Based on the GTK implementation
4# * Intended to run on any Linux host
5#
6# Copyright (C) 2007 Ross Burton
7# Copyright (C) 2007 - 2008 Richard Purdie
8# Copyright (C) 2022 - 2024 David Reyna
9#
10# SPDX-License-Identifier: GPL-2.0-only
11#
12
13#
14# Execution example:
15# $ bitbake -g -u taskexp_ncurses zlib acl
16#
17# Self-test example (executes a script of GUI actions):
18# $ TASK_EXP_UNIT_TEST=1 bitbake -g -u taskexp_ncurses zlib acl
19# ...
20# $ echo $?
21# 0
22# $ TASK_EXP_UNIT_TEST=1 bitbake -g -u taskexp_ncurses zlib acl foo
23# ERROR: Nothing PROVIDES 'foo'. Close matches:
24# ofono
25# $ echo $?
26# 1
27#
28# Self-test with no terminal example (only tests dependency fetch from bitbake):
29# $ TASK_EXP_UNIT_TEST_NOTERM=1 bitbake -g -u taskexp_ncurses quilt
30# $ echo $?
31# 0
32#
33# Features:
34# * Ncurses is used for the presentation layer. Only the 'curses'
35# library is used (none of the extension libraries), plus only
36# one main screen is used (no sub-windows)
37# * Uses the 'generateDepTreeEvent' bitbake event to fetch the
38# dynamic dependency data based on passed recipes
39# * Computes and provides reverse dependencies
40# * Supports task sorting on:
41# (a) Task dependency order within each recipe
42# (b) Pure alphabetical order
43# (c) Provisions for third sort order (bitbake order?)
44# * The 'Filter' does a "*string*" wildcard filter on tasks in the
45# main window, dynamically re-ordering and re-centering the content
46# * A 'Print' function exports the selected task or its whole recipe
47# task set to the default file "taskdep.txt"
48# * Supports a progress bar for bitbake loads and file printing
 49# * Line art for box drawing supported, ASCII art as an alternative
50# * No horizontal scrolling support. Selected task's full name
51# shown in bottom bar
52# * Dynamically catches terminals that are (or become) too small
 53# * Exception handling to ensure return to a normal terminal on errors
54# * Debugging support, self test option
55#
56
 57import os, sys
58import traceback
59import curses
60import re
61import time
62
63# Bitbake server support
64import threading
65from xmlrpc import client
66import bb
67import bb.event
68
69# Dependency indexes (depends_model)
70(TYPE_DEP, TYPE_RDEP) = (0, 1)
71DEPENDS_TYPE = 0
72DEPENDS_TASK = 1
73DEPENDS_DEPS = 2
74# Task indexes (task_list)
75TASK_NAME = 0
76TASK_PRIMARY = 1
77TASK_SORT_ALPHA = 2
78TASK_SORT_DEPS = 3
79TASK_SORT_BITBAKE = 4
80# Sort options (default is SORT_DEPS)
81SORT_ALPHA = 0
82SORT_DEPS = 1
83SORT_BITBAKE_ENABLE = False # NOTE: future sort
84SORT_BITBAKE = 2
85sort_model = SORT_DEPS
86# Print options
87PRINT_MODEL_1 = 0
88PRINT_MODEL_2 = 1
89print_model = PRINT_MODEL_2
90print_file_name = "taskdep_print.log"
91print_file_backup_name = "taskdep_print_backup.log"
92is_printed = False
93is_filter = False
94
95# Standard (and backup) key mappings
96CHAR_NUL = 0 # Used as self-test nop char
97CHAR_BS_H = 8 # Alternate backspace key
98CHAR_TAB = 9
99CHAR_RETURN = 10
100CHAR_ESCAPE = 27
101CHAR_UP = ord('{') # Used as self-test ASCII char
102CHAR_DOWN = ord('}') # Used as self-test ASCII char
103
104# Color_pair IDs
105CURSES_NORMAL = 0
106CURSES_HIGHLIGHT = 1
107CURSES_WARNING = 2
108
109
110#################################################
111### Debugging support
112###
113
114verbose = False
115
116# Debug: message display slow-step through display update issues
117def alert(msg,screen):
118 if msg:
119 screen.addstr(0, 10, '[%-4s]' % msg)
 120 screen.refresh()
121 curses.napms(2000)
122 else:
123 if do_line_art:
124 for i in range(10, 24):
125 screen.addch(0, i, curses.ACS_HLINE)
126 else:
127 screen.addstr(0, 10, '-' * 14)
 128 screen.refresh()
129
130# Debug: display edge conditions on frame movements
131def debug_frame(nbox_obj):
132 if verbose:
133 nbox_obj.screen.addstr(0, 50, '[I=%2d,O=%2d,S=%3s,H=%2d,M=%4d]' % (
134 nbox_obj.cursor_index,
135 nbox_obj.cursor_offset,
136 nbox_obj.scroll_offset,
137 nbox_obj.inside_height,
138 len(nbox_obj.task_list),
139 ))
140 nbox_obj.screen.refresh()
141
142#
143# Unit test (assumes that 'quilt-native' is always present)
144#
145
146unit_test = os.environ.get('TASK_EXP_UNIT_TEST')
147unit_test_cmnds=[
148 '# Default selected task in primary box',
149 'tst_selected=<TASK>.do_recipe_qa',
150 '# Default selected task in deps',
151 'tst_entry=<TAB>',
152 'tst_selected=',
153 '# Default selected task in rdeps',
154 'tst_entry=<TAB>',
155 'tst_selected=<TASK>.do_fetch',
156 "# Test 'select' back to primary box",
157 'tst_entry=<CR>',
158 '#tst_entry=<DOWN>', # optional injected error
159 'tst_selected=<TASK>.do_fetch',
160 '# Check filter',
161 'tst_entry=/uilt-nativ/',
162 'tst_selected=quilt-native.do_recipe_qa',
163 '# Check print',
164 'tst_entry=p',
165 'tst_printed=quilt-native.do_fetch',
166 '#tst_printed=quilt-foo.do_nothing', # optional injected error
167 '# Done!',
168 'tst_entry=q',
169]
170unit_test_idx=0
171unit_test_command_chars=''
172unit_test_results=[]
173def unit_test_action(active_package):
174 global unit_test_idx
175 global unit_test_command_chars
176 global unit_test_results
177 ret = CHAR_NUL
178 if unit_test_command_chars:
179 ch = unit_test_command_chars[0]
180 unit_test_command_chars = unit_test_command_chars[1:]
181 time.sleep(0.5)
182 ret = ord(ch)
183 else:
184 line = unit_test_cmnds[unit_test_idx]
185 unit_test_idx += 1
186 line = re.sub('#.*', '', line).strip()
187 line = line.replace('<TASK>',active_package.primary[0])
188 line = line.replace('<TAB>','\t').replace('<CR>','\n')
189 line = line.replace('<UP>','{').replace('<DOWN>','}')
190 if not line: line = 'nop=nop'
191 cmnd,value = line.split('=')
192 if cmnd == 'tst_entry':
193 unit_test_command_chars = value
194 elif cmnd == 'tst_selected':
195 active_selected = active_package.get_selected()
196 if active_selected != value:
197 unit_test_results.append("ERROR:SELFTEST:expected '%s' but got '%s' (NOTE:bitbake may have changed)" % (value,active_selected))
198 ret = ord('Q')
199 else:
200 unit_test_results.append("Pass:SELFTEST:found '%s'" % (value))
201 elif cmnd == 'tst_printed':
202 result = os.system('grep %s %s' % (value,print_file_name))
203 if result:
204 unit_test_results.append("ERROR:PRINTTEST:expected '%s' in '%s'" % (value,print_file_name))
205 ret = ord('Q')
206 else:
207 unit_test_results.append("Pass:PRINTTEST:found '%s'" % (value))
 208 # Return the action (CHAR_NUL for no action until next round)
209 return(ret)
210
 211# Unit test without an interactive terminal (e.g. ptest)
212unit_test_noterm = os.environ.get('TASK_EXP_UNIT_TEST_NOTERM')
213
214
215#################################################
216### Window frame rendering
217###
218### By default, use the normal line art. Since
219### these extended characters are not ASCII, one
 220### must use the ncurses API to render them.
221### The alternate ASCII line art set is optionally
222### available via the 'do_line_art' flag
223
224# By default, render frames using line art
225do_line_art = True
226
227# ASCII render set option
228CHAR_HBAR = '-'
229CHAR_VBAR = '|'
230CHAR_UL_CORNER = '/'
231CHAR_UR_CORNER = '\\'
232CHAR_LL_CORNER = '\\'
233CHAR_LR_CORNER = '/'
234
235# Box frame drawing with line-art
236def line_art_frame(box):
237 x = box.base_x
238 y = box.base_y
239 w = box.width
240 h = box.height + 1
241
242 if do_line_art:
243 for i in range(1, w - 1):
244 box.screen.addch(y, x + i, curses.ACS_HLINE, box.color)
245 box.screen.addch(y + h - 1, x + i, curses.ACS_HLINE, box.color)
246 body_line = "%s" % (' ' * (w - 2))
247 for i in range(1, h - 1):
248 box.screen.addch(y + i, x, curses.ACS_VLINE, box.color)
249 box.screen.addstr(y + i, x + 1, body_line, box.color)
250 box.screen.addch(y + i, x + w - 1, curses.ACS_VLINE, box.color)
251 box.screen.addch(y, x, curses.ACS_ULCORNER, box.color)
252 box.screen.addch(y, x + w - 1, curses.ACS_URCORNER, box.color)
253 box.screen.addch(y + h - 1, x, curses.ACS_LLCORNER, box.color)
254 box.screen.addch(y + h - 1, x + w - 1, curses.ACS_LRCORNER, box.color)
255 else:
256 top_line = "%s%s%s" % (CHAR_UL_CORNER,CHAR_HBAR * (w - 2),CHAR_UR_CORNER)
257 body_line = "%s%s%s" % (CHAR_VBAR,' ' * (w - 2),CHAR_VBAR)
 258 bot_line = "%s%s%s" % (CHAR_LL_CORNER,CHAR_HBAR * (w - 2),CHAR_LR_CORNER)
259 tag_line = "%s%s%s" % ('[',CHAR_HBAR * (w - 2),']')
260 # Top bar
261 box.screen.addstr(y, x, top_line)
262 # Middle frame
263 for i in range(1, (h - 1)):
264 box.screen.addstr(y+i, x, body_line)
265 # Bottom bar
266 box.screen.addstr(y + (h - 1), x, bot_line)
267
268# Connect the separate boxes
269def line_art_fixup(box):
270 if do_line_art:
271 box.screen.addch(box.base_y+2, box.base_x, curses.ACS_LTEE, box.color)
272 box.screen.addch(box.base_y+2, box.base_x+box.width-1, curses.ACS_RTEE, box.color)
273
274
275#################################################
276### Ncurses box object : box frame object to display
277### and manage a sub-window's display elements
278### using basic ncurses
279###
280### Supports:
281### * Frame drawing, content (re)drawing
 282### * Content scrolling via ArrowUp, ArrowDn, PgUp, PgDn
283### * Highlighting for active selected item
284### * Content sorting based on selected sort model
285###
286
287class NBox():
288 def __init__(self, screen, label, primary, base_x, base_y, width, height):
289 # Box description
290 self.screen = screen
291 self.label = label
292 self.primary = primary
293 self.color = curses.color_pair(CURSES_NORMAL) if screen else None
294 # Box boundaries
295 self.base_x = base_x
296 self.base_y = base_y
297 self.width = width
298 self.height = height
299 # Cursor/scroll management
300 self.cursor_enable = False
301 self.cursor_index = 0 # Absolute offset
302 self.cursor_offset = 0 # Frame centric offset
303 self.scroll_offset = 0 # Frame centric offset
304 # Box specific content
305 # Format of each entry is [package_name,is_primary_recipe,alpha_sort_key,deps_sort_key]
306 self.task_list = []
307
308 @property
309 def inside_width(self):
310 return(self.width-2)
311
312 @property
313 def inside_height(self):
314 return(self.height-2)
315
316 # Populate the box's content, include the sort mappings and is_primary flag
317 def task_list_append(self,task_name,dep):
318 task_sort_alpha = task_name
319 task_sort_deps = dep.get_dep_sort(task_name)
320 is_primary = False
321 for primary in self.primary:
322 if task_name.startswith(primary+'.'):
323 is_primary = True
324 if SORT_BITBAKE_ENABLE:
325 task_sort_bitbake = dep.get_bb_sort(task_name)
326 self.task_list.append([task_name,is_primary,task_sort_alpha,task_sort_deps,task_sort_bitbake])
327 else:
328 self.task_list.append([task_name,is_primary,task_sort_alpha,task_sort_deps])
329
330 def reset(self):
331 self.task_list = []
332 self.cursor_index = 0 # Absolute offset
333 self.cursor_offset = 0 # Frame centric offset
334 self.scroll_offset = 0 # Frame centric offset
335
336 # Sort the box's content based on the current sort model
337 def sort(self):
338 if SORT_ALPHA == sort_model:
339 self.task_list.sort(key = lambda x: x[TASK_SORT_ALPHA])
340 elif SORT_DEPS == sort_model:
341 self.task_list.sort(key = lambda x: x[TASK_SORT_DEPS])
342 elif SORT_BITBAKE == sort_model:
343 self.task_list.sort(key = lambda x: x[TASK_SORT_BITBAKE])
344
 345 # The target package list (to highlight), from the command line
346 def set_primary(self,primary):
347 self.primary = primary
348
349 # Draw the box's outside frame
350 def draw_frame(self):
351 line_art_frame(self)
352 # Title
353 self.screen.addstr(self.base_y,
354 (self.base_x + (self.width//2))-((len(self.label)+2)//2),
355 '['+self.label+']')
356 self.screen.refresh()
357
358 # Draw the box's inside text content
359 def redraw(self):
360 task_list_len = len(self.task_list)
361 # Middle frame
362 body_line = "%s" % (' ' * (self.inside_width-1) )
363 for i in range(0,self.inside_height+1):
364 if i < (task_list_len + self.scroll_offset):
365 str_ctl = "%%-%ss" % (self.width-3)
366 # Safety assert
367 if (i + self.scroll_offset) >= task_list_len:
368 alert("REDRAW:%2d,%4d,%4d" % (i,self.scroll_offset,task_list_len),self.screen)
369 break
370
371 task_obj = self.task_list[i + self.scroll_offset]
372 task = task_obj[TASK_NAME][:self.inside_width-1]
373 task_primary = task_obj[TASK_PRIMARY]
374
375 if task_primary:
376 line = str_ctl % task[:self.inside_width-1]
377 self.screen.addstr(self.base_y+1+i, self.base_x+2, line, curses.A_BOLD)
378 else:
379 line = str_ctl % task[:self.inside_width-1]
380 self.screen.addstr(self.base_y+1+i, self.base_x+2, line)
381 else:
382 line = "%s" % (' ' * (self.inside_width-1) )
383 self.screen.addstr(self.base_y+1+i, self.base_x+2, line)
384 self.screen.refresh()
385
386 # Show the current selected task over the bottom of the frame
387 def show_selected(self,selected_task):
388 if not selected_task:
389 selected_task = self.get_selected()
390 tag_line = "%s%s%s" % ('[',CHAR_HBAR * (self.width-2),']')
391 self.screen.addstr(self.base_y + self.height, self.base_x, tag_line)
392 self.screen.addstr(self.base_y + self.height,
393 (self.base_x + (self.width//2))-((len(selected_task)+2)//2),
394 '['+selected_task+']')
395 self.screen.refresh()
396
397 # Load box with new table of content
398 def update_content(self,task_list):
399 self.task_list = task_list
400 if self.cursor_enable:
 401 self.cursor_update(turn_on=False)
402 self.cursor_index = 0
403 self.cursor_offset = 0
404 self.scroll_offset = 0
405 self.redraw()
406 if self.cursor_enable:
 407 self.cursor_update(turn_on=True)
408
409 # Manage the box's highlighted task and blinking cursor character
410 def cursor_on(self,is_on):
411 self.cursor_enable = is_on
412 self.cursor_update(is_on)
413
 414 # Highlight the currently pointed-to package; normal font for unselected packages
415 def cursor_update(self,turn_on=True):
416 str_ctl = "%%-%ss" % (self.inside_width-1)
417 try:
418 if len(self.task_list):
419 task_obj = self.task_list[self.cursor_index]
420 task = task_obj[TASK_NAME][:self.inside_width-1]
421 task_primary = task_obj[TASK_PRIMARY]
422 task_font = curses.A_BOLD if task_primary else 0
423 else:
424 task = ''
425 task_font = 0
426 except Exception as e:
427 alert("CURSOR_UPDATE:%s" % (e),self.screen)
428 return
429 if turn_on:
430 self.screen.addstr(self.base_y+1+self.cursor_offset,self.base_x+1,">", curses.color_pair(CURSES_HIGHLIGHT) | curses.A_BLINK)
431 self.screen.addstr(self.base_y+1+self.cursor_offset,self.base_x+2,str_ctl % task, curses.color_pair(CURSES_HIGHLIGHT) | task_font)
432 else:
433 self.screen.addstr(self.base_y+1+self.cursor_offset,self.base_x+1," ")
434 self.screen.addstr(self.base_y+1+self.cursor_offset,self.base_x+2,str_ctl % task, task_font)
435
436 # Down arrow
437 def line_down(self):
438 if len(self.task_list) <= (self.cursor_index+1):
439 return
440 self.cursor_update(turn_on=False)
441 self.cursor_index += 1
442 self.cursor_offset += 1
443 if self.cursor_offset > (self.inside_height):
444 self.cursor_offset -= 1
445 self.scroll_offset += 1
446 self.redraw()
447 self.cursor_update(turn_on=True)
448 debug_frame(self)
449
450 # Up arrow
451 def line_up(self):
452 if 0 > (self.cursor_index-1):
453 return
454 self.cursor_update(turn_on=False)
455 self.cursor_index -= 1
456 self.cursor_offset -= 1
457 if self.cursor_offset < 0:
458 self.cursor_offset += 1
459 self.scroll_offset -= 1
460 self.redraw()
461 self.cursor_update(turn_on=True)
462 debug_frame(self)
463
464 # Page down
465 def page_down(self):
466 max_task = len(self.task_list)-1
467 if max_task < self.inside_height:
468 return
469 self.cursor_update(turn_on=False)
470 self.cursor_index += 10
471 self.cursor_index = min(self.cursor_index,max_task)
472 self.cursor_offset = min(self.inside_height,self.cursor_index)
473 self.scroll_offset = self.cursor_index - self.cursor_offset
474 self.redraw()
475 self.cursor_update(turn_on=True)
476 debug_frame(self)
477
478 # Page up
479 def page_up(self):
480 max_task = len(self.task_list)-1
481 if max_task < self.inside_height:
482 return
483 self.cursor_update(turn_on=False)
484 self.cursor_index -= 10
485 self.cursor_index = max(self.cursor_index,0)
486 self.cursor_offset = max(0, self.inside_height - (max_task - self.cursor_index))
487 self.scroll_offset = self.cursor_index - self.cursor_offset
488 self.redraw()
489 self.cursor_update(turn_on=True)
490 debug_frame(self)
491
492 # Return the currently selected task name for this box
493 def get_selected(self):
494 if self.task_list:
495 return(self.task_list[self.cursor_index][TASK_NAME])
496 else:
497 return('')
498
499#################################################
500### The helper sub-windows
501###
502
503# Show persistent help at the top of the screen
504class HelpBarView(NBox):
505 def __init__(self, screen, label, primary, base_x, base_y, width, height):
506 super(HelpBarView, self).__init__(screen, label, primary, base_x, base_y, width, height)
507
508 def show_help(self,show):
509 self.screen.addstr(self.base_y,self.base_x, "%s" % (' ' * self.inside_width))
510 if show:
511 help = "Help='?' Filter='/' NextBox=<Tab> Select=<Enter> Print='p','P' Quit='q'"
512 bar_size = self.inside_width - 5 - len(help)
513 self.screen.addstr(self.base_y,self.base_x+((self.inside_width-len(help))//2), help)
514 self.screen.refresh()
515
516# Pop up a detailed Help box
517class HelpBoxView(NBox):
518 def __init__(self, screen, label, primary, base_x, base_y, width, height, dep):
519 super(HelpBoxView, self).__init__(screen, label, primary, base_x, base_y, width, height)
520 self.x_pos = 0
521 self.y_pos = 0
522 self.dep = dep
523
 524 # Instantiate the pop-up help box
525 def show_help(self,show):
526 self.x_pos = self.base_x + 4
527 self.y_pos = self.base_y + 2
528
529 def add_line(line):
530 if line:
531 self.screen.addstr(self.y_pos,self.x_pos,line)
532 self.y_pos += 1
533
 534 # Gather some statistics
535 dep_count = 0
536 rdep_count = 0
537 for task_obj in self.dep.depends_model:
538 if TYPE_DEP == task_obj[DEPENDS_TYPE]:
539 dep_count += 1
540 elif TYPE_RDEP == task_obj[DEPENDS_TYPE]:
541 rdep_count += 1
542
543 self.draw_frame()
544 line_art_fixup(self.dep)
545 add_line("Quit : 'q' ")
546 add_line("Filter task names : '/'")
547 add_line("Tab to next box : <Tab>")
548 add_line("Select a task : <Enter>")
549 add_line("Print task's deps : 'p'")
550 add_line("Print recipe's deps : 'P'")
551 add_line(" -> '%s'" % print_file_name)
552 add_line("Sort toggle : 's'")
553 add_line(" %s Recipe inner-depends order" % ('->' if (SORT_DEPS == sort_model) else '- '))
554 add_line(" %s Alpha-numeric order" % ('->' if (SORT_ALPHA == sort_model) else '- '))
555 if SORT_BITBAKE_ENABLE:
 556 add_line(" %s Bitbake order" % ('->' if (SORT_BITBAKE == sort_model) else '- '))
557 add_line("Alternate backspace : <CTRL-H>")
558 add_line("")
559 add_line("Primary recipes = %s" % ','.join(self.primary))
560 add_line("Task count = %4d" % len(self.dep.pkg_model))
561 add_line("Deps count = %4d" % dep_count)
562 add_line("RDeps count = %4d" % rdep_count)
563 add_line("")
564 self.screen.addstr(self.y_pos,self.x_pos+7,"<Press any key>", curses.color_pair(CURSES_HIGHLIGHT))
565 self.screen.refresh()
566 c = self.screen.getch()
567
568# Show a progress bar
569class ProgressView(NBox):
570 def __init__(self, screen, label, primary, base_x, base_y, width, height):
571 super(ProgressView, self).__init__(screen, label, primary, base_x, base_y, width, height)
572
573 def progress(self,title,current,max):
574 if title:
575 self.label = title
576 else:
577 title = self.label
 578 if max <= 0: max = 10
579 bar_size = self.width - 7 - len(title)
580 bar_done = int( (float(current)/float(max)) * float(bar_size) )
581 self.screen.addstr(self.base_y,self.base_x, " %s:[%s%s]" % (title,'*' * bar_done,' ' * (bar_size-bar_done)))
582 self.screen.refresh()
583 return(current+1)
584
585 def clear(self):
586 self.screen.addstr(self.base_y,self.base_x, "%s" % (' ' * self.width))
587 self.screen.refresh()
588
589# Implement a task filter bar
590class FilterView(NBox):
591 SEARCH_NOP = 0
592 SEARCH_GO = 1
593 SEARCH_CANCEL = 2
594
595 def __init__(self, screen, label, primary, base_x, base_y, width, height):
596 super(FilterView, self).__init__(screen, label, primary, base_x, base_y, width, height)
597 self.do_show = False
598 self.filter_str = ""
599
600 def clear(self,enable_show=True):
601 self.filter_str = ""
602
603 def show(self,enable_show=True):
604 self.do_show = enable_show
605 if self.do_show:
606 self.screen.addstr(self.base_y,self.base_x, "[ Filter: %-25s ] '/'=cancel, format='abc' " % self.filter_str[0:25])
607 else:
608 self.screen.addstr(self.base_y,self.base_x, "%s" % (' ' * self.width))
609 self.screen.refresh()
610
611 def show_prompt(self):
612 self.screen.addstr(self.base_y,self.base_x + 10 + len(self.filter_str), " ")
613 self.screen.addstr(self.base_y,self.base_x + 10 + len(self.filter_str), "")
614
615 # Keys specific to the filter box (start/stop filter keys are in the main loop)
616 def input(self,c,ch):
617 ret = self.SEARCH_GO
618 if c in (curses.KEY_BACKSPACE,CHAR_BS_H):
619 # Backspace
620 if self.filter_str:
621 self.filter_str = self.filter_str[0:-1]
622 self.show()
623 elif ((ch >= 'a') and (ch <= 'z')) or ((ch >= 'A') and (ch <= 'Z')) or ((ch >= '0') and (ch <= '9')) or (ch in (' ','_','.','-')):
 624 # isalnum() acts strangely with keypad(True), so use explicit bounds
625 self.filter_str += ch
626 self.show()
627 else:
628 ret = self.SEARCH_NOP
629 return(ret)
630
631
632#################################################
633### The primary dependency windows
634###
635
636# The main list of package tasks
637class PackageView(NBox):
638 def __init__(self, screen, label, primary, base_x, base_y, width, height):
639 super(PackageView, self).__init__(screen, label, primary, base_x, base_y, width, height)
640
 641 # Find and vertically center a selected task (from filter or from dependent box)
642 # The 'task_filter_str' can be a full or a partial (filter) task name
643 def find(self,task_filter_str):
644 found = False
645 max = self.height-2
646 if not task_filter_str:
647 return(found)
648 for i,task_obj in enumerate(self.task_list):
649 task = task_obj[TASK_NAME]
650 if task.startswith(task_filter_str):
651 self.cursor_on(False)
652 self.cursor_index = i
653
654 # Position selected at vertical center
655 vcenter = self.inside_height // 2
656 if self.cursor_index <= vcenter:
657 self.scroll_offset = 0
658 self.cursor_offset = self.cursor_index
659 elif self.cursor_index >= (len(self.task_list) - vcenter - 1):
660 self.cursor_offset = self.inside_height-1
661 self.scroll_offset = self.cursor_index - self.cursor_offset
662 else:
663 self.cursor_offset = vcenter
664 self.scroll_offset = self.cursor_index - self.cursor_offset
665
666 self.redraw()
667 self.cursor_on(True)
668 found = True
669 break
670 return(found)
671
672# The view of dependent packages
673class PackageDepView(NBox):
674 def __init__(self, screen, label, primary, base_x, base_y, width, height):
675 super(PackageDepView, self).__init__(screen, label, primary, base_x, base_y, width, height)
676
677# The view of reverse-dependent packages
678class PackageReverseDepView(NBox):
679 def __init__(self, screen, label, primary, base_x, base_y, width, height):
680 super(PackageReverseDepView, self).__init__(screen, label, primary, base_x, base_y, width, height)
681
682
683#################################################
684### DepExplorer : The parent frame and object
685###
686
687class DepExplorer(NBox):
688 def __init__(self,screen):
689 title = "Task Dependency Explorer"
 690 super(DepExplorer, self).__init__(screen, title,'',0,0,80,23)
691
692 self.screen = screen
693 self.pkg_model = []
694 self.depends_model = []
695 self.dep_sort_map = {}
696 self.bb_sort_map = {}
697 self.filter_str = ''
698 self.filter_prev = 'deadbeef'
699
700 if self.screen:
701 self.help_bar_view = HelpBarView(screen, "Help",'',1,1,79,1)
702 self.help_box_view = HelpBoxView(screen, "Help",'',0,2,40,20,self)
703 self.progress_view = ProgressView(screen, "Progress",'',2,1,76,1)
704 self.filter_view = FilterView(screen, "Filter",'',2,1,76,1)
705 self.package_view = PackageView(screen, "Package",'alpha', 0,2,40,20)
706 self.dep_view = PackageDepView(screen, "Dependencies",'beta',40,2,40,10)
707 self.reverse_view = PackageReverseDepView(screen, "Dependent Tasks",'gamma',40,13,40,9)
708 self.draw_frames()
709
710 # Draw this main window's frame and all sub-windows
711 def draw_frames(self):
712 self.draw_frame()
713 self.package_view.draw_frame()
714 self.dep_view.draw_frame()
715 self.reverse_view.draw_frame()
716 if is_filter:
717 self.filter_view.show(True)
718 self.filter_view.show_prompt()
719 else:
720 self.help_bar_view.show_help(True)
721 self.package_view.redraw()
722 self.dep_view.redraw()
723 self.reverse_view.redraw()
724 self.show_selected(self.package_view.get_selected())
725 line_art_fixup(self)
726
727 # Parse the bitbake dependency event object
728 def parse(self, depgraph):
729 for task in depgraph["tdepends"]:
730 self.pkg_model.insert(0, task)
731 for depend in depgraph["tdepends"][task]:
732 self.depends_model.insert (0, (TYPE_DEP, task, depend))
733 self.depends_model.insert (0, (TYPE_RDEP, depend, task))
734 if self.screen:
735 self.dep_sort_prep()
736
 737 # Prepare the dependency sort order keys
 738 # This method creates sort keys per recipe's tasks in
 739 # the order of each recipe's internal dependencies
 740 # Method:
 741 # Collect the tasks in dep order into dep_sort_map = {}
 742 # (a) Find a task that has no remaining dependencies
 743 # Ignore non-recipe specific tasks
 744 # (b) Add it to the sort mapping dict with
 745 # key of "<task_group>_<order>"
 746 # (c) Remove it as a dependency from the other tasks
 747 # (d) Repeat until all tasks are mapped
 748 # Use placeholders to ensure each sub-dict is instantiated
749 def dep_sort_prep(self):
750 self.progress_view.progress('DepSort',0,4)
751 # Init the task base entries
752 self.progress_view.progress('DepSort',1,4)
753 dep_table = {}
754 bb_index = 0
755 for task in self.pkg_model:
756 # First define the incoming bitbake sort order
757 self.bb_sort_map[task] = "%04d" % (bb_index)
758 bb_index += 1
759 task_group = task[0:task.find('.')]
760 if task_group not in dep_table:
761 dep_table[task_group] = {}
762 dep_table[task_group]['-'] = {} # Placeholder
763 if task not in dep_table[task_group]:
764 dep_table[task_group][task] = {}
765 dep_table[task_group][task]['-'] = {} # Placeholder
 766 # Add the task dependency entries
767 self.progress_view.progress('DepSort',2,4)
768 for task_obj in self.depends_model:
769 if task_obj[DEPENDS_TYPE] != TYPE_DEP:
770 continue
771 task = task_obj[DEPENDS_TASK]
772 task_dep = task_obj[DEPENDS_DEPS]
773 task_group = task[0:task.find('.')]
774 # Only track depends within same group
775 if task_dep.startswith(task_group+'.'):
776 dep_table[task_group][task][task_dep] = 1
777 self.progress_view.progress('DepSort',3,4)
778 for task_group in dep_table:
779 dep_index = 0
780 # Whittle down the tasks of each group
781 this_pass = 1
782 do_loop = True
783 while (len(dep_table[task_group]) > 1) and do_loop:
784 this_pass += 1
785 is_change = False
786 delete_list = []
787 for task in dep_table[task_group]:
788 if '-' == task:
789 continue
790 if 1 == len(dep_table[task_group][task]):
791 is_change = True
792 # No more deps, so collect this task...
793 self.dep_sort_map[task] = "%s_%04d" % (task_group,dep_index)
794 dep_index += 1
795 # ... remove it from other lists as resolved ...
796 for dep_task in dep_table[task_group]:
797 if task in dep_table[task_group][dep_task]:
798 del dep_table[task_group][dep_task][task]
 799 # ... and remove it from the task group
800 delete_list.append(task)
801 for task in delete_list:
802 del dep_table[task_group][task]
803 if not is_change:
804 alert("ERROR:DEP_SIEVE_NO_CHANGE:%s" % task_group,self.screen)
805 do_loop = False
806 continue
807 self.progress_view.progress('',4,4)
808 self.progress_view.clear()
809 self.help_bar_view.show_help(True)
810 if len(self.dep_sort_map) != len(self.pkg_model):
811 alert("ErrorDepSort:%d/%d" % (len(self.dep_sort_map),len(self.pkg_model)),self.screen)
812
813 # Look up a dep sort order key
814 def get_dep_sort(self,key):
815 if key in self.dep_sort_map:
816 return(self.dep_sort_map[key])
817 else:
818 return(key)
819
820 # Look up a bitbake sort order key
821 def get_bb_sort(self,key):
822 if key in self.bb_sort_map:
823 return(self.bb_sort_map[key])
824 else:
825 return(key)
826
827 # Find the selected package in the main frame, update the dependency frames content accordingly
828 def select(self, package_name, only_update_dependents=False):
829 if not package_name:
830 package_name = self.package_view.get_selected()
831 # alert("SELECT:%s:" % package_name,self.screen)
832
833 if self.filter_str != self.filter_prev:
834 self.package_view.cursor_on(False)
 836 # Fill the main package task list using the new filter
836 self.package_view.task_list = []
837 for package in self.pkg_model:
838 if self.filter_str:
839 if self.filter_str in package:
840 self.package_view.task_list_append(package,self)
841 else:
842 self.package_view.task_list_append(package,self)
843 self.package_view.sort()
844 self.filter_prev = self.filter_str
845
 846 # Old position is lost; reset to the top, then find() re-centers the previous task (if still filtered in)
847 self.package_view.cursor_index = 0
848 self.package_view.cursor_offset = 0
849 self.package_view.scroll_offset = 0
850 self.package_view.redraw()
851 self.package_view.cursor_on(True)
852
853 # Make sure the selected package is in view, with implicit redraw()
854 if (not only_update_dependents):
855 self.package_view.find(package_name)
 856 # In case the selected name changed (i.e. the filter removed the previous one)
857 package_name = self.package_view.get_selected()
858
859 # Filter the package's dependent list to the dependent view
860 self.dep_view.reset()
861 for package_def in self.depends_model:
862 if (package_def[DEPENDS_TYPE] == TYPE_DEP) and (package_def[DEPENDS_TASK] == package_name):
863 self.dep_view.task_list_append(package_def[DEPENDS_DEPS],self)
864 self.dep_view.sort()
865 self.dep_view.redraw()
866 # Filter the package's dependent list to the reverse dependent view
867 self.reverse_view.reset()
868 for package_def in self.depends_model:
869 if (package_def[DEPENDS_TYPE] == TYPE_RDEP) and (package_def[DEPENDS_TASK] == package_name):
870 self.reverse_view.task_list_append(package_def[DEPENDS_DEPS],self)
871 self.reverse_view.sort()
872 self.reverse_view.redraw()
873 self.show_selected(package_name)
874 self.screen.refresh()
875
876 # The print-to-file method
877 def print_deps(self,whole_group=False):
878 global is_printed
879 # Print the selected deptree(s) to a file
880 if not is_printed:
881 try:
 882 # Move any existing file to a backup before the first write
883 if os.path.isfile(print_file_name):
884 os.system('mv -f %s %s' % (print_file_name,print_file_backup_name))
885 except Exception as e:
886 alert(e,self.screen)
887 alert('',self.screen)
888 print_list = []
889 selected_task = self.package_view.get_selected()
890 if not selected_task:
891 return
892 if not whole_group:
893 print_list.append(selected_task)
894 else:
895 # Use the presorted task_group order from 'package_view'
896 task_group = selected_task[0:selected_task.find('.')+1]
897 for task_obj in self.package_view.task_list:
898 task = task_obj[TASK_NAME]
899 if task.startswith(task_group):
900 print_list.append(task)
901 with open(print_file_name, "a") as fd:
902 print_max = len(print_list)
903 print_count = 1
904 self.progress_view.progress('Write "%s"' % print_file_name,0,print_max)
905 for task in print_list:
906 print_count = self.progress_view.progress('',print_count,print_max)
907 self.select(task)
 908 self.screen.refresh()
909 # Utilize the current print output model
910 if print_model == PRINT_MODEL_1:
 911 print("=== Dependency Snapshot ===",file=fd)
912 print(" = Package =",file=fd)
913 print(' '+task,file=fd)
914 # Fill in the matching dependencies
915 print(" = Dependencies =",file=fd)
916 for task_obj in self.dep_view.task_list:
917 print(' '+ task_obj[TASK_NAME],file=fd)
918 print(" = Dependent Tasks =",file=fd)
919 for task_obj in self.reverse_view.task_list:
920 print(' '+ task_obj[TASK_NAME],file=fd)
921 if print_model == PRINT_MODEL_2:
 922 print("=== Dependency Snapshot ===",file=fd)
923 dep_count = len(self.dep_view.task_list) - 1
924 for i,task_obj in enumerate(self.dep_view.task_list):
925 print('%s%s' % ("Dep =" if (i==dep_count) else " ",task_obj[TASK_NAME]),file=fd)
926 if not self.dep_view.task_list:
927 print('Dep =',file=fd)
928 print("Package=%s" % task,file=fd)
929 for i,task_obj in enumerate(self.reverse_view.task_list):
930 print('%s%s' % ("RDep =" if (i==0) else " ",task_obj[TASK_NAME]),file=fd)
931 if not self.reverse_view.task_list:
932 print('RDep =',file=fd)
933 curses.napms(2000)
934 self.progress_view.clear()
935 self.help_bar_view.show_help(True)
936 print('',file=fd)
937 # Restore display to original selected task
938 self.select(selected_task)
939 is_printed = True
940
941#################################################
942### Load bitbake data
943###
944
945def bitbake_load(server, eventHandler, params, dep, curses_off, screen):
946 global bar_len_old
947 bar_len_old = 0
948
949 # Support no screen
950 def progress(msg,count,max):
951 global bar_len_old
952 if screen:
953 dep.progress_view.progress(msg,count,max)
954 else:
955 if msg:
956 if bar_len_old:
957 bar_len_old = 0
958 print("\n")
959 print(f"{msg}: ({count} of {max})")
960 else:
961 bar_len = int((count*40)/max)
962 if bar_len_old != bar_len:
963 print(f"{'*' * (bar_len-bar_len_old)}",end='',flush=True)
964 bar_len_old = bar_len
965 def clear():
966 if screen:
967 dep.progress_view.clear()
968 def clear_curses(screen):
969 if screen:
970 curses_off(screen)
971
972 #
973 # Trigger bitbake "generateDepTreeEvent"
974 #
975
976 cmdline = ''
977 try:
978 params.updateToServer(server, os.environ.copy())
979 params.updateFromServer(server)
980 cmdline = params.parseActions()
981 if not cmdline:
982 clear_curses(screen)
983 print("ERROR: nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
984 return 1,cmdline
985 if 'msg' in cmdline and cmdline['msg']:
986 clear_curses(screen)
987 print('ERROR: ' + cmdline['msg'])
988 return 1,cmdline
989 cmdline = cmdline['action']
990 if not cmdline or cmdline[0] != "generateDotGraph":
991 clear_curses(screen)
992 print("ERROR: This UI requires the -g option")
993 return 1,cmdline
994 ret, error = server.runCommand(["generateDepTreeEvent", cmdline[1], cmdline[2]])
995 if error:
996 clear_curses(screen)
997 print("ERROR: running command '%s': %s" % (cmdline, error))
998 return 1,cmdline
999 elif not ret:
1000 clear_curses(screen)
1001 print("ERROR: running command '%s': returned %s" % (cmdline, ret))
1002 return 1,cmdline
1003 except client.Fault as x:
1004 clear_curses(screen)
1005 print("ERROR: XMLRPC Fault getting commandline:\n %s" % x)
1006 return 1,cmdline
1007 except Exception as e:
1008 clear_curses(screen)
1009 print("ERROR: in startup:\n %s" % traceback.format_exc())
1010 return 1,cmdline
1011
1012 #
1013 # Receive data from bitbake
1014 #
1015
1016 progress_total = 0
1017 load_bitbake = True
1018 quit, shutdown = False, 0 # shutdown counts Ctrl-C presses (see KeyboardInterrupt below)
1019 try:
1020 while load_bitbake:
1021 try:
1022 event = eventHandler.waitEvent(0.25)
1023 if quit:
1024 _, error = server.runCommand(["stateForceShutdown"])
1025 clear_curses(screen)
1026 if error:
1027 print('Unable to cleanly stop: %s' % error)
1028 break
1029
1030 if event is None:
1031 continue
1032
1033 if isinstance(event, bb.event.CacheLoadStarted):
1034 progress_total = event.total
1035 progress('Loading Cache',0,progress_total)
1036 continue
1037
1038 if isinstance(event, bb.event.CacheLoadProgress):
1039 x = event.current
1040 progress('',x,progress_total)
1041 continue
1042
1043 if isinstance(event, bb.event.CacheLoadCompleted):
1044 clear()
1045 progress('Bitbake... ',1,2)
1046 continue
1047
1048 if isinstance(event, bb.event.ParseStarted):
1049 progress_total = event.total
1050 progress('Processing recipes',0,progress_total)
1051 if progress_total == 0:
1052 continue
1053
1054 if isinstance(event, bb.event.ParseProgress):
1055 x = event.current
1056 progress('',x,progress_total)
1057 continue
1058
1059 if isinstance(event, bb.event.ParseCompleted):
1060 progress('Generating dependency tree',0,3)
1061 continue
1062
1063 if isinstance(event, bb.event.DepTreeGenerated):
1064 progress('Generating dependency tree',1,3)
1065 dep.parse(event._depgraph)
1066 progress('Generating dependency tree',2,3)
1067
1068 if isinstance(event, bb.command.CommandCompleted):
1069 load_bitbake = False
1070 progress('Generating dependency tree',3,3)
1071 clear()
1072 if screen:
1073 dep.help_bar_view.show_help(True)
1074 continue
1075
1076 if isinstance(event, bb.event.NoProvider):
1077 clear_curses(screen)
1078 print('ERROR: %s' % event)
1079
1080 _, error = server.runCommand(["stateShutdown"])
1081 if error:
1082 print('ERROR: Unable to cleanly shutdown: %s' % error)
1083 return 1,cmdline
1084
1085 if isinstance(event, bb.command.CommandFailed):
1086 clear_curses(screen)
1087 print('ERROR: ' + str(event))
1088 return event.exitcode,cmdline
1089
1090 if isinstance(event, bb.command.CommandExit):
1091 clear_curses(screen)
1092 return event.exitcode,cmdline
1093
1094 if isinstance(event, bb.cooker.CookerExit):
1095 break
1096
1097 continue
1098 except EnvironmentError as ioerror:
1099 # ignore interrupted io
1100 if ioerror.args[0] == 4:
1101 pass
1102 except KeyboardInterrupt:
1103 if shutdown == 2:
1104 clear_curses(screen)
1105 print("\nThird Keyboard Interrupt, exit.\n")
1106 break
1107 if shutdown == 1:
1108 clear_curses(screen)
1109 print("\nSecond Keyboard Interrupt, stopping...\n")
1110 _, error = server.runCommand(["stateForceShutdown"])
1111 if error:
1112 print('Unable to cleanly stop: %s' % error)
1113 if shutdown == 0:
1114 clear_curses(screen)
1115 print("\nKeyboard Interrupt, closing down...\n")
1116 _, error = server.runCommand(["stateShutdown"])
1117 if error:
1118 print('Unable to cleanly shutdown: %s' % error)
1119 shutdown = shutdown + 1
1120 pass
1121 except Exception as e:
1122 # Safe exit on error
1123 clear_curses(screen)
1124 print("Exception : %s" % e)
1125 print("Exception in startup:\n %s" % traceback.format_exc())
1126
1127 return 0,cmdline
1128
1129#################################################
1130### main
1131###
1132
1133SCREEN_COL_MIN = 83
1134SCREEN_ROW_MIN = 26
1135
1136def main(server, eventHandler, params):
1137 global verbose
1138 global sort_model
1139 global print_model
1140 global is_printed
1141 global is_filter
1142 global screen_too_small
1143
1144 shutdown = 0
1145 screen_too_small = False
1146 quit = False
1147
1148 # Unit test with no terminal?
1149 if unit_test_noterm:
1150 # Load bitbake, test that there is valid dependency data, then exit
1151 screen = None
1152 print("* UNIT TEST:START")
1153 dep = DepExplorer(screen)
1154 print("* UNIT TEST:BITBAKE FETCH")
1155 ret,cmdline = bitbake_load(server, eventHandler, params, dep, None, screen)
1156 if ret:
1157 print("* UNIT TEST: BITBAKE FAILED")
1158 return ret
1159 # Test the acquired dependency data
1160 quilt_native_deps = 0
1161 quilt_native_rdeps = 0
1162 quilt_deps = 0
1163 quilt_rdeps = 0
1164 for i,task_obj in enumerate(dep.depends_model):
1165 if TYPE_DEP == task_obj[0]:
1166 task = task_obj[1]
1167 if task.startswith('quilt-native'):
1168 quilt_native_deps += 1
1169 elif task.startswith('quilt'):
1170 quilt_deps += 1
1171 elif TYPE_RDEP == task_obj[0]:
1172 task = task_obj[1]
1173 if task.startswith('quilt-native'):
1174 quilt_native_rdeps += 1
1175 elif task.startswith('quilt'):
1176 quilt_rdeps += 1
1177 # Print results
1178 failed = False
1179 if 0 < len(dep.depends_model):
1180 print(f"Pass:Bitbake dependency count = {len(dep.depends_model)}")
1181 else:
1182 failed = True
1183 print(f"FAIL:Bitbake dependency count = 0")
1184 if quilt_native_deps:
1185 print(f"Pass:Quilt-native depends count = {quilt_native_deps}")
1186 else:
1187 failed = True
1188 print(f"FAIL:Quilt-native depends count = 0")
1189 if quilt_native_rdeps:
1190 print(f"Pass:Quilt-native rdepends count = {quilt_native_rdeps}")
1191 else:
1192 failed = True
1193 print(f"FAIL:Quilt-native rdepends count = 0")
1194 if quilt_deps:
1195 print(f"Pass:Quilt depends count = {quilt_deps}")
1196 else:
1197 failed = True
1198 print(f"FAIL:Quilt depends count = 0")
1199 if quilt_rdeps:
1200 print(f"Pass:Quilt rdepends count = {quilt_rdeps}")
1201 else:
1202 failed = True
1203 print(f"FAIL:Quilt rdepends count = 0")
1204 print("* UNIT TEST:STOP")
1205 return failed
1206
1207 # Helper method to dynamically test whether the parent window is too small
1208 def check_screen_size(dep, active_package):
1209 global screen_too_small
1210 rows, cols = screen.getmaxyx()
1211 if (rows >= SCREEN_ROW_MIN) and (cols >= SCREEN_COL_MIN):
1212 if screen_too_small:
1213 # Now big enough, remove error message and redraw screen
1214 dep.draw_frames()
1215 active_package.cursor_on(True)
1216 screen_too_small = False
1217 return True
1218 # Test on App init
1219 if not dep:
1220 # Do not start this app if screen not big enough
1221 curses.endwin()
1222 print("")
1223 print("ERROR(Taskexp_cli): Minimal screen size is %dx%d" % (SCREEN_COL_MIN,SCREEN_ROW_MIN))
1224 print("Current screen is Cols=%d,Rows=%d" % (cols,rows))
1225 return False
1226 # First time window too small
1227 if not screen_too_small:
1228 active_package.cursor_on(False)
1229 dep.screen.addstr(0,2,'[BIGGER WINDOW PLEASE]', curses.color_pair(CURSES_WARNING) | curses.A_BLINK)
1230 screen_too_small = True
1231 return False
1232
1233 # Helper method to turn off curses mode
1234 def curses_off(screen):
1235 if not screen: return
1236 # Safe error exit
1237 screen.keypad(False)
1238 curses.echo()
1239 curses.curs_set(1)
1240 curses.endwin()
1241
1242 if unit_test_results:
1243 print('\nUnit Test Results:')
1244 for line in unit_test_results:
1245 print(" %s" % line)
1246
1247 #
1248 # Initialize the ncurses environment
1249 #
1250
1251 screen = curses.initscr()
1252 try:
1253 if not check_screen_size(None, None):
1254 exit(1)
1255 try:
1256 curses.start_color()
1257 curses.use_default_colors()
1258 curses.init_pair(0xFF, curses.COLOR_BLACK, curses.COLOR_WHITE)
1259 curses.init_pair(CURSES_NORMAL, curses.COLOR_WHITE, curses.COLOR_BLACK)
1260 curses.init_pair(CURSES_HIGHLIGHT, curses.COLOR_WHITE, curses.COLOR_BLUE)
1261 curses.init_pair(CURSES_WARNING, curses.COLOR_WHITE, curses.COLOR_RED)
1262 except:
1263 curses.endwin()
1264 print("")
1265 print("ERROR(Taskexp_cli): Requires 256 colors. Please use this or the equivalent:")
1266 print(" $ export TERM='xterm-256color'")
1267 exit(1)
1268
1269 screen.keypad(True)
1270 curses.noecho()
1271 curses.curs_set(0)
1272 screen.refresh()
1273 except Exception as e:
1274 # Safe error exit
1275 curses_off(screen)
1276 print("Exception : %s" % e)
1277 print("Exception in startup:\n %s" % traceback.format_exc())
1278 exit(1)
1279
1280 try:
1281 #
1282 # Instantiate the presentation layers
1283 #
1284
1285 dep = DepExplorer(screen)
1286
1287 #
1288 # Prepare bitbake
1289 #
1290
1291 # Fetch bitbake dependency data
1292 ret,cmdline = bitbake_load(server, eventHandler, params, dep, curses_off, screen)
1293 if ret: return ret
1294
1295 #
1296 # Preset the views
1297 #
1298
1299 # Cmdline example = ['generateDotGraph', ['acl', 'zlib'], 'build']
1300 primary_packages = cmdline[1]
1301 dep.package_view.set_primary(primary_packages)
1302 dep.dep_view.set_primary(primary_packages)
1303 dep.reverse_view.set_primary(primary_packages)
1304 dep.help_box_view.set_primary(primary_packages)
1305 dep.help_bar_view.show_help(True)
1306 active_package = dep.package_view
1307 active_package.cursor_on(True)
1308 dep.select(primary_packages[0]+'.')
1309 if unit_test:
1310 alert('UNIT_TEST',screen)
1311
1312 # Helper method to start/stop the filter feature
1313 def filter_mode(new_filter_status):
1314 global is_filter
1315 if is_filter == new_filter_status:
1316 # Ignore no changes
1317 return
1318 if not new_filter_status:
1319 # Turn off
1320 curses.curs_set(0)
1321 #active_package.cursor_on(False)
1322 active_package = dep.package_view
1323 active_package.cursor_on(True)
1324 is_filter = False
1325 dep.help_bar_view.show_help(True)
1326 dep.filter_str = ''
1327 dep.select('')
1328 else:
1329 # Turn on
1330 curses.curs_set(1)
1331 dep.help_bar_view.show_help(False)
1332 dep.filter_view.clear()
1333 dep.filter_view.show(True)
1334 dep.filter_view.show_prompt()
1335 is_filter = True
1336
1337 #
1338 # Main user loop
1339 #
1340
1341 while not quit:
1342 if is_filter:
1343 dep.filter_view.show_prompt()
1344 if unit_test:
1345 c = unit_test_action(active_package)
1346 else:
1347 c = screen.getch()
1348 ch = chr(c)
1349
1350 # Do not draw if window now too small
1351 if not check_screen_size(dep,active_package):
1352 continue
1353
1354 if verbose:
1355 if c == CHAR_RETURN:
1356 screen.addstr(0, 4, "|%3d,CR |" % (c))
1357 else:
1358 screen.addstr(0, 4, "|%3d,%3s|" % (c,chr(c)))
1359
1360 # pre-map alternate filter close keys
1361 if is_filter and (c == CHAR_ESCAPE):
1362 # Alternate exit from filter
1363 ch = '/'
1364 c = ord(ch)
1365
1366 # Filter and non-filter mode command keys
1367 # https://docs.python.org/3/library/curses.html
1368 if c in (curses.KEY_UP,CHAR_UP):
1369 active_package.line_up()
1370 if active_package == dep.package_view:
1371 dep.select('',only_update_dependents=True)
1372 elif c in (curses.KEY_DOWN,CHAR_DOWN):
1373 active_package.line_down()
1374 if active_package == dep.package_view:
1375 dep.select('',only_update_dependents=True)
1376 elif curses.KEY_PPAGE == c:
1377 active_package.page_up()
1378 if active_package == dep.package_view:
1379 dep.select('',only_update_dependents=True)
1380 elif curses.KEY_NPAGE == c:
1381 active_package.page_down()
1382 if active_package == dep.package_view:
1383 dep.select('',only_update_dependents=True)
1384 elif CHAR_TAB == c:
1385 # Tab between boxes
1386 active_package.cursor_on(False)
1387 if active_package == dep.package_view:
1388 active_package = dep.dep_view
1389 elif active_package == dep.dep_view:
1390 active_package = dep.reverse_view
1391 else:
1392 active_package = dep.package_view
1393 active_package.cursor_on(True)
1394 elif curses.KEY_BTAB == c:
1395 # Shift-Tab reverse between boxes
1396 active_package.cursor_on(False)
1397 if active_package == dep.package_view:
1398 active_package = dep.reverse_view
1399 elif active_package == dep.reverse_view:
1400 active_package = dep.dep_view
1401 else:
1402 active_package = dep.package_view
1403 active_package.cursor_on(True)
1404 elif (CHAR_RETURN == c):
1405 # CR to select
1406 selected = active_package.get_selected()
1407 if selected:
1408 active_package.cursor_on(False)
1409 active_package = dep.package_view
1410 filter_mode(False)
1411 dep.select(selected)
1412 else:
1413 filter_mode(False)
1414 dep.select(primary_packages[0]+'.')
1415
1416 elif '/' == ch: # Enter/exit dep.filter_view
1417 if is_filter:
1418 filter_mode(False)
1419 else:
1420 filter_mode(True)
1421 elif is_filter:
1422 # If in filter mode, re-direct all these other keys to the filter box
1423 result = dep.filter_view.input(c,ch)
1424 dep.filter_str = dep.filter_view.filter_str
1425 dep.select('')
1426
1427 # Non-filter mode command keys
1428 elif 'p' == ch:
1429 dep.print_deps(whole_group=False)
1430 elif 'P' == ch:
1431 dep.print_deps(whole_group=True)
1432 elif 'w' == ch:
1433 # Toggle the print model
1434 if print_model == PRINT_MODEL_1:
1435 print_model = PRINT_MODEL_2
1436 else:
1437 print_model = PRINT_MODEL_1
1438 elif 's' == ch:
1439 # Toggle the sort model
1440 if sort_model == SORT_DEPS:
1441 sort_model = SORT_ALPHA
1442 elif sort_model == SORT_ALPHA:
1443 if SORT_BITBAKE_ENABLE:
1444 sort_model = SORT_BITBAKE
1445 else:
1446 sort_model = SORT_DEPS
1447 else:
1448 sort_model = SORT_DEPS
1449 active_package.cursor_on(False)
1450 current_task = active_package.get_selected()
1451 dep.package_view.sort()
1452 dep.dep_view.sort()
1453 dep.reverse_view.sort()
1454 active_package = dep.package_view
1455 active_package.cursor_on(True)
1456 dep.select(current_task)
1457 # Announce the new sort model
1458 alert("SORT=%s" % ("ALPHA" if (sort_model == SORT_ALPHA) else "DEPS"),screen)
1459 alert('',screen)
1460
1461 elif 'q' == ch:
1462 quit = True
1463 elif ch in ('h','?'):
1464 dep.help_box_view.show_help(True)
1465 dep.select(active_package.get_selected())
1466
1467 #
1468 # Debugging commands
1469 #
1470
1471 elif 'V' == ch:
1472 verbose = not verbose
1473 alert('Verbose=%s' % str(verbose),screen)
1474 alert('',screen)
1475 elif 'R' == ch:
1476 screen.refresh()
1477 elif 'B' == ch:
1478 # Progress bar unit test
1479 dep.progress_view.progress('Test',0,40)
1480 curses.napms(1000)
1481 dep.progress_view.progress('',10,40)
1482 curses.napms(1000)
1483 dep.progress_view.progress('',20,40)
1484 curses.napms(1000)
1485 dep.progress_view.progress('',30,40)
1486 curses.napms(1000)
1487 dep.progress_view.progress('',40,40)
1488 curses.napms(1000)
1489 dep.progress_view.clear()
1490 dep.help_bar_view.show_help(True)
1491 elif 'Q' == ch:
1492 # Simulated error
1493 curses_off(screen)
1494 print('ERROR: simulated error exit')
1495 return 1
1496
1497 # Safe exit
1498 curses_off(screen)
1499 except Exception as e:
1500 # Safe exit on error
1501 curses_off(screen)
1502 print("Exception : %s" % e)
1503 print("Exception in startup:\n %s" % traceback.format_exc())
1504
1505 # Reminder to pick up your printed results
1506 if is_printed:
1507 print("")
1508 print("You have output ready!")
1509 print(" * Your printed dependency file is: %s" % print_file_name)
1510 print(" * Your previous results saved in: %s" % print_file_backup_name)
1511 print("")
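
The dep_sort_prep() method above is a sieve: it repeatedly collects tasks whose intra-recipe dependencies are all resolved, assigns them the next sort key, and removes them from the remaining tasks, which is Kahn-style topological ordering applied per recipe. A condensed sketch of the same idea on a plain dict (names here are illustrative, not the module's API):

    def dep_sort(tasks):
        # tasks: {task_name: set of task_names it depends on}
        order = {}
        pending = {t: set(d) for t, d in tasks.items()}
        index = 0
        while pending:
            ready = [t for t, deps in pending.items() if not deps]
            if not ready:
                break  # dependency cycle: bail out rather than loop forever
            for t in sorted(ready):
                order[t] = "%04d" % index  # same zero-padded key style as above
                index += 1
                del pending[t]
            for deps in pending.values():
                deps.difference_update(ready)
        return order

    # dep_sort({'acl.do_configure': set(), 'acl.do_compile': {'acl.do_configure'}})
    # -> {'acl.do_configure': '0000', 'acl.do_compile': '0001'}
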
diff --git a/bitbake/lib/bb/ui/toasterui.py b/bitbake/lib/bb/ui/toasterui.py
index ec5bd4f105..6bd21f1844 100644
--- a/bitbake/lib/bb/ui/toasterui.py
+++ b/bitbake/lib/bb/ui/toasterui.py
@@ -385,7 +385,7 @@ def main(server, eventHandler, params):
385 main.shutdown = 1 385 main.shutdown = 1
386 386
387 logger.info("ToasterUI build done, brbe: %s", brbe) 387 logger.info("ToasterUI build done, brbe: %s", brbe)
388 continue 388 break
389 389
390 if isinstance(event, (bb.command.CommandCompleted, 390 if isinstance(event, (bb.command.CommandCompleted,
391 bb.command.CommandFailed, 391 bb.command.CommandFailed,
diff --git a/bitbake/lib/bb/ui/uievent.py b/bitbake/lib/bb/ui/uievent.py
index 8607d0523b..c2f830d530 100644
--- a/bitbake/lib/bb/ui/uievent.py
+++ b/bitbake/lib/bb/ui/uievent.py
@@ -44,7 +44,7 @@ class BBUIEventQueue:
44 for count_tries in range(5): 44 for count_tries in range(5):
45 ret = self.BBServer.registerEventHandler(self.host, self.port) 45 ret = self.BBServer.registerEventHandler(self.host, self.port)
46 46
47 if isinstance(ret, collections.Iterable): 47 if isinstance(ret, collections.abc.Iterable):
48 self.EventHandle, error = ret 48 self.EventHandle, error = ret
49 else: 49 else:
50 self.EventHandle = ret 50 self.EventHandle = ret
@@ -65,35 +65,27 @@ class BBUIEventQueue:
65 self.server = server 65 self.server = server
66 66
67 self.t = threading.Thread() 67 self.t = threading.Thread()
68 self.t.setDaemon(True) 68 self.t.daemon = True
69 self.t.run = self.startCallbackHandler 69 self.t.run = self.startCallbackHandler
70 self.t.start() 70 self.t.start()
71 71
72 def getEvent(self): 72 def getEvent(self):
73 73 with bb.utils.lock_timeout(self.eventQueueLock):
74 self.eventQueueLock.acquire() 74 if not self.eventQueue:
75 75 return None
76 if len(self.eventQueue) == 0: 76 item = self.eventQueue.pop(0)
77 self.eventQueueLock.release() 77 if not self.eventQueue:
78 return None 78 self.eventQueueNotify.clear()
79 79 return item
80 item = self.eventQueue.pop(0)
81
82 if len(self.eventQueue) == 0:
83 self.eventQueueNotify.clear()
84
85 self.eventQueueLock.release()
86 return item
87 80
88 def waitEvent(self, delay): 81 def waitEvent(self, delay):
89 self.eventQueueNotify.wait(delay) 82 self.eventQueueNotify.wait(delay)
90 return self.getEvent() 83 return self.getEvent()
91 84
92 def queue_event(self, event): 85 def queue_event(self, event):
93 self.eventQueueLock.acquire() 86 with bb.utils.lock_timeout(self.eventQueueLock):
94 self.eventQueue.append(event) 87 self.eventQueue.append(event)
95 self.eventQueueNotify.set() 88 self.eventQueueNotify.set()
96 self.eventQueueLock.release()
97 89
98 def send_event(self, event): 90 def send_event(self, event):
99 self.queue_event(pickle.loads(event)) 91 self.queue_event(pickle.loads(event))
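
The uievent rewrite replaces manual acquire()/release() pairs with bb.utils.lock_timeout(), a context manager that fails loudly if a lock cannot be taken in a reasonable time instead of deadlocking silently, and guarantees release on any exit path. A simplified sketch of what such a helper can look like (the real implementation lives in bb/utils.py; the timeout value is an assumption):

    from contextlib import contextmanager

    @contextmanager
    def lock_timeout(lock, timeout=300):
        # Raise instead of hanging forever on a stuck lock.
        if not lock.acquire(timeout=timeout):
            raise Exception("Lock timeout exceeded (%ss)" % timeout)
        try:
            yield lock
        finally:
            lock.release()
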
diff --git a/bitbake/lib/bb/ui/uihelper.py b/bitbake/lib/bb/ui/uihelper.py
index 48d808ae28..82913e0da8 100644
--- a/bitbake/lib/bb/ui/uihelper.py
+++ b/bitbake/lib/bb/ui/uihelper.py
@@ -49,9 +49,11 @@ class BBUIHelper:
49 tid = event._fn + ":" + event._task 49 tid = event._fn + ":" + event._task
50 removetid(event.pid, tid) 50 removetid(event.pid, tid)
51 self.failed_tasks.append( { 'title' : "%s %s" % (event._package, event._task)}) 51 self.failed_tasks.append( { 'title' : "%s %s" % (event._package, event._task)})
52 elif isinstance(event, bb.runqueue.runQueueTaskStarted): 52 elif isinstance(event, bb.runqueue.runQueueTaskStarted) or isinstance(event, bb.runqueue.sceneQueueTaskStarted):
53 self.tasknumber_current = event.stats.completed + event.stats.active + event.stats.failed + 1 53 self.tasknumber_current = event.stats.completed + event.stats.active + event.stats.failed
54 self.tasknumber_total = event.stats.total 54 self.tasknumber_total = event.stats.total
55 self.setscene_current = event.stats.setscene_active + event.stats.setscene_covered + event.stats.setscene_notcovered
56 self.setscene_total = event.stats.setscene_total
55 self.needUpdate = True 57 self.needUpdate = True
56 elif isinstance(event, bb.build.TaskProgress): 58 elif isinstance(event, bb.build.TaskProgress):
57 if event.pid > 0 and event.pid in self.pidmap: 59 if event.pid > 0 and event.pid in self.pidmap:
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py
index b282d09abf..ebee65d3dd 100644
--- a/bitbake/lib/bb/utils.py
+++ b/bitbake/lib/bb/utils.py
@@ -13,10 +13,12 @@ import errno
13import logging 13import logging
14import bb 14import bb
15import bb.msg 15import bb.msg
16import locale
16import multiprocessing 17import multiprocessing
17import fcntl 18import fcntl
18import importlib 19import importlib
19from importlib import machinery 20import importlib.machinery
21import importlib.util
20import itertools 22import itertools
21import subprocess 23import subprocess
22import glob 24import glob
@@ -26,6 +28,11 @@ import errno
26import signal 28import signal
27import collections 29import collections
28import copy 30import copy
31import ctypes
32import random
33import socket
34import struct
35import tempfile
29from subprocess import getstatusoutput 36from subprocess import getstatusoutput
30from contextlib import contextmanager 37from contextlib import contextmanager
31from ctypes import cdll 38from ctypes import cdll
@@ -43,7 +50,7 @@ def clean_context():
43 50
44def get_context(): 51def get_context():
45 return _context 52 return _context
46 53
47 54
48def set_context(ctx): 55def set_context(ctx):
49 _context = ctx 56 _context = ctx
@@ -205,8 +212,8 @@ def explode_dep_versions2(s, *, sort=True):
205 inversion = True 212 inversion = True
206 # This list is based on behavior and supported comparisons from deb, opkg and rpm. 213 # This list is based on behavior and supported comparisons from deb, opkg and rpm.
207 # 214 #
208 # Even though =<, <<, ==, !=, =>, and >> may not be supported, 215 # Even though =<, <<, ==, !=, =>, and >> may not be supported,
209 # we list each possibly valid item. 216 # we list each possibly valid item.
210 # The build system is responsible for validation of what it supports. 217 # The build system is responsible for validation of what it supports.
211 if i.startswith(('<=', '=<', '<<', '==', '!=', '>=', '=>', '>>')): 218 if i.startswith(('<=', '=<', '<<', '==', '!=', '>=', '=>', '>>')):
212 lastcmp = i[0:2] 219 lastcmp = i[0:2]
@@ -251,7 +258,7 @@ def explode_dep_versions(s):
251 """ 258 """
252 Take an RDEPENDS style string of format: 259 Take an RDEPENDS style string of format:
253 "DEPEND1 (optional version) DEPEND2 (optional version) ..." 260 "DEPEND1 (optional version) DEPEND2 (optional version) ..."
254 skip null value and items appeared in dependancy string multiple times 261 skip null value and items appeared in dependency string multiple times
255 and return a dictionary of dependencies and versions. 262 and return a dictionary of dependencies and versions.
256 """ 263 """
257 r = explode_dep_versions2(s) 264 r = explode_dep_versions2(s)
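
A usage sketch for the two parsers, assuming bitbake's lib/ directory is on sys.path; the expected values follow the docstrings above:

import bb.utils

# explode_dep_versions2() keeps every constraint in a list per dependency
deps = bb.utils.explode_dep_versions2("foo (>= 1.0) bar")
# expected: {'foo': ['>= 1.0'], 'bar': []}

# explode_dep_versions() flattens that to a single version (or None)
deps = bb.utils.explode_dep_versions("foo (>= 1.0) bar")
# expected: {'foo': '>= 1.0', 'bar': None}
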
@@ -340,7 +347,7 @@ def _print_exception(t, value, tb, realfile, text, context):
340 exception = traceback.format_exception_only(t, value) 347 exception = traceback.format_exception_only(t, value)
341 error.append('Error executing a python function in %s:\n' % realfile) 348 error.append('Error executing a python function in %s:\n' % realfile)
342 349
343 # Strip 'us' from the stack (better_exec call) unless that was where the 350 # Strip 'us' from the stack (better_exec call) unless that was where the
344 # error came from 351 # error came from
345 if tb.tb_next is not None: 352 if tb.tb_next is not None:
346 tb = tb.tb_next 353 tb = tb.tb_next
@@ -379,7 +386,7 @@ def _print_exception(t, value, tb, realfile, text, context):
379 386
380 error.append("Exception: %s" % ''.join(exception)) 387 error.append("Exception: %s" % ''.join(exception))
381 388
382 # If the exception is from spwaning a task, let's be helpful and display 389 # If the exception is from spawning a task, let's be helpful and display
383 # the output (which hopefully includes stderr). 390 # the output (which hopefully includes stderr).
384 if isinstance(value, subprocess.CalledProcessError) and value.output: 391 if isinstance(value, subprocess.CalledProcessError) and value.output:
385 error.append("Subprocess output:") 392 error.append("Subprocess output:")
@@ -400,7 +407,7 @@ def better_exec(code, context, text = None, realfile = "<code>", pythonexception
400 code = better_compile(code, realfile, realfile) 407 code = better_compile(code, realfile, realfile)
401 try: 408 try:
402 exec(code, get_context(), context) 409 exec(code, get_context(), context)
403 except (bb.BBHandledException, bb.parse.SkipRecipe, bb.data_smart.ExpansionError): 410 except (bb.BBHandledException, bb.parse.SkipRecipe, bb.data_smart.ExpansionError, bb.process.ExecutionError):
404 # Error already shown so passthrough, no need for traceback 411 # Error already shown so passthrough, no need for traceback
405 raise 412 raise
406 except Exception as e: 413 except Exception as e:
@@ -427,12 +434,14 @@ def better_eval(source, locals, extraglobals = None):
427 return eval(source, ctx, locals) 434 return eval(source, ctx, locals)
428 435
429@contextmanager 436@contextmanager
430def fileslocked(files): 437def fileslocked(files, *args, **kwargs):
431 """Context manager for locking and unlocking file locks.""" 438 """Context manager for locking and unlocking file locks."""
432 locks = [] 439 locks = []
433 if files: 440 if files:
434 for lockfile in files: 441 for lockfile in files:
435 locks.append(bb.utils.lockfile(lockfile)) 442 l = bb.utils.lockfile(lockfile, *args, **kwargs)
443 if l is not None:
444 locks.append(l)
436 445
437 try: 446 try:
438 yield 447 yield
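
A short usage sketch for the extended fileslocked() signature: the extra positional/keyword arguments are forwarded to bb.utils.lockfile(), so callers can request shared or non-blocking locks (the path is illustrative):

import bb.utils

with bb.utils.fileslocked(["/tmp/example.lock"], shared=True):
    # critical section; a shared (reader) lock is held here
    pass
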
@@ -451,9 +460,16 @@ def lockfile(name, shared=False, retry=True, block=False):
451 consider the possibility of sending a signal to the process to break 460 consider the possibility of sending a signal to the process to break
452 out - at which point you want block=True rather than retry=True. 461 out - at which point you want block=True rather than retry=True.
453 """ 462 """
463 basename = os.path.basename(name)
464 if len(basename) > 255:
465 root, ext = os.path.splitext(basename)
466 basename = root[:255 - len(ext)] + ext
467
454 dirname = os.path.dirname(name) 468 dirname = os.path.dirname(name)
455 mkdirhier(dirname) 469 mkdirhier(dirname)
456 470
471 name = os.path.join(dirname, basename)
472
457 if not os.access(dirname, os.W_OK): 473 if not os.access(dirname, os.W_OK):
458 logger.error("Unable to acquire lock '%s', directory is not writable", 474 logger.error("Unable to acquire lock '%s', directory is not writable",
459 name) 475 name)
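
The truncation above keeps lock file names under the common Linux NAME_MAX of 255 bytes while preserving the extension. A standalone sketch of the same idea (the function name is illustrative):

import os

def clamp_basename(path, limit=255):
    head, base = os.path.split(path)
    if len(base) > limit:
        root, ext = os.path.splitext(base)
        base = root[:limit - len(ext)] + ext
    return os.path.join(head, base)
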
@@ -487,7 +503,7 @@ def lockfile(name, shared=False, retry=True, block=False):
487 return lf 503 return lf
488 lf.close() 504 lf.close()
489 except OSError as e: 505 except OSError as e:
490 if e.errno == errno.EACCES: 506 if e.errno == errno.EACCES or e.errno == errno.ENAMETOOLONG:
491 logger.error("Unable to acquire lock '%s', %s", 507 logger.error("Unable to acquire lock '%s', %s",
 492 e.strerror, name) 508 name, e.strerror)
493 sys.exit(1) 509 sys.exit(1)
@@ -532,7 +548,12 @@ def md5_file(filename):
532 Return the hex string representation of the MD5 checksum of filename. 548 Return the hex string representation of the MD5 checksum of filename.
533 """ 549 """
534 import hashlib 550 import hashlib
535 return _hasher(hashlib.md5(), filename) 551 try:
552 sig = hashlib.new('MD5', usedforsecurity=False)
553 except TypeError:
 554 # Some Python configurations don't support the usedforsecurity keyword
555 sig = hashlib.new('MD5')
556 return _hasher(sig, filename)
536 557
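
The try/except above is the usual FIPS-compatibility pattern: newer Python versions accept usedforsecurity=False, which permits MD5 for plain checksumming on FIPS-enabled hosts, while older versions raise TypeError for the extra keyword. A standalone sketch:

import hashlib

def non_security_hasher(algo="md5"):
    try:
        # Python 3.9+: mark the digest as not used for security purposes
        return hashlib.new(algo, usedforsecurity=False)
    except TypeError:
        # Older Python: the keyword is not supported
        return hashlib.new(algo)
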
537def sha256_file(filename): 558def sha256_file(filename):
538 """ 559 """
@@ -583,11 +604,25 @@ def preserved_envvars():
583 v = [ 604 v = [
584 'BBPATH', 605 'BBPATH',
585 'BB_PRESERVE_ENV', 606 'BB_PRESERVE_ENV',
586 'BB_ENV_WHITELIST', 607 'BB_ENV_PASSTHROUGH_ADDITIONS',
587 'BB_ENV_EXTRAWHITE',
588 ] 608 ]
589 return v + preserved_envvars_exported() 609 return v + preserved_envvars_exported()
590 610
611def check_system_locale():
612 """Make sure the required system locale are available and configured"""
613 default_locale = locale.getlocale(locale.LC_CTYPE)
614
615 try:
616 locale.setlocale(locale.LC_CTYPE, ("en_US", "UTF-8"))
 617 except locale.Error:
618 sys.exit("Please make sure locale 'en_US.UTF-8' is available on your system")
619 else:
620 locale.setlocale(locale.LC_CTYPE, default_locale)
621
622 if sys.getfilesystemencoding() != "utf-8":
623 sys.exit("Please use a locale setting which supports UTF-8 (such as LANG=en_US.UTF-8).\n"
624 "Python can't change the filesystem locale after loading so we need a UTF-8 when Python starts or things won't work.")
625
591def filter_environment(good_vars): 626def filter_environment(good_vars):
592 """ 627 """
593 Create a pristine environment for bitbake. This will remove variables that 628 Create a pristine environment for bitbake. This will remove variables that
@@ -615,21 +650,21 @@ def filter_environment(good_vars):
615 650
616def approved_variables(): 651def approved_variables():
617 """ 652 """
618 Determine and return the list of whitelisted variables which are approved 653 Determine and return the list of variables which are approved
619 to remain in the environment. 654 to remain in the environment.
620 """ 655 """
621 if 'BB_PRESERVE_ENV' in os.environ: 656 if 'BB_PRESERVE_ENV' in os.environ:
622 return os.environ.keys() 657 return os.environ.keys()
623 approved = [] 658 approved = []
624 if 'BB_ENV_WHITELIST' in os.environ: 659 if 'BB_ENV_PASSTHROUGH' in os.environ:
625 approved = os.environ['BB_ENV_WHITELIST'].split() 660 approved = os.environ['BB_ENV_PASSTHROUGH'].split()
626 approved.extend(['BB_ENV_WHITELIST']) 661 approved.extend(['BB_ENV_PASSTHROUGH'])
627 else: 662 else:
628 approved = preserved_envvars() 663 approved = preserved_envvars()
629 if 'BB_ENV_EXTRAWHITE' in os.environ: 664 if 'BB_ENV_PASSTHROUGH_ADDITIONS' in os.environ:
630 approved.extend(os.environ['BB_ENV_EXTRAWHITE'].split()) 665 approved.extend(os.environ['BB_ENV_PASSTHROUGH_ADDITIONS'].split())
631 if 'BB_ENV_EXTRAWHITE' not in approved: 666 if 'BB_ENV_PASSTHROUGH_ADDITIONS' not in approved:
632 approved.extend(['BB_ENV_EXTRAWHITE']) 667 approved.extend(['BB_ENV_PASSTHROUGH_ADDITIONS'])
633 return approved 668 return approved
634 669
635def clean_environment(): 670def clean_environment():
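
An illustration of the renamed passthrough variables (values are examples only): BB_ENV_PASSTHROUGH replaces BB_ENV_WHITELIST, and BB_ENV_PASSTHROUGH_ADDITIONS replaces BB_ENV_EXTRAWHITE:

import os
import bb.utils

os.environ["BB_ENV_PASSTHROUGH_ADDITIONS"] = "SSTATE_DIR DL_DIR"
approved = bb.utils.approved_variables()
# approved now contains SSTATE_DIR and DL_DIR alongside the defaults
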
@@ -683,8 +718,8 @@ def remove(path, recurse=False, ionice=False):
683 return 718 return
684 if recurse: 719 if recurse:
685 for name in glob.glob(path): 720 for name in glob.glob(path):
686 if _check_unsafe_delete_path(path): 721 if _check_unsafe_delete_path(name):
687 raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % path) 722 raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % name)
 688 # shutil.rmtree(name) would be ideal but it's too slow 723 # shutil.rmtree(name) would be ideal but it's too slow
689 cmd = [] 724 cmd = []
690 if ionice: 725 if ionice:
@@ -710,9 +745,9 @@ def prunedir(topdir, ionice=False):
710# but that's possibly insane and suffixes is probably going to be small 745# but that's possibly insane and suffixes is probably going to be small
711# 746#
712def prune_suffix(var, suffixes, d): 747def prune_suffix(var, suffixes, d):
713 """ 748 """
714 See if var ends with any of the suffixes listed and 749 See if var ends with any of the suffixes listed and
715 remove it if found 750 remove it if found
716 """ 751 """
717 for suffix in suffixes: 752 for suffix in suffixes:
718 if suffix and var.endswith(suffix): 753 if suffix and var.endswith(suffix):
@@ -723,7 +758,8 @@ def mkdirhier(directory):
723 """Create a directory like 'mkdir -p', but does not complain if 758 """Create a directory like 'mkdir -p', but does not complain if
 724 the directory already exists, unlike os.makedirs 759 the directory already exists, unlike os.makedirs
725 """ 760 """
726 761 if '${' in str(directory):
 762 bb.fatal("Directory name {} contains an unexpanded bitbake variable. This may cause build failures and WORKDIR pollution.".format(directory))
727 try: 763 try:
728 os.makedirs(directory) 764 os.makedirs(directory)
729 except OSError as e: 765 except OSError as e:
@@ -742,7 +778,7 @@ def movefile(src, dest, newmtime = None, sstat = None):
742 if not sstat: 778 if not sstat:
743 sstat = os.lstat(src) 779 sstat = os.lstat(src)
744 except Exception as e: 780 except Exception as e:
745 print("movefile: Stating source file failed...", e) 781 logger.warning("movefile: Stating source file failed...", e)
746 return None 782 return None
747 783
748 destexists = 1 784 destexists = 1
@@ -770,7 +806,7 @@ def movefile(src, dest, newmtime = None, sstat = None):
770 os.unlink(src) 806 os.unlink(src)
771 return os.lstat(dest) 807 return os.lstat(dest)
772 except Exception as e: 808 except Exception as e:
773 print("movefile: failed to properly create symlink:", dest, "->", target, e) 809 logger.warning("movefile: failed to properly create symlink:", dest, "->", target, e)
774 return None 810 return None
775 811
776 renamefailed = 1 812 renamefailed = 1
@@ -782,12 +818,12 @@ def movefile(src, dest, newmtime = None, sstat = None):
782 818
783 if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]: 819 if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]:
784 try: 820 try:
785 os.rename(src, destpath) 821 bb.utils.rename(src, destpath)
786 renamefailed = 0 822 renamefailed = 0
787 except Exception as e: 823 except Exception as e:
788 if e.errno != errno.EXDEV: 824 if e.errno != errno.EXDEV:
789 # Some random error. 825 # Some random error.
790 print("movefile: Failed to move", src, "to", dest, e) 826 logger.warning("movefile: Failed to move", src, "to", dest, e)
791 return None 827 return None
792 # Invalid cross-device-link 'bind' mounted or actually Cross-Device 828 # Invalid cross-device-link 'bind' mounted or actually Cross-Device
793 829
@@ -796,16 +832,16 @@ def movefile(src, dest, newmtime = None, sstat = None):
796 if stat.S_ISREG(sstat[stat.ST_MODE]): 832 if stat.S_ISREG(sstat[stat.ST_MODE]):
797 try: # For safety copy then move it over. 833 try: # For safety copy then move it over.
798 shutil.copyfile(src, destpath + "#new") 834 shutil.copyfile(src, destpath + "#new")
799 os.rename(destpath + "#new", destpath) 835 bb.utils.rename(destpath + "#new", destpath)
800 didcopy = 1 836 didcopy = 1
801 except Exception as e: 837 except Exception as e:
 802 print('movefile: copy', src, '->', dest, 'failed.', e) 838 logger.warning('movefile: copy %s -> %s failed: %s', src, dest, e)
803 return None 839 return None
804 else: 840 else:
805 #we don't yet handle special, so we need to fall back to /bin/mv 841 #we don't yet handle special, so we need to fall back to /bin/mv
806 a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'") 842 a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'")
807 if a[0] != 0: 843 if a[0] != 0:
808 print("movefile: Failed to move special file:" + src + "' to '" + dest + "'", a) 844 logger.warning("movefile: Failed to move special file:" + src + "' to '" + dest + "'", a)
809 return None # failure 845 return None # failure
810 try: 846 try:
811 if didcopy: 847 if didcopy:
@@ -813,7 +849,7 @@ def movefile(src, dest, newmtime = None, sstat = None):
813 os.chmod(destpath, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown 849 os.chmod(destpath, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
814 os.unlink(src) 850 os.unlink(src)
815 except Exception as e: 851 except Exception as e:
816 print("movefile: Failed to chown/chmod/unlink", dest, e) 852 logger.warning("movefile: Failed to chown/chmod/unlink", dest, e)
817 return None 853 return None
818 854
819 if newmtime: 855 if newmtime:
@@ -874,7 +910,7 @@ def copyfile(src, dest, newmtime = None, sstat = None):
874 910
875 # For safety copy then move it over. 911 # For safety copy then move it over.
876 shutil.copyfile(src, dest + "#new") 912 shutil.copyfile(src, dest + "#new")
877 os.rename(dest + "#new", dest) 913 bb.utils.rename(dest + "#new", dest)
878 except Exception as e: 914 except Exception as e:
879 logger.warning("copyfile: copy %s to %s failed (%s)" % (src, dest, e)) 915 logger.warning("copyfile: copy %s to %s failed (%s)" % (src, dest, e))
880 return False 916 return False
@@ -965,13 +1001,16 @@ def umask(new_mask):
965 os.umask(current_mask) 1001 os.umask(current_mask)
966 1002
967def to_boolean(string, default=None): 1003def to_boolean(string, default=None):
968 """ 1004 """
969 Check input string and return boolean value True/False/None 1005 Check input string and return boolean value True/False/None
970 depending upon the checks 1006 depending upon the checks
971 """ 1007 """
972 if not string: 1008 if not string:
973 return default 1009 return default
974 1010
1011 if isinstance(string, int):
1012 return string != 0
1013
975 normalized = string.lower() 1014 normalized = string.lower()
976 if normalized in ("y", "yes", "1", "true"): 1015 if normalized in ("y", "yes", "1", "true"):
977 return True 1016 return True
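
A behaviour sketch for the extended to_boolean(), based only on the code visible above:

import bb.utils

bb.utils.to_boolean("yes")     # True
bb.utils.to_boolean(1)         # True: the new int path, 1 != 0
bb.utils.to_boolean("", True)  # True: empty input returns the default
bb.utils.to_boolean(0)         # None: int 0 is falsy, so the default wins
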
@@ -1103,7 +1142,10 @@ def get_referenced_vars(start_expr, d):
1103 1142
1104 1143
1105def cpu_count(): 1144def cpu_count():
1106 return multiprocessing.cpu_count() 1145 try:
1146 return len(os.sched_getaffinity(0))
1147 except OSError:
1148 return multiprocessing.cpu_count()
1107 1149
1108def nonblockingfd(fd): 1150def nonblockingfd(fd):
1109 fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK) 1151 fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)
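
Why the affinity-aware count matters: under taskset or a container CPU quota, a process may be allowed fewer CPUs than the machine has, and os.sched_getaffinity() (Linux-only, hence the fallback) reports that smaller set:

import multiprocessing
import os

print(multiprocessing.cpu_count())    # CPUs present in the system
print(len(os.sched_getaffinity(0)))   # CPUs this process may actually use
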
@@ -1178,7 +1220,7 @@ def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
1178 variables: a list of variable names to look for. Functions 1220 variables: a list of variable names to look for. Functions
1179 may also be specified, but must be specified with '()' at 1221 may also be specified, but must be specified with '()' at
1180 the end of the name. Note that the function doesn't have 1222 the end of the name. Note that the function doesn't have
1181 any intrinsic understanding of _append, _prepend, _remove, 1223 any intrinsic understanding of :append, :prepend, :remove,
1182 or overrides, so these are considered as part of the name. 1224 or overrides, so these are considered as part of the name.
1183 These values go into a regular expression, so regular 1225 These values go into a regular expression, so regular
1184 expression syntax is allowed. 1226 expression syntax is allowed.
@@ -1590,33 +1632,89 @@ def set_process_name(name):
1590 except: 1632 except:
1591 pass 1633 pass
1592 1634
1635def enable_loopback_networking():
1636 # From bits/ioctls.h
1637 SIOCGIFFLAGS = 0x8913
1638 SIOCSIFFLAGS = 0x8914
1639 SIOCSIFADDR = 0x8916
1640 SIOCSIFNETMASK = 0x891C
1641
1642 # if.h
1643 IFF_UP = 0x1
1644 IFF_RUNNING = 0x40
1645
1646 # bits/socket.h
1647 AF_INET = 2
1648
1649 # char ifr_name[IFNAMSIZ=16]
1650 ifr_name = struct.pack("@16s", b"lo")
1651 def netdev_req(fd, req, data = b""):
1652 # Pad and add interface name
1653 data = ifr_name + data + (b'\x00' * (16 - len(data)))
1654 # Return all data after interface name
1655 return fcntl.ioctl(fd, req, data)[16:]
1656
1657 with socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_IP) as sock:
1658 fd = sock.fileno()
1659
1660 # struct sockaddr_in ifr_addr { unsigned short family; uint16_t sin_port ; uint32_t in_addr; }
1661 req = struct.pack("@H", AF_INET) + struct.pack("=H4B", 0, 127, 0, 0, 1)
1662 netdev_req(fd, SIOCSIFADDR, req)
1663
1664 # short ifr_flags
1665 flags = struct.unpack_from('@h', netdev_req(fd, SIOCGIFFLAGS))[0]
1666 flags |= IFF_UP | IFF_RUNNING
1667 netdev_req(fd, SIOCSIFFLAGS, struct.pack('@h', flags))
1668
1669 # struct sockaddr_in ifr_netmask
1670 req = struct.pack("@H", AF_INET) + struct.pack("=H4B", 0, 255, 0, 0, 0)
1671 netdev_req(fd, SIOCSIFNETMASK, req)
1672
1673def disable_network(uid=None, gid=None):
1674 """
1675 Disable networking in the current process if the kernel supports it, else
1676 just return after logging to debug. To do this we need to create a new user
1677 namespace, then map back to the original uid/gid.
1678 """
1679 libc = ctypes.CDLL('libc.so.6')
1680
1681 # From sched.h
1682 # New user namespace
1683 CLONE_NEWUSER = 0x10000000
1684 # New network namespace
1685 CLONE_NEWNET = 0x40000000
1686
1687 if uid is None:
1688 uid = os.getuid()
1689 if gid is None:
1690 gid = os.getgid()
1691
1692 ret = libc.unshare(CLONE_NEWNET | CLONE_NEWUSER)
1693 if ret != 0:
1694 logger.debug("System doesn't support disabling network without admin privs")
1695 return
1696 with open("/proc/self/uid_map", "w") as f:
1697 f.write("%s %s 1" % (uid, uid))
1698 with open("/proc/self/setgroups", "w") as f:
1699 f.write("deny")
1700 with open("/proc/self/gid_map", "w") as f:
1701 f.write("%s %s 1" % (gid, gid))
1702
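
A hedged usage sketch for the two helpers above: a forked child unshares into a fresh network namespace and then brings loopback back up, so localhost keeps working while everything else is unreachable. The control flow is illustrative:

import os
import bb.utils

pid = os.fork()
if pid == 0:
    bb.utils.disable_network()              # quietly a no-op if the kernel refuses
    bb.utils.enable_loopback_networking()   # 127.0.0.1 stays usable inside the namespace
    # ... run the network-isolated work here ...
    os._exit(0)
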
1593def export_proxies(d): 1703def export_proxies(d):
1704 from bb.fetch2 import get_fetcher_environment
1594 """ export common proxies variables from datastore to environment """ 1705 """ export common proxies variables from datastore to environment """
1595 import os 1706 newenv = get_fetcher_environment(d)
1596 1707 for v in newenv:
1597 variables = ['http_proxy', 'HTTP_PROXY', 'https_proxy', 'HTTPS_PROXY', 1708 os.environ[v] = newenv[v]
1598 'ftp_proxy', 'FTP_PROXY', 'no_proxy', 'NO_PROXY',
1599 'GIT_PROXY_COMMAND']
1600 exported = False
1601
1602 for v in variables:
1603 if v in os.environ.keys():
1604 exported = True
1605 else:
1606 v_proxy = d.getVar(v)
1607 if v_proxy is not None:
1608 os.environ[v] = v_proxy
1609 exported = True
1610
1611 return exported
1612
1613 1709
1614def load_plugins(logger, plugins, pluginpath): 1710def load_plugins(logger, plugins, pluginpath):
1615 def load_plugin(name): 1711 def load_plugin(name):
1616 logger.debug('Loading plugin %s' % name) 1712 logger.debug('Loading plugin %s' % name)
1617 spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] ) 1713 spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] )
1618 if spec: 1714 if spec:
1619 return spec.loader.load_module() 1715 mod = importlib.util.module_from_spec(spec)
1716 spec.loader.exec_module(mod)
1717 return mod
1620 1718
1621 logger.debug('Loading plugins from %s...' % pluginpath) 1719 logger.debug('Loading plugins from %s...' % pluginpath)
1622 1720
@@ -1669,3 +1767,102 @@ def is_semver(version):
1669 return False 1767 return False
1670 1768
1671 return True 1769 return True
1770
1771# Wrapper around os.rename which can handle cross device problems
1772# e.g. from container filesystems
1773def rename(src, dst):
1774 try:
1775 os.rename(src, dst)
1776 except OSError as err:
 1777 if err.errno == errno.EXDEV:
1778 # Invalid cross-device link error
1779 shutil.move(src, dst)
1780 else:
1781 raise err
1782
1783@contextmanager
1784def environment(**envvars):
1785 """
1786 Context manager to selectively update the environment with the specified mapping.
1787 """
1788 backup = dict(os.environ)
1789 try:
1790 os.environ.update(envvars)
1791 yield
1792 finally:
1793 for var in envvars:
1794 if var in backup:
1795 os.environ[var] = backup[var]
1796 elif var in os.environ:
1797 del os.environ[var]
1798
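
A usage sketch for the environment() context manager (the variable and value are examples only):

import bb.utils

with bb.utils.environment(http_proxy="http://proxy.example.com:8080"):
    pass  # os.environ["http_proxy"] is set inside the block
# on exit the previous value, or the variable's absence, is restored
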
1799def is_local_uid(uid=''):
1800 """
1801 Check whether uid is a local one or not.
1802 Can't use pwd module since it gets all UIDs, not local ones only.
1803 """
1804 if not uid:
1805 uid = os.getuid()
1806 with open('/etc/passwd', 'r') as f:
1807 for line in f:
1808 line_split = line.split(':')
1809 if len(line_split) < 3:
1810 continue
1811 if str(uid) == line_split[2]:
1812 return True
1813 return False
1814
1815def mkstemp(suffix=None, prefix=None, dir=None, text=False):
1816 """
1817 Generates a unique filename, independent of time.
1818
1819 mkstemp() in glibc (at least) generates unique file names based on the
1820 current system time. When combined with highly parallel builds, and
1821 operating over NFS (e.g. shared sstate/downloads) this can result in
1822 conflicts and race conditions.
1823
1824 This function adds additional entropy to the file name so that a collision
1825 is independent of time and thus extremely unlikely.
1826 """
1827 entropy = "".join(random.choices("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890", k=20))
1828 if prefix:
1829 prefix = prefix + entropy
1830 else:
1831 prefix = tempfile.gettempprefix() + entropy
1832 return tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=dir, text=text)
1833
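
Usage sketch: a drop-in replacement for tempfile.mkstemp() whose extra entropy sits in the name prefix, so parallel builders sharing an NFS directory are unlikely to collide even with identical timestamps (the path is illustrative):

import os
import bb.utils

fd, path = bb.utils.mkstemp(suffix=".tmp", dir="/shared/downloads")
os.close(fd)
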
1834def path_is_descendant(descendant, ancestor):
1835 """
1836 Returns True if the path `descendant` is a descendant of `ancestor`
1837 (including being equivalent to `ancestor` itself). Otherwise returns False.
1838 Correctly accounts for symlinks, bind mounts, etc. by using
 1839 os.path.samestat() to compare paths.
 1840
 1841 May raise any exception that os.stat() raises.
1842 """
1843
1844 ancestor_stat = os.stat(ancestor)
1845
1846 # Recurse up each directory component of the descendant to see if it is
1847 # equivalent to the ancestor
1848 check_dir = os.path.abspath(descendant).rstrip("/")
1849 while check_dir:
1850 check_stat = os.stat(check_dir)
1851 if os.path.samestat(check_stat, ancestor_stat):
1852 return True
1853 check_dir = os.path.dirname(check_dir).rstrip("/")
1854
1855 return False
1856
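
A behaviour sketch for path_is_descendant(); the paths are illustrative and must exist, since the function stats each component:

import bb.utils

bb.utils.path_is_descendant("/usr/bin", "/usr")   # True
bb.utils.path_is_descendant("/usr", "/usr")       # True: a path is its own descendant
bb.utils.path_is_descendant("/var", "/usr")       # False
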
1857# If we don't have a timeout of some kind and a process/thread exits badly (for example,
1858# OOM killed) while holding a lock, we'd hang in the lock futex forever. It is better to
1859# exit at some point than to hang. Five minutes with no progress means we're probably deadlocked.
1860@contextmanager
1861def lock_timeout(lock):
1862 held = lock.acquire(timeout=5*60)
1863 try:
1864 if not held:
1865 os._exit(1)
1866 yield held
1867 finally:
1868 lock.release()
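
Usage sketch for lock_timeout(): it wraps any lock exposing acquire(timeout=...), so a peer dying while holding the lock cannot hang us forever:

import threading
import bb.utils

lock = threading.Lock()
with bb.utils.lock_timeout(lock):
    pass  # do work; the process exits if the lock cannot be taken in 5 minutes
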
diff --git a/bitbake/lib/bb/xattr.py b/bitbake/lib/bb/xattr.py
new file mode 100755
index 0000000000..7b634944a4
--- /dev/null
+++ b/bitbake/lib/bb/xattr.py
@@ -0,0 +1,126 @@
1#! /usr/bin/env python3
2#
3# Copyright 2023 by Garmin Ltd. or its subsidiaries
4#
5# SPDX-License-Identifier: MIT
6
7import sys
8import ctypes
9import os
10import errno
11
12libc = ctypes.CDLL("libc.so.6", use_errno=True)
13fsencoding = sys.getfilesystemencoding()
14
15
16libc.listxattr.argtypes = [ctypes.c_char_p, ctypes.c_char_p, ctypes.c_size_t]
17libc.llistxattr.argtypes = [ctypes.c_char_p, ctypes.c_char_p, ctypes.c_size_t]
18
19
20def listxattr(path, follow=True):
21 func = libc.listxattr if follow else libc.llistxattr
22
23 os_path = os.fsencode(path)
24
25 while True:
26 length = func(os_path, None, 0)
27
28 if length < 0:
29 err = ctypes.get_errno()
30 raise OSError(err, os.strerror(err), str(path))
31
32 if length == 0:
33 return []
34
35 arr = ctypes.create_string_buffer(length)
36
37 read_length = func(os_path, arr, length)
38 if read_length != length:
39 # Race!
40 continue
41
42 return [a.decode(fsencoding) for a in arr.raw.split(b"\x00") if a]
43
44
45libc.getxattr.argtypes = [
46 ctypes.c_char_p,
47 ctypes.c_char_p,
48 ctypes.c_char_p,
49 ctypes.c_size_t,
50]
51libc.lgetxattr.argtypes = [
52 ctypes.c_char_p,
53 ctypes.c_char_p,
54 ctypes.c_char_p,
55 ctypes.c_size_t,
56]
57
58
59def getxattr(path, name, follow=True):
60 func = libc.getxattr if follow else libc.lgetxattr
61
62 os_path = os.fsencode(path)
63 os_name = os.fsencode(name)
64
65 while True:
66 length = func(os_path, os_name, None, 0)
67
68 if length < 0:
69 err = ctypes.get_errno()
70 if err == errno.ENODATA:
71 return None
72 raise OSError(err, os.strerror(err), str(path))
73
74 if length == 0:
75 return ""
76
77 arr = ctypes.create_string_buffer(length)
78
79 read_length = func(os_path, os_name, arr, length)
80 if read_length != length:
81 # Race!
82 continue
83
84 return arr.raw
85
86
87def get_all_xattr(path, follow=True):
88 attrs = {}
89
90 names = listxattr(path, follow)
91
92 for name in names:
93 value = getxattr(path, name, follow)
94 if value is None:
95 # This can happen if a value is erased after listxattr is called,
96 # so ignore it
97 continue
98 attrs[name] = value
99
100 return attrs
101
102
103def main():
104 import argparse
105 from pathlib import Path
106
107 parser = argparse.ArgumentParser()
108 parser.add_argument("path", help="File Path", type=Path)
109
110 args = parser.parse_args()
111
112 attrs = get_all_xattr(args.path)
113
114 for name, value in attrs.items():
115 try:
116 value = value.decode(fsencoding)
117 except UnicodeDecodeError:
118 pass
119
120 print(f"{name} = {value}")
121
122 return 0
123
124
125if __name__ == "__main__":
126 sys.exit(main())
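
A usage sketch for the new module on a Linux host; the file and attribute are created here purely for the demo (os.setxattr is the stdlib counterpart):

import os
from bb import xattr

with open("demo.txt", "w"):
    pass
os.setxattr("demo.txt", "user.comment", b"hello")
print(xattr.listxattr("demo.txt"))                 # ['user.comment']
print(xattr.getxattr("demo.txt", "user.comment"))  # b'hello'
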