diff options
629 files changed, 9738 insertions, 16145 deletions
diff --git a/bitbake/bin/bitbake-hashclient b/bitbake/bin/bitbake-hashclient index 610787ed2b..5d6f67046b 100755 --- a/bitbake/bin/bitbake-hashclient +++ b/bitbake/bin/bitbake-hashclient | |||
@@ -16,6 +16,7 @@ import time | |||
16 | import warnings | 16 | import warnings |
17 | import netrc | 17 | import netrc |
18 | import json | 18 | import json |
19 | import statistics | ||
19 | warnings.simplefilter("default") | 20 | warnings.simplefilter("default") |
20 | 21 | ||
21 | try: | 22 | try: |
@@ -81,6 +82,7 @@ def main(): | |||
81 | nonlocal found_hashes | 82 | nonlocal found_hashes |
82 | nonlocal missed_hashes | 83 | nonlocal missed_hashes |
83 | nonlocal max_time | 84 | nonlocal max_time |
85 | nonlocal times | ||
84 | 86 | ||
85 | with hashserv.create_client(args.address) as client: | 87 | with hashserv.create_client(args.address) as client: |
86 | for i in range(args.requests): | 88 | for i in range(args.requests): |
@@ -98,29 +100,41 @@ def main(): | |||
98 | else: | 100 | else: |
99 | missed_hashes += 1 | 101 | missed_hashes += 1 |
100 | 102 | ||
101 | max_time = max(elapsed, max_time) | 103 | times.append(elapsed) |
102 | pbar.update() | 104 | pbar.update() |
103 | 105 | ||
104 | max_time = 0 | 106 | max_time = 0 |
105 | found_hashes = 0 | 107 | found_hashes = 0 |
106 | missed_hashes = 0 | 108 | missed_hashes = 0 |
107 | lock = threading.Lock() | 109 | lock = threading.Lock() |
108 | total_requests = args.clients * args.requests | 110 | times = [] |
109 | start_time = time.perf_counter() | 111 | start_time = time.perf_counter() |
110 | with ProgressBar(total=total_requests) as pbar: | 112 | with ProgressBar(total=args.clients * args.requests) as pbar: |
111 | threads = [threading.Thread(target=thread_main, args=(pbar, lock), daemon=False) for _ in range(args.clients)] | 113 | threads = [threading.Thread(target=thread_main, args=(pbar, lock), daemon=False) for _ in range(args.clients)] |
112 | for t in threads: | 114 | for t in threads: |
113 | t.start() | 115 | t.start() |
114 | 116 | ||
115 | for t in threads: | 117 | for t in threads: |
116 | t.join() | 118 | t.join() |
119 | total_elapsed = time.perf_counter() - start_time | ||
117 | 120 | ||
118 | elapsed = time.perf_counter() - start_time | ||
119 | with lock: | 121 | with lock: |
120 | print("%d requests in %.1fs. %.1f requests per second" % (total_requests, elapsed, total_requests / elapsed)) | 122 | mean = statistics.mean(times) |
121 | print("Average request time %.8fs" % (elapsed / total_requests)) | 123 | median = statistics.median(times) |
122 | print("Max request time was %.8fs" % max_time) | 124 | stddev = statistics.pstdev(times) |
123 | print("Found %d hashes, missed %d" % (found_hashes, missed_hashes)) | 125 | |
126 | print(f"Number of clients: {args.clients}") | ||
127 | print(f"Requests per client: {args.requests}") | ||
128 | print(f"Number of requests: {len(times)}") | ||
129 | print(f"Total elapsed time: {total_elapsed:.3f}s") | ||
130 | print(f"Total request rate: {len(times)/total_elapsed:.3f} req/s") | ||
131 | print(f"Average request time: {mean:.3f}s") | ||
132 | print(f"Median request time: {median:.3f}s") | ||
133 | print(f"Request time std dev: {stddev:.3f}s") | ||
134 | print(f"Maximum request time: {max(times):.3f}s") | ||
135 | print(f"Minimum request time: {min(times):.3f}s") | ||
136 | print(f"Hashes found: {found_hashes}") | ||
137 | print(f"Hashes missed: {missed_hashes}") | ||
124 | 138 | ||
125 | if args.report: | 139 | if args.report: |
126 | with ProgressBar(total=args.requests) as pbar: | 140 | with ProgressBar(total=args.requests) as pbar: |
@@ -225,6 +239,32 @@ def main(): | |||
225 | print("true" if result else "false") | 239 | print("true" if result else "false") |
226 | return 0 | 240 | return 0 |
227 | 241 | ||
242 | def handle_ping(args, client): | ||
243 | times = [] | ||
244 | for i in range(1, args.count + 1): | ||
245 | if not args.quiet: | ||
246 | print(f"Ping {i} of {args.count}... ", end="") | ||
247 | start_time = time.perf_counter() | ||
248 | client.ping() | ||
249 | elapsed = time.perf_counter() - start_time | ||
250 | times.append(elapsed) | ||
251 | if not args.quiet: | ||
252 | print(f"{elapsed:.3f}s") | ||
253 | |||
254 | mean = statistics.mean(times) | ||
255 | median = statistics.median(times) | ||
256 | std_dev = statistics.pstdev(times) | ||
257 | |||
258 | if not args.quiet: | ||
259 | print("------------------------") | ||
260 | print(f"Number of pings: {len(times)}") | ||
261 | print(f"Average round trip time: {mean:.3f}s") | ||
262 | print(f"Median round trip time: {median:.3f}s") | ||
263 | print(f"Round trip time std dev: {std_dev:.3f}s") | ||
264 | print(f"Min time is: {min(times):.3f}s") | ||
265 | print(f"Max time is: {max(times):.3f}s") | ||
266 | return 0 | ||
267 | |||
228 | parser = argparse.ArgumentParser(description='Hash Equivalence Client') | 268 | parser = argparse.ArgumentParser(description='Hash Equivalence Client') |
229 | parser.add_argument('--address', default=DEFAULT_ADDRESS, help='Server address (default "%(default)s")') | 269 | parser.add_argument('--address', default=DEFAULT_ADDRESS, help='Server address (default "%(default)s")') |
230 | parser.add_argument('--log', default='WARNING', help='Set logging level') | 270 | parser.add_argument('--log', default='WARNING', help='Set logging level') |
@@ -322,6 +362,11 @@ def main(): | |||
322 | unihash_exists_parser.add_argument("unihash", help="Unihash to check") | 362 | unihash_exists_parser.add_argument("unihash", help="Unihash to check") |
323 | unihash_exists_parser.set_defaults(func=handle_unihash_exists) | 363 | unihash_exists_parser.set_defaults(func=handle_unihash_exists) |
324 | 364 | ||
365 | ping_parser = subparsers.add_parser('ping', help="Ping server") | ||
366 | ping_parser.add_argument("-n", "--count", type=int, help="Number of pings. Default is %(default)s", default=10) | ||
367 | ping_parser.add_argument("-q", "--quiet", action="store_true", help="Don't print each ping; only print results") | ||
368 | ping_parser.set_defaults(func=handle_ping) | ||
369 | |||
325 | args = parser.parse_args() | 370 | args = parser.parse_args() |
326 | 371 | ||
327 | logger = logging.getLogger('hashserv') | 372 | logger = logging.getLogger('hashserv') |
diff --git a/bitbake/bin/bitbake-hashserv b/bitbake/bin/bitbake-hashserv index 4bfb7abfbc..01503736b9 100755 --- a/bitbake/bin/bitbake-hashserv +++ b/bitbake/bin/bitbake-hashserv | |||
@@ -125,6 +125,11 @@ The following permissions are supported by the server: | |||
125 | default=os.environ.get("HASHSERVER_ADMIN_PASSWORD", None), | 125 | default=os.environ.get("HASHSERVER_ADMIN_PASSWORD", None), |
126 | help="Create default admin user with password ADMIN_PASSWORD ($HASHSERVER_ADMIN_PASSWORD)", | 126 | help="Create default admin user with password ADMIN_PASSWORD ($HASHSERVER_ADMIN_PASSWORD)", |
127 | ) | 127 | ) |
128 | parser.add_argument( | ||
129 | "--reuseport", | ||
130 | action="store_true", | ||
131 | help="Enable SO_REUSEPORT, allowing multiple servers to bind to the same port for load balancing", | ||
132 | ) | ||
128 | 133 | ||
129 | args = parser.parse_args() | 134 | args = parser.parse_args() |
130 | 135 | ||
@@ -132,7 +137,9 @@ The following permissions are supported by the server: | |||
132 | 137 | ||
133 | level = getattr(logging, args.log.upper(), None) | 138 | level = getattr(logging, args.log.upper(), None) |
134 | if not isinstance(level, int): | 139 | if not isinstance(level, int): |
135 | raise ValueError("Invalid log level: %s (Try ERROR/WARNING/INFO/DEBUG)" % args.log) | 140 | raise ValueError( |
141 | "Invalid log level: %s (Try ERROR/WARNING/INFO/DEBUG)" % args.log | ||
142 | ) | ||
136 | 143 | ||
137 | logger.setLevel(level) | 144 | logger.setLevel(level) |
138 | console = logging.StreamHandler() | 145 | console = logging.StreamHandler() |
@@ -155,6 +162,7 @@ The following permissions are supported by the server: | |||
155 | anon_perms=anon_perms, | 162 | anon_perms=anon_perms, |
156 | admin_username=args.admin_user, | 163 | admin_username=args.admin_user, |
157 | admin_password=args.admin_password, | 164 | admin_password=args.admin_password, |
165 | reuseport=args.reuseport, | ||
158 | ) | 166 | ) |
159 | server.serve_forever() | 167 | server.serve_forever() |
160 | return 0 | 168 | return 0 |
diff --git a/bitbake/bin/bitbake-prserv b/bitbake/bin/bitbake-prserv index ad0a069401..580e021fda 100755 --- a/bitbake/bin/bitbake-prserv +++ b/bitbake/bin/bitbake-prserv | |||
@@ -16,11 +16,18 @@ sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), "lib | |||
16 | import prserv | 16 | import prserv |
17 | import prserv.serv | 17 | import prserv.serv |
18 | 18 | ||
19 | VERSION = "1.1.0" | 19 | VERSION = "2.0.0" |
20 | 20 | ||
21 | PRHOST_DEFAULT="0.0.0.0" | 21 | PRHOST_DEFAULT="0.0.0.0" |
22 | PRPORT_DEFAULT=8585 | 22 | PRPORT_DEFAULT=8585 |
23 | 23 | ||
24 | def init_logger(logfile, loglevel): | ||
25 | numeric_level = getattr(logging, loglevel.upper(), None) | ||
26 | if not isinstance(numeric_level, int): | ||
27 | raise ValueError("Invalid log level: %s" % loglevel) | ||
28 | FORMAT = "%(asctime)-15s %(message)s" | ||
29 | logging.basicConfig(level=numeric_level, filename=logfile, format=FORMAT) | ||
30 | |||
24 | def main(): | 31 | def main(): |
25 | parser = argparse.ArgumentParser( | 32 | parser = argparse.ArgumentParser( |
26 | description="BitBake PR Server. Version=%s" % VERSION, | 33 | description="BitBake PR Server. Version=%s" % VERSION, |
@@ -70,12 +77,25 @@ def main(): | |||
70 | action="store_true", | 77 | action="store_true", |
71 | help="open database in read-only mode", | 78 | help="open database in read-only mode", |
72 | ) | 79 | ) |
80 | parser.add_argument( | ||
81 | "-u", | ||
82 | "--upstream", | ||
83 | default=os.environ.get("PRSERVER_UPSTREAM", None), | ||
84 | help="Upstream PR service (host:port)", | ||
85 | ) | ||
73 | 86 | ||
74 | args = parser.parse_args() | 87 | args = parser.parse_args() |
75 | prserv.init_logger(os.path.abspath(args.log), args.loglevel) | 88 | init_logger(os.path.abspath(args.log), args.loglevel) |
76 | 89 | ||
77 | if args.start: | 90 | if args.start: |
78 | ret=prserv.serv.start_daemon(args.file, args.host, args.port, os.path.abspath(args.log), args.read_only) | 91 | ret=prserv.serv.start_daemon( |
92 | args.file, | ||
93 | args.host, | ||
94 | args.port, | ||
95 | os.path.abspath(args.log), | ||
96 | args.read_only, | ||
97 | args.upstream | ||
98 | ) | ||
79 | elif args.stop: | 99 | elif args.stop: |
80 | ret=prserv.serv.stop_daemon(args.host, args.port) | 100 | ret=prserv.serv.stop_daemon(args.host, args.port) |
81 | else: | 101 | else: |
diff --git a/bitbake/bin/bitbake-selftest b/bitbake/bin/bitbake-selftest index f25f23b1ae..ce901232fe 100755 --- a/bitbake/bin/bitbake-selftest +++ b/bitbake/bin/bitbake-selftest | |||
@@ -15,6 +15,7 @@ import unittest | |||
15 | try: | 15 | try: |
16 | import bb | 16 | import bb |
17 | import hashserv | 17 | import hashserv |
18 | import prserv | ||
18 | import layerindexlib | 19 | import layerindexlib |
19 | except RuntimeError as exc: | 20 | except RuntimeError as exc: |
20 | sys.exit(str(exc)) | 21 | sys.exit(str(exc)) |
@@ -33,6 +34,7 @@ tests = ["bb.tests.codeparser", | |||
33 | "bb.tests.utils", | 34 | "bb.tests.utils", |
34 | "bb.tests.compression", | 35 | "bb.tests.compression", |
35 | "hashserv.tests", | 36 | "hashserv.tests", |
37 | "prserv.tests", | ||
36 | "layerindexlib.tests.layerindexobj", | 38 | "layerindexlib.tests.layerindexobj", |
37 | "layerindexlib.tests.restapi", | 39 | "layerindexlib.tests.restapi", |
38 | "layerindexlib.tests.cooker"] | 40 | "layerindexlib.tests.cooker"] |
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py index 8b6ea2d8ed..574e0de5be 100644 --- a/bitbake/lib/bb/__init__.py +++ b/bitbake/lib/bb/__init__.py | |||
@@ -36,6 +36,7 @@ class BBHandledException(Exception): | |||
36 | 36 | ||
37 | import os | 37 | import os |
38 | import logging | 38 | import logging |
39 | from collections import namedtuple | ||
39 | 40 | ||
40 | 41 | ||
41 | class NullHandler(logging.Handler): | 42 | class NullHandler(logging.Handler): |
@@ -103,26 +104,6 @@ class BBLoggerAdapter(logging.LoggerAdapter, BBLoggerMixin): | |||
103 | self.setup_bblogger(logger.name) | 104 | self.setup_bblogger(logger.name) |
104 | super().__init__(logger, *args, **kwargs) | 105 | super().__init__(logger, *args, **kwargs) |
105 | 106 | ||
106 | if sys.version_info < (3, 6): | ||
107 | # These properties were added in Python 3.6. Add them in older versions | ||
108 | # for compatibility | ||
109 | @property | ||
110 | def manager(self): | ||
111 | return self.logger.manager | ||
112 | |||
113 | @manager.setter | ||
114 | def manager(self, value): | ||
115 | self.logger.manager = value | ||
116 | |||
117 | @property | ||
118 | def name(self): | ||
119 | return self.logger.name | ||
120 | |||
121 | def __repr__(self): | ||
122 | logger = self.logger | ||
123 | level = logger.getLevelName(logger.getEffectiveLevel()) | ||
124 | return '<%s %s (%s)>' % (self.__class__.__name__, logger.name, level) | ||
125 | |||
126 | logging.LoggerAdapter = BBLoggerAdapter | 107 | logging.LoggerAdapter = BBLoggerAdapter |
127 | 108 | ||
128 | logger = logging.getLogger("BitBake") | 109 | logger = logging.getLogger("BitBake") |
@@ -227,3 +208,14 @@ def deprecate_import(current, modulename, fromlist, renames = None): | |||
227 | 208 | ||
228 | setattr(sys.modules[current], newname, newobj) | 209 | setattr(sys.modules[current], newname, newobj) |
229 | 210 | ||
211 | TaskData = namedtuple("TaskData", [ | ||
212 | "pn", | ||
213 | "taskname", | ||
214 | "fn", | ||
215 | "deps", | ||
216 | "provides", | ||
217 | "taskhash", | ||
218 | "unihash", | ||
219 | "hashfn", | ||
220 | "taskhash_deps", | ||
221 | ]) | ||
diff --git a/bitbake/lib/bb/asyncrpc/__init__.py b/bitbake/lib/bb/asyncrpc/__init__.py index 639e1607f8..a4371643d7 100644 --- a/bitbake/lib/bb/asyncrpc/__init__.py +++ b/bitbake/lib/bb/asyncrpc/__init__.py | |||
@@ -5,7 +5,7 @@ | |||
5 | # | 5 | # |
6 | 6 | ||
7 | 7 | ||
8 | from .client import AsyncClient, Client, ClientPool | 8 | from .client import AsyncClient, Client |
9 | from .serv import AsyncServer, AsyncServerConnection | 9 | from .serv import AsyncServer, AsyncServerConnection |
10 | from .connection import DEFAULT_MAX_CHUNK | 10 | from .connection import DEFAULT_MAX_CHUNK |
11 | from .exceptions import ( | 11 | from .exceptions import ( |
diff --git a/bitbake/lib/bb/asyncrpc/client.py b/bitbake/lib/bb/asyncrpc/client.py index 65f3f8964d..11179b0fcb 100644 --- a/bitbake/lib/bb/asyncrpc/client.py +++ b/bitbake/lib/bb/asyncrpc/client.py | |||
@@ -25,6 +25,9 @@ ADDR_TYPE_TCP = 1 | |||
25 | ADDR_TYPE_WS = 2 | 25 | ADDR_TYPE_WS = 2 |
26 | 26 | ||
27 | WEBSOCKETS_MIN_VERSION = (9, 1) | 27 | WEBSOCKETS_MIN_VERSION = (9, 1) |
28 | # Need websockets 10 with python 3.10+ | ||
29 | if sys.version_info >= (3, 10, 0): | ||
30 | WEBSOCKETS_MIN_VERSION = (10, 0) | ||
28 | 31 | ||
29 | 32 | ||
30 | def parse_address(addr): | 33 | def parse_address(addr): |
@@ -247,85 +250,9 @@ class Client(object): | |||
247 | def close(self): | 250 | def close(self): |
248 | if self.loop: | 251 | if self.loop: |
249 | self.loop.run_until_complete(self.client.close()) | 252 | self.loop.run_until_complete(self.client.close()) |
250 | if sys.version_info >= (3, 6): | ||
251 | self.loop.run_until_complete(self.loop.shutdown_asyncgens()) | ||
252 | self.loop.close() | ||
253 | self.loop = None | ||
254 | |||
255 | def __enter__(self): | ||
256 | return self | ||
257 | |||
258 | def __exit__(self, exc_type, exc_value, traceback): | ||
259 | self.close() | ||
260 | return False | ||
261 | |||
262 | |||
263 | class ClientPool(object): | ||
264 | def __init__(self, max_clients): | ||
265 | self.avail_clients = [] | ||
266 | self.num_clients = 0 | ||
267 | self.max_clients = max_clients | ||
268 | self.loop = None | ||
269 | self.client_condition = None | ||
270 | |||
271 | @abc.abstractmethod | ||
272 | async def _new_client(self): | ||
273 | raise NotImplementedError("Must be implemented in derived class") | ||
274 | |||
275 | def close(self): | ||
276 | if self.client_condition: | ||
277 | self.client_condition = None | ||
278 | |||
279 | if self.loop: | ||
280 | self.loop.run_until_complete(self.__close_clients()) | ||
281 | self.loop.run_until_complete(self.loop.shutdown_asyncgens()) | 253 | self.loop.run_until_complete(self.loop.shutdown_asyncgens()) |
282 | self.loop.close() | 254 | self.loop.close() |
283 | self.loop = None | 255 | self.loop = None |
284 | |||
285 | def run_tasks(self, tasks): | ||
286 | if not self.loop: | ||
287 | self.loop = asyncio.new_event_loop() | ||
288 | |||
289 | thread = Thread(target=self.__thread_main, args=(tasks,)) | ||
290 | thread.start() | ||
291 | thread.join() | ||
292 | |||
293 | @contextlib.asynccontextmanager | ||
294 | async def get_client(self): | ||
295 | async with self.client_condition: | ||
296 | if self.avail_clients: | ||
297 | client = self.avail_clients.pop() | ||
298 | elif self.num_clients < self.max_clients: | ||
299 | self.num_clients += 1 | ||
300 | client = await self._new_client() | ||
301 | else: | ||
302 | while not self.avail_clients: | ||
303 | await self.client_condition.wait() | ||
304 | client = self.avail_clients.pop() | ||
305 | |||
306 | try: | ||
307 | yield client | ||
308 | finally: | ||
309 | async with self.client_condition: | ||
310 | self.avail_clients.append(client) | ||
311 | self.client_condition.notify() | ||
312 | |||
313 | def __thread_main(self, tasks): | ||
314 | async def process_task(task): | ||
315 | async with self.get_client() as client: | ||
316 | await task(client) | ||
317 | |||
318 | asyncio.set_event_loop(self.loop) | ||
319 | if not self.client_condition: | ||
320 | self.client_condition = asyncio.Condition() | ||
321 | tasks = [process_task(t) for t in tasks] | ||
322 | self.loop.run_until_complete(asyncio.gather(*tasks)) | ||
323 | |||
324 | async def __close_clients(self): | ||
325 | for c in self.avail_clients: | ||
326 | await c.close() | ||
327 | self.avail_clients = [] | ||
328 | self.num_clients = 0 | ||
329 | 256 | ||
330 | def __enter__(self): | 257 | def __enter__(self): |
331 | return self | 258 | return self |
diff --git a/bitbake/lib/bb/asyncrpc/serv.py b/bitbake/lib/bb/asyncrpc/serv.py index a66117acad..667217c5c1 100644 --- a/bitbake/lib/bb/asyncrpc/serv.py +++ b/bitbake/lib/bb/asyncrpc/serv.py | |||
@@ -138,14 +138,20 @@ class StreamServer(object): | |||
138 | 138 | ||
139 | 139 | ||
140 | class TCPStreamServer(StreamServer): | 140 | class TCPStreamServer(StreamServer): |
141 | def __init__(self, host, port, handler, logger): | 141 | def __init__(self, host, port, handler, logger, *, reuseport=False): |
142 | super().__init__(handler, logger) | 142 | super().__init__(handler, logger) |
143 | self.host = host | 143 | self.host = host |
144 | self.port = port | 144 | self.port = port |
145 | self.reuseport = reuseport | ||
145 | 146 | ||
146 | def start(self, loop): | 147 | def start(self, loop): |
147 | self.server = loop.run_until_complete( | 148 | self.server = loop.run_until_complete( |
148 | asyncio.start_server(self.handle_stream_client, self.host, self.port) | 149 | asyncio.start_server( |
150 | self.handle_stream_client, | ||
151 | self.host, | ||
152 | self.port, | ||
153 | reuse_port=self.reuseport, | ||
154 | ) | ||
149 | ) | 155 | ) |
150 | 156 | ||
151 | for s in self.server.sockets: | 157 | for s in self.server.sockets: |
@@ -209,11 +215,12 @@ class UnixStreamServer(StreamServer): | |||
209 | 215 | ||
210 | 216 | ||
211 | class WebsocketsServer(object): | 217 | class WebsocketsServer(object): |
212 | def __init__(self, host, port, handler, logger): | 218 | def __init__(self, host, port, handler, logger, *, reuseport=False): |
213 | self.host = host | 219 | self.host = host |
214 | self.port = port | 220 | self.port = port |
215 | self.handler = handler | 221 | self.handler = handler |
216 | self.logger = logger | 222 | self.logger = logger |
223 | self.reuseport = reuseport | ||
217 | 224 | ||
218 | def start(self, loop): | 225 | def start(self, loop): |
219 | import websockets.server | 226 | import websockets.server |
@@ -224,6 +231,7 @@ class WebsocketsServer(object): | |||
224 | self.host, | 231 | self.host, |
225 | self.port, | 232 | self.port, |
226 | ping_interval=None, | 233 | ping_interval=None, |
234 | reuse_port=self.reuseport, | ||
227 | ) | 235 | ) |
228 | ) | 236 | ) |
229 | 237 | ||
@@ -262,14 +270,26 @@ class AsyncServer(object): | |||
262 | self.loop = None | 270 | self.loop = None |
263 | self.run_tasks = [] | 271 | self.run_tasks = [] |
264 | 272 | ||
265 | def start_tcp_server(self, host, port): | 273 | def start_tcp_server(self, host, port, *, reuseport=False): |
266 | self.server = TCPStreamServer(host, port, self._client_handler, self.logger) | 274 | self.server = TCPStreamServer( |
275 | host, | ||
276 | port, | ||
277 | self._client_handler, | ||
278 | self.logger, | ||
279 | reuseport=reuseport, | ||
280 | ) | ||
267 | 281 | ||
268 | def start_unix_server(self, path): | 282 | def start_unix_server(self, path): |
269 | self.server = UnixStreamServer(path, self._client_handler, self.logger) | 283 | self.server = UnixStreamServer(path, self._client_handler, self.logger) |
270 | 284 | ||
271 | def start_websocket_server(self, host, port): | 285 | def start_websocket_server(self, host, port, reuseport=False): |
272 | self.server = WebsocketsServer(host, port, self._client_handler, self.logger) | 286 | self.server = WebsocketsServer( |
287 | host, | ||
288 | port, | ||
289 | self._client_handler, | ||
290 | self.logger, | ||
291 | reuseport=reuseport, | ||
292 | ) | ||
273 | 293 | ||
274 | async def _client_handler(self, socket): | 294 | async def _client_handler(self, socket): |
275 | address = socket.address | 295 | address = socket.address |
@@ -368,8 +388,7 @@ class AsyncServer(object): | |||
368 | 388 | ||
369 | self._serve_forever(tasks) | 389 | self._serve_forever(tasks) |
370 | 390 | ||
371 | if sys.version_info >= (3, 6): | 391 | self.loop.run_until_complete(self.loop.shutdown_asyncgens()) |
372 | self.loop.run_until_complete(self.loop.shutdown_asyncgens()) | ||
373 | self.loop.close() | 392 | self.loop.close() |
374 | 393 | ||
375 | queue = multiprocessing.Queue() | 394 | queue = multiprocessing.Queue() |
diff --git a/bitbake/lib/bb/codeparser.py b/bitbake/lib/bb/codeparser.py index 2e8b7ced3c..691bdff75e 100644 --- a/bitbake/lib/bb/codeparser.py +++ b/bitbake/lib/bb/codeparser.py | |||
@@ -484,19 +484,34 @@ class ShellParser(): | |||
484 | """ | 484 | """ |
485 | 485 | ||
486 | words = list(words) | 486 | words = list(words) |
487 | for word in list(words): | 487 | for word in words: |
488 | wtree = pyshlex.make_wordtree(word[1]) | 488 | wtree = pyshlex.make_wordtree(word[1]) |
489 | for part in wtree: | 489 | for part in wtree: |
490 | if not isinstance(part, list): | 490 | if not isinstance(part, list): |
491 | continue | 491 | continue |
492 | 492 | ||
493 | if part[0] in ('`', '$('): | 493 | candidates = [part] |
494 | command = pyshlex.wordtree_as_string(part[1:-1]) | 494 | |
495 | self._parse_shell(command) | 495 | # If command is of type: |
496 | 496 | # | |
497 | if word[0] in ("cmd_name", "cmd_word"): | 497 | # var="... $(cmd [...]) ..." |
498 | if word in words: | 498 | # |
499 | words.remove(word) | 499 | # Then iterate on what's between the quotes and if we find a |
500 | # list, make that what we check for below. | ||
501 | if len(part) >= 3 and part[0] == '"': | ||
502 | for p in part[1:-1]: | ||
503 | if isinstance(p, list): | ||
504 | candidates.append(p) | ||
505 | |||
506 | for candidate in candidates: | ||
507 | if len(candidate) >= 2: | ||
508 | if candidate[0] in ('`', '$('): | ||
509 | command = pyshlex.wordtree_as_string(candidate[1:-1]) | ||
510 | self._parse_shell(command) | ||
511 | |||
512 | if word[0] in ("cmd_name", "cmd_word"): | ||
513 | if word in words: | ||
514 | words.remove(word) | ||
500 | 515 | ||
501 | usetoken = False | 516 | usetoken = False |
502 | for word in words: | 517 | for word in words: |
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py index 939a999974..6754f986bf 100644 --- a/bitbake/lib/bb/cooker.py +++ b/bitbake/lib/bb/cooker.py | |||
@@ -680,14 +680,14 @@ class BBCooker: | |||
680 | bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data) | 680 | bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data) |
681 | return taskdata, runlist | 681 | return taskdata, runlist |
682 | 682 | ||
683 | def prepareTreeData(self, pkgs_to_build, task): | 683 | def prepareTreeData(self, pkgs_to_build, task, halt=False): |
684 | """ | 684 | """ |
685 | Prepare a runqueue and taskdata object for iteration over pkgs_to_build | 685 | Prepare a runqueue and taskdata object for iteration over pkgs_to_build |
686 | """ | 686 | """ |
687 | 687 | ||
688 | # We set halt to False here to prevent unbuildable targets raising | 688 | # We set halt to False here to prevent unbuildable targets raising |
689 | # an exception when we're just generating data | 689 | # an exception when we're just generating data |
690 | taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True) | 690 | taskdata, runlist = self.buildTaskData(pkgs_to_build, task, halt, allowincomplete=True) |
691 | 691 | ||
692 | return runlist, taskdata | 692 | return runlist, taskdata |
693 | 693 | ||
@@ -701,7 +701,7 @@ class BBCooker: | |||
701 | if not task.startswith("do_"): | 701 | if not task.startswith("do_"): |
702 | task = "do_%s" % task | 702 | task = "do_%s" % task |
703 | 703 | ||
704 | runlist, taskdata = self.prepareTreeData(pkgs_to_build, task) | 704 | runlist, taskdata = self.prepareTreeData(pkgs_to_build, task, halt=True) |
705 | rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist) | 705 | rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist) |
706 | rq.rqdata.prepare() | 706 | rq.rqdata.prepare() |
707 | return self.buildDependTree(rq, taskdata) | 707 | return self.buildDependTree(rq, taskdata) |
@@ -1459,7 +1459,6 @@ class BBCooker: | |||
1459 | 1459 | ||
1460 | if t in task or getAllTaskSignatures: | 1460 | if t in task or getAllTaskSignatures: |
1461 | try: | 1461 | try: |
1462 | rq.rqdata.prepare_task_hash(tid) | ||
1463 | sig.append([pn, t, rq.rqdata.get_task_unihash(tid)]) | 1462 | sig.append([pn, t, rq.rqdata.get_task_unihash(tid)]) |
1464 | except KeyError: | 1463 | except KeyError: |
1465 | sig.append(self.getTaskSignatures(target, [t])[0]) | 1464 | sig.append(self.getTaskSignatures(target, [t])[0]) |
@@ -1813,8 +1812,8 @@ class CookerCollectFiles(object): | |||
1813 | bb.event.fire(CookerExit(), eventdata) | 1812 | bb.event.fire(CookerExit(), eventdata) |
1814 | 1813 | ||
1815 | # We need to track where we look so that we can know when the cache is invalid. There | 1814 | # We need to track where we look so that we can know when the cache is invalid. There |
1816 | # is no nice way to do this, this is horrid. We intercept the os.listdir() | 1815 | # is no nice way to do this, this is horrid. We intercept the os.listdir() and os.scandir() |
1817 | # (or os.scandir() for python 3.6+) calls while we run glob(). | 1816 | # calls while we run glob(). |
1818 | origlistdir = os.listdir | 1817 | origlistdir = os.listdir |
1819 | if hasattr(os, 'scandir'): | 1818 | if hasattr(os, 'scandir'): |
1820 | origscandir = os.scandir | 1819 | origscandir = os.scandir |
@@ -2225,9 +2224,8 @@ class CookerParser(object): | |||
2225 | 2224 | ||
2226 | for process in self.processes: | 2225 | for process in self.processes: |
2227 | process.join() | 2226 | process.join() |
2228 | # Added in 3.7, cleans up zombies | 2227 | # clean up zombies |
2229 | if hasattr(process, "close"): | 2228 | process.close() |
2230 | process.close() | ||
2231 | 2229 | ||
2232 | bb.codeparser.parser_cache_save() | 2230 | bb.codeparser.parser_cache_save() |
2233 | bb.codeparser.parser_cache_savemerge() | 2231 | bb.codeparser.parser_cache_savemerge() |
@@ -2237,12 +2235,13 @@ class CookerParser(object): | |||
2237 | profiles = [] | 2235 | profiles = [] |
2238 | for i in self.process_names: | 2236 | for i in self.process_names: |
2239 | logfile = "profile-parse-%s.log" % i | 2237 | logfile = "profile-parse-%s.log" % i |
2240 | if os.path.exists(logfile): | 2238 | if os.path.exists(logfile) and os.path.getsize(logfile): |
2241 | profiles.append(logfile) | 2239 | profiles.append(logfile) |
2242 | 2240 | ||
2243 | pout = "profile-parse.log.processed" | 2241 | if profiles: |
2244 | bb.utils.process_profilelog(profiles, pout = pout) | 2242 | pout = "profile-parse.log.processed" |
2245 | print("Processed parsing statistics saved to %s" % (pout)) | 2243 | bb.utils.process_profilelog(profiles, pout = pout) |
2244 | print("Processed parsing statistics saved to %s" % (pout)) | ||
2246 | 2245 | ||
2247 | def final_cleanup(self): | 2246 | def final_cleanup(self): |
2248 | if self.syncthread: | 2247 | if self.syncthread: |
diff --git a/bitbake/lib/bb/fetch2/npmsw.py b/bitbake/lib/bb/fetch2/npmsw.py index ff5f8dc755..b55e885d7b 100644 --- a/bitbake/lib/bb/fetch2/npmsw.py +++ b/bitbake/lib/bb/fetch2/npmsw.py | |||
@@ -268,7 +268,7 @@ class NpmShrinkWrap(FetchMethod): | |||
268 | 268 | ||
269 | def unpack(self, ud, rootdir, d): | 269 | def unpack(self, ud, rootdir, d): |
270 | """Unpack the downloaded dependencies""" | 270 | """Unpack the downloaded dependencies""" |
271 | destdir = d.getVar("S") | 271 | destdir = rootdir |
272 | destsuffix = ud.parm.get("destsuffix") | 272 | destsuffix = ud.parm.get("destsuffix") |
273 | if destsuffix: | 273 | if destsuffix: |
274 | destdir = os.path.join(rootdir, destsuffix) | 274 | destdir = os.path.join(rootdir, destsuffix) |
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py index fbfa6938ac..d76b1d0d38 100644 --- a/bitbake/lib/bb/fetch2/wget.py +++ b/bitbake/lib/bb/fetch2/wget.py | |||
@@ -134,6 +134,15 @@ class Wget(FetchMethod): | |||
134 | 134 | ||
135 | self._runwget(ud, d, fetchcmd, False) | 135 | self._runwget(ud, d, fetchcmd, False) |
136 | 136 | ||
137 | # Sanity check since wget can pretend it succeed when it didn't | ||
138 | # Also, this used to happen if sourceforge sent us to the mirror page | ||
139 | if not os.path.exists(localpath): | ||
140 | raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, localpath), uri) | ||
141 | |||
142 | if os.path.getsize(localpath) == 0: | ||
143 | os.remove(localpath) | ||
144 | raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (uri), uri) | ||
145 | |||
137 | # Try and verify any checksum now, meaning if it isn't correct, we don't remove the | 146 | # Try and verify any checksum now, meaning if it isn't correct, we don't remove the |
138 | # original file, which might be a race (imagine two recipes referencing the same | 147 | # original file, which might be a race (imagine two recipes referencing the same |
139 | # source, one with an incorrect checksum) | 148 | # source, one with an incorrect checksum) |
@@ -143,15 +152,6 @@ class Wget(FetchMethod): | |||
143 | # Our lock prevents multiple writers but mirroring code may grab incomplete files | 152 | # Our lock prevents multiple writers but mirroring code may grab incomplete files |
144 | os.rename(localpath, localpath[:-4]) | 153 | os.rename(localpath, localpath[:-4]) |
145 | 154 | ||
146 | # Sanity check since wget can pretend it succeed when it didn't | ||
147 | # Also, this used to happen if sourceforge sent us to the mirror page | ||
148 | if not os.path.exists(ud.localpath): | ||
149 | raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri) | ||
150 | |||
151 | if os.path.getsize(ud.localpath) == 0: | ||
152 | os.remove(ud.localpath) | ||
153 | raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (uri), uri) | ||
154 | |||
155 | return True | 155 | return True |
156 | 156 | ||
157 | def checkstatus(self, fetch, ud, d, try_again=True): | 157 | def checkstatus(self, fetch, ud, d, try_again=True): |
diff --git a/bitbake/lib/bb/parse/__init__.py b/bitbake/lib/bb/parse/__init__.py index a4358f1374..7ffdaa6fd7 100644 --- a/bitbake/lib/bb/parse/__init__.py +++ b/bitbake/lib/bb/parse/__init__.py | |||
@@ -49,20 +49,23 @@ class SkipPackage(SkipRecipe): | |||
49 | __mtime_cache = {} | 49 | __mtime_cache = {} |
50 | def cached_mtime(f): | 50 | def cached_mtime(f): |
51 | if f not in __mtime_cache: | 51 | if f not in __mtime_cache: |
52 | __mtime_cache[f] = os.stat(f)[stat.ST_MTIME] | 52 | res = os.stat(f) |
53 | __mtime_cache[f] = (res.st_mtime_ns, res.st_size, res.st_ino) | ||
53 | return __mtime_cache[f] | 54 | return __mtime_cache[f] |
54 | 55 | ||
55 | def cached_mtime_noerror(f): | 56 | def cached_mtime_noerror(f): |
56 | if f not in __mtime_cache: | 57 | if f not in __mtime_cache: |
57 | try: | 58 | try: |
58 | __mtime_cache[f] = os.stat(f)[stat.ST_MTIME] | 59 | res = os.stat(f) |
60 | __mtime_cache[f] = (res.st_mtime_ns, res.st_size, res.st_ino) | ||
59 | except OSError: | 61 | except OSError: |
60 | return 0 | 62 | return 0 |
61 | return __mtime_cache[f] | 63 | return __mtime_cache[f] |
62 | 64 | ||
63 | def check_mtime(f, mtime): | 65 | def check_mtime(f, mtime): |
64 | try: | 66 | try: |
65 | current_mtime = os.stat(f)[stat.ST_MTIME] | 67 | res = os.stat(f) |
68 | current_mtime = (res.st_mtime_ns, res.st_size, res.st_ino) | ||
66 | __mtime_cache[f] = current_mtime | 69 | __mtime_cache[f] = current_mtime |
67 | except OSError: | 70 | except OSError: |
68 | current_mtime = 0 | 71 | current_mtime = 0 |
@@ -70,7 +73,8 @@ def check_mtime(f, mtime): | |||
70 | 73 | ||
71 | def update_mtime(f): | 74 | def update_mtime(f): |
72 | try: | 75 | try: |
73 | __mtime_cache[f] = os.stat(f)[stat.ST_MTIME] | 76 | res = os.stat(f) |
77 | __mtime_cache[f] = (res.st_mtime_ns, res.st_size, res.st_ino) | ||
74 | except OSError: | 78 | except OSError: |
75 | if f in __mtime_cache: | 79 | if f in __mtime_cache: |
76 | del __mtime_cache[f] | 80 | del __mtime_cache[f] |
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py index 6b43f303d5..93079a9776 100644 --- a/bitbake/lib/bb/runqueue.py +++ b/bitbake/lib/bb/runqueue.py | |||
@@ -1273,27 +1273,41 @@ class RunQueueData: | |||
1273 | 1273 | ||
1274 | bb.parse.siggen.set_setscene_tasks(self.runq_setscene_tids) | 1274 | bb.parse.siggen.set_setscene_tasks(self.runq_setscene_tids) |
1275 | 1275 | ||
1276 | starttime = time.time() | ||
1277 | lasttime = starttime | ||
1278 | |||
1276 | # Iterate over the task list and call into the siggen code | 1279 | # Iterate over the task list and call into the siggen code |
1277 | dealtwith = set() | 1280 | dealtwith = set() |
1278 | todeal = set(self.runtaskentries) | 1281 | todeal = set(self.runtaskentries) |
1279 | while todeal: | 1282 | while todeal: |
1283 | ready = set() | ||
1280 | for tid in todeal.copy(): | 1284 | for tid in todeal.copy(): |
1281 | if not (self.runtaskentries[tid].depends - dealtwith): | 1285 | if not (self.runtaskentries[tid].depends - dealtwith): |
1282 | dealtwith.add(tid) | 1286 | self.runtaskentries[tid].taskhash_deps = bb.parse.siggen.prep_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches) |
1283 | todeal.remove(tid) | 1287 | # get_taskhash for a given tid *must* be called before get_unihash* below |
1284 | self.prepare_task_hash(tid) | 1288 | self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches) |
1285 | bb.event.check_for_interrupts(self.cooker.data) | 1289 | ready.add(tid) |
1290 | unihashes = bb.parse.siggen.get_unihashes(ready) | ||
1291 | for tid in ready: | ||
1292 | dealtwith.add(tid) | ||
1293 | todeal.remove(tid) | ||
1294 | self.runtaskentries[tid].unihash = unihashes[tid] | ||
1295 | |||
1296 | bb.event.check_for_interrupts(self.cooker.data) | ||
1297 | |||
1298 | if time.time() > (lasttime + 30): | ||
1299 | lasttime = time.time() | ||
1300 | hashequiv_logger.verbose("Initial setup loop progress: %s of %s in %s" % (len(todeal), len(self.runtaskentries), lasttime - starttime)) | ||
1301 | |||
1302 | endtime = time.time() | ||
1303 | if (endtime-starttime > 60): | ||
1304 | hashequiv_logger.verbose("Initial setup loop took: %s" % (endtime-starttime)) | ||
1286 | 1305 | ||
1287 | bb.parse.siggen.writeout_file_checksum_cache() | 1306 | bb.parse.siggen.writeout_file_checksum_cache() |
1288 | 1307 | ||
1289 | #self.dump_data() | 1308 | #self.dump_data() |
1290 | return len(self.runtaskentries) | 1309 | return len(self.runtaskentries) |
1291 | 1310 | ||
1292 | def prepare_task_hash(self, tid): | ||
1293 | self.runtaskentries[tid].taskhash_deps = bb.parse.siggen.prep_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches) | ||
1294 | self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches) | ||
1295 | self.runtaskentries[tid].unihash = bb.parse.siggen.get_unihash(tid) | ||
1296 | |||
1297 | def dump_data(self): | 1311 | def dump_data(self): |
1298 | """ | 1312 | """ |
1299 | Dump some debug information on the internal data structures | 1313 | Dump some debug information on the internal data structures |
@@ -2438,15 +2452,17 @@ class RunQueueExecute: | |||
2438 | taskdepdata_cache = {} | 2452 | taskdepdata_cache = {} |
2439 | for task in self.rqdata.runtaskentries: | 2453 | for task in self.rqdata.runtaskentries: |
2440 | (mc, fn, taskname, taskfn) = split_tid_mcfn(task) | 2454 | (mc, fn, taskname, taskfn) = split_tid_mcfn(task) |
2441 | pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn] | 2455 | taskdepdata_cache[task] = bb.TaskData( |
2442 | deps = self.rqdata.runtaskentries[task].depends | 2456 | pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn], |
2443 | provides = self.rqdata.dataCaches[mc].fn_provides[taskfn] | 2457 | taskname = taskname, |
2444 | taskhash = self.rqdata.runtaskentries[task].hash | 2458 | fn = fn, |
2445 | unihash = self.rqdata.runtaskentries[task].unihash | 2459 | deps = self.filtermcdeps(task, mc, self.rqdata.runtaskentries[task].depends), |
2446 | deps = self.filtermcdeps(task, mc, deps) | 2460 | provides = self.rqdata.dataCaches[mc].fn_provides[taskfn], |
2447 | hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn] | 2461 | taskhash = self.rqdata.runtaskentries[task].hash, |
2448 | taskhash_deps = self.rqdata.runtaskentries[task].taskhash_deps | 2462 | unihash = self.rqdata.runtaskentries[task].unihash, |
2449 | taskdepdata_cache[task] = [pn, taskname, fn, deps, provides, taskhash, unihash, hashfn, taskhash_deps] | 2463 | hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn], |
2464 | taskhash_deps = self.rqdata.runtaskentries[task].taskhash_deps, | ||
2465 | ) | ||
2450 | 2466 | ||
2451 | self.taskdepdata_cache = taskdepdata_cache | 2467 | self.taskdepdata_cache = taskdepdata_cache |
2452 | 2468 | ||
@@ -2461,9 +2477,11 @@ class RunQueueExecute: | |||
2461 | while next: | 2477 | while next: |
2462 | additional = [] | 2478 | additional = [] |
2463 | for revdep in next: | 2479 | for revdep in next: |
2464 | self.taskdepdata_cache[revdep][6] = self.rqdata.runtaskentries[revdep].unihash | 2480 | self.taskdepdata_cache[revdep] = self.taskdepdata_cache[revdep]._replace( |
2481 | unihash=self.rqdata.runtaskentries[revdep].unihash | ||
2482 | ) | ||
2465 | taskdepdata[revdep] = self.taskdepdata_cache[revdep] | 2483 | taskdepdata[revdep] = self.taskdepdata_cache[revdep] |
2466 | for revdep2 in self.taskdepdata_cache[revdep][3]: | 2484 | for revdep2 in self.taskdepdata_cache[revdep].deps: |
2467 | if revdep2 not in taskdepdata: | 2485 | if revdep2 not in taskdepdata: |
2468 | additional.append(revdep2) | 2486 | additional.append(revdep2) |
2469 | next = additional | 2487 | next = additional |
@@ -2557,17 +2575,28 @@ class RunQueueExecute: | |||
2557 | elif self.rqdata.runtaskentries[p].depends.isdisjoint(total): | 2575 | elif self.rqdata.runtaskentries[p].depends.isdisjoint(total): |
2558 | next.add(p) | 2576 | next.add(p) |
2559 | 2577 | ||
2578 | starttime = time.time() | ||
2579 | lasttime = starttime | ||
2580 | |||
2560 | # When an item doesn't have dependencies in total, we can process it. Drop items from total when handled | 2581 | # When an item doesn't have dependencies in total, we can process it. Drop items from total when handled |
2561 | while next: | 2582 | while next: |
2562 | current = next.copy() | 2583 | current = next.copy() |
2563 | next = set() | 2584 | next = set() |
2585 | ready = {} | ||
2564 | for tid in current: | 2586 | for tid in current: |
2565 | if self.rqdata.runtaskentries[p].depends and not self.rqdata.runtaskentries[tid].depends.isdisjoint(total): | 2587 | if self.rqdata.runtaskentries[p].depends and not self.rqdata.runtaskentries[tid].depends.isdisjoint(total): |
2566 | continue | 2588 | continue |
2589 | # get_taskhash for a given tid *must* be called before get_unihash* below | ||
2590 | ready[tid] = bb.parse.siggen.get_taskhash(tid, self.rqdata.runtaskentries[tid].depends, self.rqdata.dataCaches) | ||
2591 | |||
2592 | unihashes = bb.parse.siggen.get_unihashes(ready.keys()) | ||
2593 | |||
2594 | for tid in ready: | ||
2567 | orighash = self.rqdata.runtaskentries[tid].hash | 2595 | orighash = self.rqdata.runtaskentries[tid].hash |
2568 | newhash = bb.parse.siggen.get_taskhash(tid, self.rqdata.runtaskentries[tid].depends, self.rqdata.dataCaches) | 2596 | newhash = ready[tid] |
2569 | origuni = self.rqdata.runtaskentries[tid].unihash | 2597 | origuni = self.rqdata.runtaskentries[tid].unihash |
2570 | newuni = bb.parse.siggen.get_unihash(tid) | 2598 | newuni = unihashes[tid] |
2599 | |||
2571 | # FIXME, need to check it can come from sstate at all for determinism? | 2600 | # FIXME, need to check it can come from sstate at all for determinism? |
2572 | remapped = False | 2601 | remapped = False |
2573 | if newuni == origuni: | 2602 | if newuni == origuni: |
@@ -2588,6 +2617,15 @@ class RunQueueExecute: | |||
2588 | next |= self.rqdata.runtaskentries[tid].revdeps | 2617 | next |= self.rqdata.runtaskentries[tid].revdeps |
2589 | total.remove(tid) | 2618 | total.remove(tid) |
2590 | next.intersection_update(total) | 2619 | next.intersection_update(total) |
2620 | bb.event.check_for_interrupts(self.cooker.data) | ||
2621 | |||
2622 | if time.time() > (lasttime + 30): | ||
2623 | lasttime = time.time() | ||
2624 | hashequiv_logger.verbose("Rehash loop slow progress: %s in %s" % (len(total), lasttime - starttime)) | ||
2625 | |||
2626 | endtime = time.time() | ||
2627 | if (endtime-starttime > 60): | ||
2628 | hashequiv_logger.verbose("Rehash loop took more than 60s: %s" % (endtime-starttime)) | ||
2591 | 2629 | ||
2592 | if changed: | 2630 | if changed: |
2593 | for mc in self.rq.worker: | 2631 | for mc in self.rq.worker: |
@@ -2807,14 +2845,19 @@ class RunQueueExecute: | |||
2807 | additional = [] | 2845 | additional = [] |
2808 | for revdep in next: | 2846 | for revdep in next: |
2809 | (mc, fn, taskname, taskfn) = split_tid_mcfn(revdep) | 2847 | (mc, fn, taskname, taskfn) = split_tid_mcfn(revdep) |
2810 | pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn] | ||
2811 | deps = getsetscenedeps(revdep) | 2848 | deps = getsetscenedeps(revdep) |
2812 | provides = self.rqdata.dataCaches[mc].fn_provides[taskfn] | 2849 | |
2813 | taskhash = self.rqdata.runtaskentries[revdep].hash | 2850 | taskdepdata[revdep] = bb.TaskData( |
2814 | unihash = self.rqdata.runtaskentries[revdep].unihash | 2851 | pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn], |
2815 | hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn] | 2852 | taskname = taskname, |
2816 | taskhash_deps = self.rqdata.runtaskentries[revdep].taskhash_deps | 2853 | fn = fn, |
2817 | taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash, unihash, hashfn, taskhash_deps] | 2854 | deps = deps, |
2855 | provides = self.rqdata.dataCaches[mc].fn_provides[taskfn], | ||
2856 | taskhash = self.rqdata.runtaskentries[revdep].hash, | ||
2857 | unihash = self.rqdata.runtaskentries[revdep].unihash, | ||
2858 | hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn], | ||
2859 | taskhash_deps = self.rqdata.runtaskentries[revdep].taskhash_deps, | ||
2860 | ) | ||
2818 | for revdep2 in deps: | 2861 | for revdep2 in deps: |
2819 | if revdep2 not in taskdepdata: | 2862 | if revdep2 not in taskdepdata: |
2820 | additional.append(revdep2) | 2863 | additional.append(revdep2) |
diff --git a/bitbake/lib/bb/siggen.py b/bitbake/lib/bb/siggen.py index 03dfda6f3c..92066da00c 100644 --- a/bitbake/lib/bb/siggen.py +++ b/bitbake/lib/bb/siggen.py | |||
@@ -540,7 +540,7 @@ class SignatureGeneratorUniHashMixIn(object): | |||
540 | def __init__(self, data): | 540 | def __init__(self, data): |
541 | self.extramethod = {} | 541 | self.extramethod = {} |
542 | # NOTE: The cache only tracks hashes that exist. Hashes that don't | 542 | # NOTE: The cache only tracks hashes that exist. Hashes that don't |
543 | # exist are always queries from the server since it is possible for | 543 | # exist are always queried from the server since it is possible for |
544 | # hashes to appear over time, but much less likely for them to | 544 | # hashes to appear over time, but much less likely for them to |
545 | # disappear | 545 | # disappear |
546 | self.unihash_exists_cache = set() | 546 | self.unihash_exists_cache = set() |
@@ -558,11 +558,11 @@ class SignatureGeneratorUniHashMixIn(object): | |||
558 | super().__init__(data) | 558 | super().__init__(data) |
559 | 559 | ||
560 | def get_taskdata(self): | 560 | def get_taskdata(self): |
561 | return (self.server, self.method, self.extramethod, self.max_parallel, self.username, self.password, self.env) + super().get_taskdata() | 561 | return (self.server, self.method, self.extramethod, self.username, self.password, self.env) + super().get_taskdata() |
562 | 562 | ||
563 | def set_taskdata(self, data): | 563 | def set_taskdata(self, data): |
564 | self.server, self.method, self.extramethod, self.max_parallel, self.username, self.password, self.env = data[:7] | 564 | self.server, self.method, self.extramethod, self.username, self.password, self.env = data[:6] |
565 | super().set_taskdata(data[7:]) | 565 | super().set_taskdata(data[6:]) |
566 | 566 | ||
567 | def get_hashserv_creds(self): | 567 | def get_hashserv_creds(self): |
568 | if self.username and self.password: | 568 | if self.username and self.password: |
@@ -595,13 +595,6 @@ class SignatureGeneratorUniHashMixIn(object): | |||
595 | self._client = hashserv.create_client(self.server, **self.get_hashserv_creds()) | 595 | self._client = hashserv.create_client(self.server, **self.get_hashserv_creds()) |
596 | yield self._client | 596 | yield self._client |
597 | 597 | ||
598 | @contextmanager | ||
599 | def client_pool(self): | ||
600 | with self._client_env(): | ||
601 | if getattr(self, '_client_pool', None) is None: | ||
602 | self._client_pool = hashserv.client.ClientPool(self.server, self.max_parallel, **self.get_hashserv_creds()) | ||
603 | yield self._client_pool | ||
604 | |||
605 | def reset(self, data): | 598 | def reset(self, data): |
606 | self.__close_clients() | 599 | self.__close_clients() |
607 | return super().reset(data) | 600 | return super().reset(data) |
@@ -678,25 +671,20 @@ class SignatureGeneratorUniHashMixIn(object): | |||
678 | if len(query) == 0: | 671 | if len(query) == 0: |
679 | return {} | 672 | return {} |
680 | 673 | ||
681 | uncached_query = {} | 674 | query_keys = [] |
682 | result = {} | 675 | result = {} |
683 | for key, unihash in query.items(): | 676 | for key, unihash in query.items(): |
684 | if unihash in self.unihash_exists_cache: | 677 | if unihash in self.unihash_exists_cache: |
685 | result[key] = True | 678 | result[key] = True |
686 | else: | 679 | else: |
687 | uncached_query[key] = unihash | 680 | query_keys.append(key) |
688 | 681 | ||
689 | if self.max_parallel <= 1 or len(uncached_query) <= 1: | 682 | if query_keys: |
690 | # No parallelism required. Make the query serially with the single client | ||
691 | with self.client() as client: | 683 | with self.client() as client: |
692 | uncached_result = { | 684 | query_result = client.unihash_exists_batch(query[k] for k in query_keys) |
693 | key: client.unihash_exists(value) for key, value in uncached_query.items() | ||
694 | } | ||
695 | else: | ||
696 | with self.client_pool() as client_pool: | ||
697 | uncached_result = client_pool.unihashes_exist(uncached_query) | ||
698 | 685 | ||
699 | for key, exists in uncached_result.items(): | 686 | for idx, key in enumerate(query_keys): |
687 | exists = query_result[idx] | ||
700 | if exists: | 688 | if exists: |
701 | self.unihash_exists_cache.add(query[key]) | 689 | self.unihash_exists_cache.add(query[key]) |
702 | result[key] = exists | 690 | result[key] = exists |
@@ -712,29 +700,20 @@ class SignatureGeneratorUniHashMixIn(object): | |||
712 | unihash | 700 | unihash |
713 | """ | 701 | """ |
714 | result = {} | 702 | result = {} |
715 | queries = {} | 703 | query_tids = [] |
716 | query_result = {} | ||
717 | 704 | ||
718 | for tid in tids: | 705 | for tid in tids: |
719 | unihash = self.get_cached_unihash(tid) | 706 | unihash = self.get_cached_unihash(tid) |
720 | if unihash: | 707 | if unihash: |
721 | result[tid] = unihash | 708 | result[tid] = unihash |
722 | else: | 709 | else: |
723 | queries[tid] = (self._get_method(tid), self.taskhash[tid]) | 710 | query_tids.append(tid) |
724 | 711 | ||
725 | if len(queries) == 0: | 712 | if query_tids: |
726 | return result | ||
727 | |||
728 | if self.max_parallel <= 1 or len(queries) <= 1: | ||
729 | # No parallelism required. Make the query serially with the single client | ||
730 | with self.client() as client: | 713 | with self.client() as client: |
731 | for tid, args in queries.items(): | 714 | unihashes = client.get_unihash_batch((self._get_method(tid), self.taskhash[tid]) for tid in query_tids) |
732 | query_result[tid] = client.get_unihash(*args) | ||
733 | else: | ||
734 | with self.client_pool() as client_pool: | ||
735 | query_result = client_pool.get_unihashes(queries) | ||
736 | 715 | ||
737 | for tid, unihash in query_result.items(): | 716 | for idx, tid in enumerate(query_tids): |
738 | # In the absence of being able to discover a unique hash from the | 717 | # In the absence of being able to discover a unique hash from the |
739 | # server, make it be equivalent to the taskhash. The unique "hash" only | 718 | # server, make it be equivalent to the taskhash. The unique "hash" only |
740 | # really needs to be a unique string (not even necessarily a hash), but | 719 | # really needs to be a unique string (not even necessarily a hash), but |
@@ -749,6 +728,8 @@ class SignatureGeneratorUniHashMixIn(object): | |||
749 | # to the server, there is a better chance that they will agree on | 728 | # to the server, there is a better chance that they will agree on |
750 | # the unique hash. | 729 | # the unique hash. |
751 | taskhash = self.taskhash[tid] | 730 | taskhash = self.taskhash[tid] |
731 | unihash = unihashes[idx] | ||
732 | |||
752 | if unihash: | 733 | if unihash: |
753 | # A unique hash equal to the taskhash is not very interesting, | 734 | # A unique hash equal to the taskhash is not very interesting, |
754 | # so it is reported it at debug level 2. If they differ, that | 735 | # so it is reported it at debug level 2. If they differ, that |
@@ -895,7 +876,6 @@ class SignatureGeneratorTestEquivHash(SignatureGeneratorUniHashMixIn, SignatureG | |||
895 | super().init_rundepcheck(data) | 876 | super().init_rundepcheck(data) |
896 | self.server = data.getVar('BB_HASHSERVE') | 877 | self.server = data.getVar('BB_HASHSERVE') |
897 | self.method = "sstate_output_hash" | 878 | self.method = "sstate_output_hash" |
898 | self.max_parallel = 1 | ||
899 | 879 | ||
900 | def clean_checksum_file_path(file_checksum_tuple): | 880 | def clean_checksum_file_path(file_checksum_tuple): |
901 | f, cs = file_checksum_tuple | 881 | f, cs = file_checksum_tuple |
diff --git a/bitbake/lib/bb/tests/codeparser.py b/bitbake/lib/bb/tests/codeparser.py index f6585fb3aa..c0d1362a0c 100644 --- a/bitbake/lib/bb/tests/codeparser.py +++ b/bitbake/lib/bb/tests/codeparser.py | |||
@@ -106,6 +106,46 @@ ${D}${libdir}/pkgconfig/*.pc | |||
106 | self.parseExpression("foo=$(echo bar)") | 106 | self.parseExpression("foo=$(echo bar)") |
107 | self.assertExecs(set(["echo"])) | 107 | self.assertExecs(set(["echo"])) |
108 | 108 | ||
109 | def test_assign_subshell_expansion_quotes(self): | ||
110 | self.parseExpression('foo="$(echo bar)"') | ||
111 | self.assertExecs(set(["echo"])) | ||
112 | |||
113 | def test_assign_subshell_expansion_nested(self): | ||
114 | self.parseExpression('foo="$(func1 "$(func2 bar$(func3))")"') | ||
115 | self.assertExecs(set(["func1", "func2", "func3"])) | ||
116 | |||
117 | def test_assign_subshell_expansion_multiple(self): | ||
118 | self.parseExpression('foo="$(func1 "$(func2)") $(func3)"') | ||
119 | self.assertExecs(set(["func1", "func2", "func3"])) | ||
120 | |||
121 | def test_assign_subshell_expansion_escaped_quotes(self): | ||
122 | self.parseExpression('foo="\\"fo\\"o$(func1)"') | ||
123 | self.assertExecs(set(["func1"])) | ||
124 | |||
125 | def test_assign_subshell_expansion_empty(self): | ||
126 | self.parseExpression('foo="bar$()foo"') | ||
127 | self.assertExecs(set()) | ||
128 | |||
129 | def test_assign_subshell_backticks(self): | ||
130 | self.parseExpression("foo=`echo bar`") | ||
131 | self.assertExecs(set(["echo"])) | ||
132 | |||
133 | def test_assign_subshell_backticks_quotes(self): | ||
134 | self.parseExpression('foo="`echo bar`"') | ||
135 | self.assertExecs(set(["echo"])) | ||
136 | |||
137 | def test_assign_subshell_backticks_multiple(self): | ||
138 | self.parseExpression('foo="`func1 bar` `func2`"') | ||
139 | self.assertExecs(set(["func1", "func2"])) | ||
140 | |||
141 | def test_assign_subshell_backticks_escaped_quotes(self): | ||
142 | self.parseExpression('foo="\\"fo\\"o`func1`"') | ||
143 | self.assertExecs(set(["func1"])) | ||
144 | |||
145 | def test_assign_subshell_backticks_empty(self): | ||
146 | self.parseExpression('foo="bar``foo"') | ||
147 | self.assertExecs(set()) | ||
148 | |||
109 | def test_shell_unexpanded(self): | 149 | def test_shell_unexpanded(self): |
110 | self.setEmptyVars(["QT_BASE_NAME"]) | 150 | self.setEmptyVars(["QT_BASE_NAME"]) |
111 | self.parseExpression('echo "${QT_BASE_NAME}"') | 151 | self.parseExpression('echo "${QT_BASE_NAME}"') |
diff --git a/bitbake/lib/bb/tests/fetch.py b/bitbake/lib/bb/tests/fetch.py index ed7a39a723..1e55cdd299 100644 --- a/bitbake/lib/bb/tests/fetch.py +++ b/bitbake/lib/bb/tests/fetch.py | |||
@@ -1525,7 +1525,7 @@ class FetchLatestVersionTest(FetcherTest): | |||
1525 | 1525 | ||
1526 | def test_wget_latest_versionstring(self): | 1526 | def test_wget_latest_versionstring(self): |
1527 | testdata = os.path.dirname(os.path.abspath(__file__)) + "/fetch-testdata" | 1527 | testdata = os.path.dirname(os.path.abspath(__file__)) + "/fetch-testdata" |
1528 | server = HTTPService(testdata) | 1528 | server = HTTPService(testdata, host="127.0.0.1") |
1529 | server.start() | 1529 | server.start() |
1530 | port = server.port | 1530 | port = server.port |
1531 | try: | 1531 | try: |
@@ -1533,10 +1533,10 @@ class FetchLatestVersionTest(FetcherTest): | |||
1533 | self.d.setVar("PN", k[0]) | 1533 | self.d.setVar("PN", k[0]) |
1534 | checkuri = "" | 1534 | checkuri = "" |
1535 | if k[2]: | 1535 | if k[2]: |
1536 | checkuri = "http://localhost:%s/" % port + k[2] | 1536 | checkuri = "http://127.0.0.1:%s/" % port + k[2] |
1537 | self.d.setVar("UPSTREAM_CHECK_URI", checkuri) | 1537 | self.d.setVar("UPSTREAM_CHECK_URI", checkuri) |
1538 | self.d.setVar("UPSTREAM_CHECK_REGEX", k[3]) | 1538 | self.d.setVar("UPSTREAM_CHECK_REGEX", k[3]) |
1539 | url = "http://localhost:%s/" % port + k[1] | 1539 | url = "http://127.0.0.1:%s/" % port + k[1] |
1540 | ud = bb.fetch2.FetchData(url, self.d) | 1540 | ud = bb.fetch2.FetchData(url, self.d) |
1541 | pupver = ud.method.latest_versionstring(ud, self.d) | 1541 | pupver = ud.method.latest_versionstring(ud, self.d) |
1542 | verstring = pupver[0] | 1542 | verstring = pupver[0] |
diff --git a/bitbake/lib/bb/ui/buildinfohelper.py b/bitbake/lib/bb/ui/buildinfohelper.py index 8b212b7803..4ee45d67a2 100644 --- a/bitbake/lib/bb/ui/buildinfohelper.py +++ b/bitbake/lib/bb/ui/buildinfohelper.py | |||
@@ -559,7 +559,10 @@ class ORMWrapper(object): | |||
559 | # we might have an invalid link; no way to detect this. just set it to None | 559 | # we might have an invalid link; no way to detect this. just set it to None |
560 | filetarget_obj = None | 560 | filetarget_obj = None |
561 | 561 | ||
562 | parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY) | 562 | try: |
563 | parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY) | ||
564 | except Target_File.DoesNotExist: | ||
565 | parent_obj = None | ||
563 | 566 | ||
564 | Target_File.objects.create( | 567 | Target_File.objects.create( |
565 | target = target_obj, | 568 | target = target_obj, |
diff --git a/bitbake/lib/bs4/AUTHORS b/bitbake/lib/bs4/AUTHORS new file mode 100644 index 0000000000..1f14fe07de --- /dev/null +++ b/bitbake/lib/bs4/AUTHORS | |||
@@ -0,0 +1,49 @@ | |||
1 | Behold, mortal, the origins of Beautiful Soup... | ||
2 | ================================================ | ||
3 | |||
4 | Leonard Richardson is the primary maintainer. | ||
5 | |||
6 | Aaron DeVore and Isaac Muse have made significant contributions to the | ||
7 | code base. | ||
8 | |||
9 | Mark Pilgrim provided the encoding detection code that forms the base | ||
10 | of UnicodeDammit. | ||
11 | |||
12 | Thomas Kluyver and Ezio Melotti finished the work of getting Beautiful | ||
13 | Soup 4 working under Python 3. | ||
14 | |||
15 | Simon Willison wrote soupselect, which was used to make Beautiful Soup | ||
16 | support CSS selectors. Isaac Muse wrote SoupSieve, which made it | ||
17 | possible to _remove_ the CSS selector code from Beautiful Soup. | ||
18 | |||
19 | Sam Ruby helped with a lot of edge cases. | ||
20 | |||
21 | Jonathan Ellis was awarded the prestigious Beau Potage D'Or for his | ||
22 | work in solving the nestable tags conundrum. | ||
23 | |||
24 | An incomplete list of people have contributed patches to Beautiful | ||
25 | Soup: | ||
26 | |||
27 | Istvan Albert, Andrew Lin, Anthony Baxter, Oliver Beattie, Andrew | ||
28 | Boyko, Tony Chang, Francisco Canas, "Delong", Zephyr Fang, Fuzzy, | ||
29 | Roman Gaufman, Yoni Gilad, Richie Hindle, Toshihiro Kamiya, Peteris | ||
30 | Krumins, Kent Johnson, Marek Kapolka, Andreas Kostyrka, Roel Kramer, | ||
31 | Ben Last, Robert Leftwich, Stefaan Lippens, "liquider", Staffan | ||
32 | Malmgren, Ksenia Marasanova, JP Moins, Adam Monsen, John Nagle, "Jon", | ||
33 | Ed Oskiewicz, Martijn Peters, Greg Phillips, Giles Radford, Stefano | ||
34 | Revera, Arthur Rudolph, Marko Samastur, James Salter, Jouni Seppänen, | ||
35 | Alexander Schmolck, Tim Shirley, Geoffrey Sneddon, Ville Skyttä, | ||
36 | "Vikas", Jens Svalgaard, Andy Theyers, Eric Weiser, Glyn Webster, John | ||
37 | Wiseman, Paul Wright, Danny Yoo | ||
38 | |||
39 | An incomplete list of people who made suggestions or found bugs or | ||
40 | found ways to break Beautiful Soup: | ||
41 | |||
42 | Hanno Böck, Matteo Bertini, Chris Curvey, Simon Cusack, Bruce Eckel, | ||
43 | Matt Ernst, Michael Foord, Tom Harris, Bill de hOra, Donald Howes, | ||
44 | Matt Patterson, Scott Roberts, Steve Strassmann, Mike Williams, | ||
45 | warchild at redho dot com, Sami Kuisma, Carlos Rocha, Bob Hutchison, | ||
46 | Joren Mc, Michal Migurski, John Kleven, Tim Heaney, Tripp Lilley, Ed | ||
47 | Summers, Dennis Sutch, Chris Smith, Aaron Swartz, Stuart | ||
48 | Turner, Greg Edwards, Kevin J Kalupson, Nikos Kouremenos, Artur de | ||
49 | Sousa Rocha, Yichun Wei, Per Vognsen | ||
diff --git a/bitbake/lib/bs4/AUTHORS.txt b/bitbake/lib/bs4/AUTHORS.txt deleted file mode 100644 index 2ac8fcc8cc..0000000000 --- a/bitbake/lib/bs4/AUTHORS.txt +++ /dev/null | |||
@@ -1,43 +0,0 @@ | |||
1 | Behold, mortal, the origins of Beautiful Soup... | ||
2 | ================================================ | ||
3 | |||
4 | Leonard Richardson is the primary programmer. | ||
5 | |||
6 | Aaron DeVore is awesome. | ||
7 | |||
8 | Mark Pilgrim provided the encoding detection code that forms the base | ||
9 | of UnicodeDammit. | ||
10 | |||
11 | Thomas Kluyver and Ezio Melotti finished the work of getting Beautiful | ||
12 | Soup 4 working under Python 3. | ||
13 | |||
14 | Simon Willison wrote soupselect, which was used to make Beautiful Soup | ||
15 | support CSS selectors. | ||
16 | |||
17 | Sam Ruby helped with a lot of edge cases. | ||
18 | |||
19 | Jonathan Ellis was awarded the prestigous Beau Potage D'Or for his | ||
20 | work in solving the nestable tags conundrum. | ||
21 | |||
22 | An incomplete list of people have contributed patches to Beautiful | ||
23 | Soup: | ||
24 | |||
25 | Istvan Albert, Andrew Lin, Anthony Baxter, Andrew Boyko, Tony Chang, | ||
26 | Zephyr Fang, Fuzzy, Roman Gaufman, Yoni Gilad, Richie Hindle, Peteris | ||
27 | Krumins, Kent Johnson, Ben Last, Robert Leftwich, Staffan Malmgren, | ||
28 | Ksenia Marasanova, JP Moins, Adam Monsen, John Nagle, "Jon", Ed | ||
29 | Oskiewicz, Greg Phillips, Giles Radford, Arthur Rudolph, Marko | ||
30 | Samastur, Jouni Seppänen, Alexander Schmolck, Andy Theyers, Glyn | ||
31 | Webster, Paul Wright, Danny Yoo | ||
32 | |||
33 | An incomplete list of people who made suggestions or found bugs or | ||
34 | found ways to break Beautiful Soup: | ||
35 | |||
36 | Hanno Böck, Matteo Bertini, Chris Curvey, Simon Cusack, Bruce Eckel, | ||
37 | Matt Ernst, Michael Foord, Tom Harris, Bill de hOra, Donald Howes, | ||
38 | Matt Patterson, Scott Roberts, Steve Strassmann, Mike Williams, | ||
39 | warchild at redho dot com, Sami Kuisma, Carlos Rocha, Bob Hutchison, | ||
40 | Joren Mc, Michal Migurski, John Kleven, Tim Heaney, Tripp Lilley, Ed | ||
41 | Summers, Dennis Sutch, Chris Smith, Aaron Sweep^W Swartz, Stuart | ||
42 | Turner, Greg Edwards, Kevin J Kalupson, Nikos Kouremenos, Artur de | ||
43 | Sousa Rocha, Yichun Wei, Per Vognsen | ||
diff --git a/bitbake/lib/bs4/NEWS.txt b/bitbake/lib/bs4/CHANGELOG index 88a60a2458..2701446a6d 100644 --- a/bitbake/lib/bs4/NEWS.txt +++ b/bitbake/lib/bs4/CHANGELOG | |||
@@ -1,3 +1,776 @@ | |||
1 | = 4.12.3 (20240117) | ||
2 | |||
3 | * The Beautiful Soup documentation now has a Spanish translation, thanks | ||
4 | to Carlos Romero. Delong Wang's Chinese translation has been updated | ||
5 | to cover Beautiful Soup 4.12.0. | ||
6 | |||
7 | * Fixed a regression such that if you set .hidden on a tag, the tag | ||
8 | becomes invisible but its contents are still visible. User manipulation | ||
9 | of .hidden is not a documented or supported feature, so don't do this, | ||
10 | but it wasn't too difficult to keep the old behavior working. | ||
11 | |||
12 | * Fixed a case found by Mengyuhan where html.parser giving up on | ||
13 | markup would result in an AssertionError instead of a | ||
14 | ParserRejectedMarkup exception. | ||
15 | |||
16 | * Added the correct stacklevel to instances of the XMLParsedAsHTMLWarning. | ||
17 | [bug=2034451] | ||
18 | |||
19 | * Corrected the syntax of the license definition in pyproject.toml. Patch | ||
20 | by Louis Maddox. [bug=2032848] | ||
21 | |||
22 | * Corrected a typo in a test that was causing test failures when run against | ||
23 | libxml2 2.12.1. [bug=2045481] | ||
24 | |||
25 | = 4.12.2 (20230407) | ||
26 | |||
27 | * Fixed an unhandled exception in BeautifulSoup.decode_contents | ||
28 | and methods that call it. [bug=2015545] | ||
29 | |||
30 | = 4.12.1 (20230405) | ||
31 | |||
32 | NOTE: the following things are likely to be dropped in the next | ||
33 | feature release of Beautiful Soup: | ||
34 | |||
35 | Official support for Python 3.6. | ||
36 | Inclusion of unit tests and test data in the wheel file. | ||
37 | Two scripts: demonstrate_parser_differences.py and test-all-versions. | ||
38 | |||
39 | Changes: | ||
40 | |||
41 | * This version of Beautiful Soup replaces setup.py and setup.cfg | ||
42 | with pyproject.toml. Beautiful Soup now uses tox as its test backend | ||
43 | and hatch to do builds. | ||
44 | |||
45 | * The main functional improvement in this version is a nonrecursive technique | ||
46 | for regenerating a tree. This technique is used to avoid situations where, | ||
47 | in previous versions, doing something to a very deeply nested tree | ||
48 | would overflow the Python interpreter stack: | ||
49 | |||
50 | 1. Outputting a tree as a string, e.g. with | ||
51 | BeautifulSoup.encode() [bug=1471755] | ||
52 | |||
53 | 2. Making copies of trees (copy.copy() and | ||
54 | copy.deepcopy() from the Python standard library). [bug=1709837] | ||
55 | |||
56 | 3. Pickling a BeautifulSoup object. (Note that pickling a Tag | ||
57 | object can still cause an overflow.) | ||
58 | |||
59 | * Making a copy of a BeautifulSoup object no longer parses the | ||
60 | document again, which should improve performance significantly. | ||
61 | |||
62 | * When a BeautifulSoup object is unpickled, Beautiful Soup now | ||
63 | tries to associate an appropriate TreeBuilder object with it. | ||
64 | |||
65 | * Tag.prettify() will now consistently end prettified markup with | ||
66 | a newline. | ||
67 | |||
68 | * Added unit tests for fuzz test cases created by third | ||
69 | parties. Some of these tests are skipped since they point | ||
70 | to problems outside of Beautiful Soup, but this change | ||
71 | puts them all in one convenient place. | ||
72 | |||
73 | * PageElement now implements the known_xml attribute. (This was technically | ||
74 | a bug, but it shouldn't be an issue in normal use.) [bug=2007895] | ||
75 | |||
76 | * The demonstrate_parser_differences.py script was still written in | ||
77 | Python 2. I've converted it to Python 3, but since no one has | ||
78 | mentioned this over the years, it's a sign that no one uses this | ||
79 | script and it's not serving its purpose. | ||
80 | |||
81 | = 4.12.0 (20230320) | ||
82 | |||
83 | * Introduced the .css property, which centralizes all access to | ||
84 | the Soup Sieve API. This allows Beautiful Soup to give direct | ||
85 | access to as much of Soup Sieve that makes sense, without cluttering | ||
86 | the BeautifulSoup and Tag classes with a lot of new methods. | ||
87 | |||
88 | This does mean one addition to the BeautifulSoup and Tag classes | ||
89 | (the .css property itself), so this might be a breaking change if you | ||
90 | happen to use Beautiful Soup to parse XML that includes a tag called | ||
91 | <css>. In particular, code like this will stop working in 4.12.0: | ||
92 | |||
93 | soup.css['id'] | ||
94 | |||
95 | Code like this will work just as before: | ||
96 | |||
97 | soup.select_one('css')['id'] | ||
98 | |||
99 | The Soup Sieve methods supported through the .css property are | ||
100 | select(), select_one(), iselect(), closest(), match(), filter(), | ||
101 | escape(), and compile(). The BeautifulSoup and Tag classes still | ||
102 | support the select() and select_one() methods; they have not been | ||
103 | deprecated, but they have been demoted to convenience methods. | ||
104 | |||
105 | [bug=2003677] | ||
106 | |||
107 | * When the html.parser parser decides it can't parse a document, Beautiful | ||
108 | Soup now consistently propagates this fact by raising a | ||
109 | ParserRejectedMarkup error. [bug=2007343] | ||
110 | |||
111 | * Removed some error checking code from diagnose(), which is redundant with | ||
112 | similar (but more Pythonic) code in the BeautifulSoup constructor. | ||
113 | [bug=2007344] | ||
114 | |||
115 | * Added intersphinx references to the documentation so that other | ||
116 | projects have a target to point to when they reference Beautiful | ||
117 | Soup classes. [bug=1453370] | ||
118 | |||
119 | = 4.11.2 (20230131) | ||
120 | |||
121 | * Fixed test failures caused by nondeterministic behavior of | ||
122 | UnicodeDammit's character detection, depending on the platform setup. | ||
123 | [bug=1973072] | ||
124 | |||
125 | * Fixed another crash when overriding multi_valued_attributes and using the | ||
126 | html5lib parser. [bug=1948488] | ||
127 | |||
128 | * The HTMLFormatter and XMLFormatter constructors no longer return a | ||
129 | value. [bug=1992693] | ||
130 | |||
131 | * Tag.interesting_string_types is now propagated when a tag is | ||
132 | copied. [bug=1990400] | ||
133 | |||
134 | * Warnings now do their best to provide an appropriate stacklevel, | ||
135 | improving the usefulness of the message. [bug=1978744] | ||
136 | |||
137 | * Passing a Tag's .contents into PageElement.extend() now works the | ||
138 | same way as passing the Tag itself. | ||
139 | |||
140 | * Soup Sieve tests will be skipped if the library is not installed. | ||
141 | |||
142 | = 4.11.1 (20220408) | ||
143 | |||
144 | This release was done to ensure that the unit tests are packaged along | ||
145 | with the released source. There are no functionality changes in this | ||
146 | release, but there are a few other packaging changes: | ||
147 | |||
148 | * The Japanese and Korean translations of the documentation are included. | ||
149 | * The changelog is now packaged as CHANGELOG, and the license file is | ||
150 | packaged as LICENSE. NEWS.txt and COPYING.txt are still present, | ||
151 | but may be removed in the future. | ||
152 | * TODO.txt is no longer packaged, since a TODO is not relevant for released | ||
153 | code. | ||
154 | |||
155 | = 4.11.0 (20220407) | ||
156 | |||
157 | * Ported unit tests to use pytest. | ||
158 | |||
159 | * Added special string classes, RubyParenthesisString and RubyTextString, | ||
160 | to make it possible to treat ruby text specially in get_text() calls. | ||
161 | [bug=1941980] | ||
162 | |||
163 | * It's now possible to customize the way output is indented by | ||
164 | providing a value for the 'indent' argument to the Formatter | ||
165 | constructor. The 'indent' argument works very similarly to the | ||
166 | argument of the same name in the Python standard library's | ||
167 | json.dump() function. [bug=1955497] | ||
168 | |||
169 | * If the charset-normalizer Python module | ||
170 | (https://pypi.org/project/charset-normalizer/) is installed, Beautiful | ||
171 | Soup will use it to detect the character sets of incoming documents. | ||
172 | This is also the module used by newer versions of the Requests library. | ||
173 | For the sake of backwards compatibility, chardet and cchardet both take | ||
174 | precedence if installed. [bug=1955346] | ||
175 | |||
176 | * Added a workaround for an lxml bug | ||
177 | (https://bugs.launchpad.net/lxml/+bug/1948551) that causes | ||
178 | problems when parsing a Unicode string beginning with BYTE ORDER MARK. | ||
179 | [bug=1947768] | ||
180 | |||
181 | * Issue a warning when an HTML parser is used to parse a document that | ||
182 | looks like XML but not XHTML. [bug=1939121] | ||
183 | |||
184 | * Do a better job of keeping track of namespaces as an XML document is | ||
185 | parsed, so that CSS selectors that use namespaces will do the right | ||
186 | thing more often. [bug=1946243] | ||
187 | |||
188 | * Some time ago, the misleadingly named "text" argument to find-type | ||
189 | methods was renamed to the more accurate "string." But this supposed | ||
190 | "renaming" didn't make it into important places like the method | ||
191 | signatures or the docstrings. That's corrected in this | ||
192 | version. "text" still works, but will give a DeprecationWarning. | ||
193 | [bug=1947038] | ||
194 | |||
195 | * Fixed a crash when pickling a BeautifulSoup object that has no | ||
196 | tree builder. [bug=1934003] | ||
197 | |||
198 | * Fixed a crash when overriding multi_valued_attributes and using the | ||
199 | html5lib parser. [bug=1948488] | ||
200 | |||
201 | * Standardized the wording of the MarkupResemblesLocatorWarning | ||
202 | warnings to omit untrusted input and make the warnings less | ||
203 | judgmental about what you ought to be doing. [bug=1955450] | ||
204 | |||
205 | * Removed support for the iconv_codec library, which doesn't seem | ||
206 | to exist anymore and was never put up on PyPI. (The closest | ||
207 | replacement on PyPI, iconv_codecs, is GPL-licensed, so we can't use | ||
208 | it--it's also quite old.) | ||
209 | |||
210 | = 4.10.0 (20210907) | ||
211 | |||
212 | * This is the first release of Beautiful Soup to only support Python | ||
213 | 3. I dropped Python 2 support to maintain support for newer versions | ||
214 | (58 and up) of setuptools. See: | ||
215 | https://github.com/pypa/setuptools/issues/2769 [bug=1942919] | ||
216 | |||
217 | * The behavior of methods like .get_text() and .strings now differs | ||
218 | depending on the type of tag. The change is visible with HTML tags | ||
219 | like <script>, <style>, and <template>. Starting in 4.9.0, methods | ||
220 | like get_text() returned no results on such tags, because the | ||
221 | contents of those tags are not considered 'text' within the document | ||
222 | as a whole. | ||
223 | |||
224 | But a user who calls script.get_text() is working from a different | ||
225 | definition of 'text' than a user who calls div.get_text()--otherwise | ||
226 | there would be no need to call script.get_text() at all. In 4.10.0, | ||
227 | the contents of (e.g.) a <script> tag are considered 'text' during a | ||
228 | get_text() call on the tag itself, but not considered 'text' during | ||
229 | a get_text() call on the tag's parent. | ||
230 | |||
231 | Because of this change, calling get_text() on each child of a tag | ||
232 | may now return a different result than calling get_text() on the tag | ||
233 | itself. That's because different tags now have different | ||
234 | understandings of what counts as 'text'. [bug=1906226] [bug=1868861] | ||
235 | |||
236 | * NavigableString and its subclasses now implement the get_text() | ||
237 | method, as well as the properties .strings and | ||
238 | .stripped_strings. These methods will either return the string | ||
239 | itself, or nothing, so the only reason to use this is when iterating | ||
240 | over a list of mixed Tag and NavigableString objects. [bug=1904309] | ||
241 | |||
242 | * The 'html5' formatter now treats attributes whose values are the | ||
243 | empty string as HTML boolean attributes. Previously (and in other | ||
244 | formatters), an attribute value must be set as None to be treated as | ||
245 | a boolean attribute. In a future release, I plan to also give this | ||
246 | behavior to the 'html' formatter. Patch by Isaac Muse. [bug=1915424] | ||
247 | |||
248 | * The 'replace_with()' method now takes a variable number of arguments, | ||
249 | and can be used to replace a single element with a sequence of elements. | ||
250 | Patch by Bill Chandos. [rev=605] | ||
251 | |||
252 | * Corrected output when the namespace prefix associated with a | ||
253 | namespaced attribute is the empty string, as opposed to | ||
254 | None. [bug=1915583] | ||
255 | |||
256 | * Performance improvement when processing tags that speeds up overall | ||
257 | tree construction by 2%. Patch by Morotti. [bug=1899358] | ||
258 | |||
259 | * Corrected the use of special string container classes in cases when a | ||
260 | single tag may contain strings with different containers; such as | ||
261 | the <template> tag, which may contain both TemplateString objects | ||
262 | and Comment objects. [bug=1913406] | ||
263 | |||
264 | * The html.parser tree builder can now handle named entities | ||
265 | found in the HTML5 spec in much the same way that the html5lib | ||
266 | tree builder does. Note that the lxml HTML tree builder doesn't handle | ||
267 | named entities this way. [bug=1924908] | ||
268 | |||
269 | * Added a second way to specify encodings to UnicodeDammit and | ||
270 | EncodingDetector, based on the order of precedence defined in the | ||
271 | HTML5 spec, starting at: | ||
272 | https://html.spec.whatwg.org/multipage/parsing.html#parsing-with-a-known-character-encoding | ||
273 | |||
274 | Encodings in 'known_definite_encodings' are tried first, then | ||
275 | byte-order-mark sniffing is run, then encodings in 'user_encodings' | ||
276 | are tried. The old argument, 'override_encodings', is now a | ||
277 | deprecated alias for 'known_definite_encodings'. | ||
278 | |||
279 | This changes the default behavior of the html.parser and lxml tree | ||
280 | builders, in a way that may slightly improve encoding | ||
281 | detection but will probably have no effect. [bug=1889014] | ||
282 | |||
283 | * Improve the warning issued when a directory name (as opposed to | ||
284 | the name of a regular file) is passed as markup into the BeautifulSoup | ||
285 | constructor. [bug=1913628] | ||
286 | |||
287 | = 4.9.3 (20201003) | ||
288 | |||
289 | This is the final release of Beautiful Soup to support Python | ||
290 | 2. Beautiful Soup's official support for Python 2 ended on 01 January, | ||
291 | 2021. In the Launchpad Git repository, the final revision to support | ||
292 | Python 2 was revision 70f546b1e689a70e2f103795efce6d261a3dadf7; it is | ||
293 | tagged as "python2". | ||
294 | |||
295 | * Implemented a significant performance optimization to the process of | ||
296 | searching the parse tree. Patch by Morotti. [bug=1898212] | ||
297 | |||
298 | = 4.9.2 (20200926) | ||
299 | |||
300 | * Fixed a bug that caused too many tags to be popped from the tag | ||
301 | stack during tree building, when encountering a closing tag that had | ||
302 | no matching opening tag. [bug=1880420] | ||
303 | |||
304 | * Fixed a bug that inconsistently moved elements over when passing | ||
305 | a Tag, rather than a list, into Tag.extend(). [bug=1885710] | ||
306 | |||
307 | * Specify the soupsieve dependency in a way that complies with | ||
308 | PEP 508. Patch by Mike Nerone. [bug=1893696] | ||
309 | |||
310 | * Change the signatures for BeautifulSoup.insert_before and insert_after | ||
311 | (which are not implemented) to match PageElement.insert_before and | ||
312 | insert_after, quieting warnings in some IDEs. [bug=1897120] | ||
313 | |||
314 | = 4.9.1 (20200517) | ||
315 | |||
316 | * Added a keyword argument 'on_duplicate_attribute' to the | ||
317 | BeautifulSoupHTMLParser constructor (used by the html.parser tree | ||
318 | builder) which lets you customize the handling of markup that | ||
319 | contains the same attribute more than once, as in: | ||
320 | <a href="url1" href="url2"> [bug=1878209] | ||
321 | |||
322 | * Added a distinct subclass, GuessedAtParserWarning, for the warning | ||
323 | issued when BeautifulSoup is instantiated without a parser being | ||
324 | specified. [bug=1873787] | ||
325 | |||
326 | * Added a distinct subclass, MarkupResemblesLocatorWarning, for the | ||
327 | warning issued when BeautifulSoup is instantiated with 'markup' that | ||
328 | actually seems to be a URL or the path to a file on | ||
329 | disk. [bug=1873787] | ||
330 | |||
331 | * The new NavigableString subclasses (Stylesheet, Script, and | ||
332 | TemplateString) can now be imported directly from the bs4 package. | ||
333 | |||
334 | * If you encode a document with a Python-specific encoding like | ||
335 | 'unicode_escape', that encoding is no longer mentioned in the final | ||
336 | XML or HTML document. Instead, encoding information is omitted or | ||
337 | left blank. [bug=1874955] | ||
338 | |||
339 | * Fixed test failures when run against soupsieve 2.0. Patch by Tomáš | ||
340 | Chvátal. [bug=1872279] | ||
341 | |||
342 | = 4.9.0 (20200405) | ||
343 | |||
344 | * Added PageElement.decomposed, a new property which lets you | ||
345 | check whether you've already called decompose() on a Tag or | ||
346 | NavigableString. | ||
347 | |||
348 | * Embedded CSS and Javascript is now stored in distinct Stylesheet and | ||
349 | Script tags, which are ignored by methods like get_text() since most | ||
350 | people don't consider this sort of content to be 'text'. This | ||
351 | feature is not supported by the html5lib treebuilder. [bug=1868861] | ||
352 | |||
353 | * Added a Russian translation by 'authoress' to the repository. | ||
354 | |||
355 | * Fixed an unhandled exception when formatting a Tag that had been | ||
356 | decomposed. [bug=1857767] | ||
357 | |||
358 | * Fixed a bug that happened when passing a Unicode filename containing | ||
359 | non-ASCII characters as markup into Beautiful Soup, on a system that | ||
360 | allows Unicode filenames. [bug=1866717] | ||
361 | |||
362 | * Added a performance optimization to PageElement.extract(). Patch by | ||
363 | Arthur Darcet. | ||
364 | |||
365 | = 4.8.2 (20191224) | ||
366 | |||
367 | * Added Python docstrings to all public methods of the most commonly | ||
368 | used classes. | ||
369 | |||
370 | * Added a Chinese translation by Deron Wang and a Brazilian Portuguese | ||
371 | translation by Cezar Peixeiro to the repository. | ||
372 | |||
373 | * Fixed two deprecation warnings. Patches by Colin | ||
374 | Watson and Nicholas Neumann. [bug=1847592] [bug=1855301] | ||
375 | |||
376 | * The html.parser tree builder now correctly handles DOCTYPEs that are | ||
377 | not uppercase. [bug=1848401] | ||
378 | |||
379 | * PageElement.select() now returns a ResultSet rather than a regular | ||
380 | list, making it consistent with methods like find_all(). | ||
381 | |||
382 | = 4.8.1 (20191006) | ||
383 | |||
384 | * When the html.parser or html5lib parsers are in use, Beautiful Soup | ||
385 | will, by default, record the position in the original document where | ||
386 | each tag was encountered. This includes line number (Tag.sourceline) | ||
387 | and position within a line (Tag.sourcepos). Based on code by Chris | ||
388 | Mayo. [bug=1742921] | ||
389 | |||
390 | * When instantiating a BeautifulSoup object, it's now possible to | ||
391 | provide a dictionary ('element_classes') of the classes you'd like to be | ||
392 | instantiated instead of Tag, NavigableString, etc. | ||
393 | |||
394 | * Fixed the definition of the default XML namespace when using | ||
395 | lxml 4.4. Patch by Isaac Muse. [bug=1840141] | ||
396 | |||
397 | * Fixed a crash when pretty-printing tags that were not created | ||
398 | during initial parsing. [bug=1838903] | ||
399 | |||
400 | * Copying a Tag preserves information that was originally obtained from | ||
401 | the TreeBuilder used to build the original Tag. [bug=1838903] | ||
402 | |||
403 | * Raise an explanatory exception when the underlying parser | ||
404 | completely rejects the incoming markup. [bug=1838877] | ||
405 | |||
406 | * Avoid a crash when trying to detect the declared encoding of a | ||
407 | Unicode document. [bug=1838877] | ||
408 | |||
409 | * Avoid a crash when unpickling certain parse trees generated | ||
410 | using html5lib on Python 3. [bug=1843545] | ||
411 | |||
412 | = 4.8.0 (20190720, "One Small Soup") | ||
413 | |||
414 | This release focuses on making it easier to customize Beautiful Soup's | ||
415 | input mechanism (the TreeBuilder) and output mechanism (the Formatter). | ||
416 | |||
417 | * You can customize the TreeBuilder object by passing keyword | ||
418 | arguments into the BeautifulSoup constructor. Those keyword | ||
419 | arguments will be passed along into the TreeBuilder constructor. | ||
420 | |||
421 | The main reason to do this right now is to change which | ||
422 | attributes are treated as multi-valued attributes (the way 'class' | ||
423 | is treated by default). You can do this with the | ||
424 | 'multi_valued_attributes' argument. [bug=1832978] | ||
425 | |||
426 | * The role of Formatter objects has been greatly expanded. The Formatter | ||
427 | class now controls the following: | ||
428 | |||
429 | - The function to call to perform entity substitution. (This was | ||
430 | previously Formatter's only job.) | ||
431 | - Which tags should be treated as containing CDATA and have their | ||
432 | contents exempt from entity substitution. | ||
433 | - The order in which a tag's attributes are output. [bug=1812422] | ||
434 | - Whether or not to put a '/' inside a void element, e.g. '<br/>' vs '<br>' | ||
435 | |||
436 | All preexisting code should work as before. | ||
437 | |||
438 | * Added a new method to the API, Tag.smooth(), which consolidates | ||
439 | multiple adjacent NavigableString elements. [bug=1697296] | ||
440 | |||
441 | * &apos; (which is valid in XML, XHTML, and HTML 5, but not HTML 4) is always | ||
442 | recognized as a named entity and converted to a single quote. [bug=1818721] | ||
443 | |||
444 | = 4.7.1 (20190106) | ||
445 | |||
446 | * Fixed a significant performance problem introduced in 4.7.0. [bug=1810617] | ||
447 | |||
448 | * Fixed an incorrectly raised exception when inserting a tag before or | ||
449 | after an identical tag. [bug=1810692] | ||
450 | |||
451 | * Beautiful Soup will no longer try to keep track of namespaces that | ||
452 | are not defined with a prefix; this can confuse soupsieve. [bug=1810680] | ||
453 | |||
454 | * Tried even harder to avoid the deprecation warning originally fixed in | ||
455 | 4.6.1. [bug=1778909] | ||
456 | |||
457 | = 4.7.0 (20181231) | ||
458 | |||
459 | * Beautiful Soup's CSS Selector implementation has been replaced by a | ||
460 | dependency on Isaac Muse's SoupSieve project (the soupsieve package | ||
461 | on PyPI). The good news is that SoupSieve has a much more robust and | ||
462 | complete implementation of CSS selectors, resolving a large number | ||
463 | of longstanding issues. The bad news is that from this point onward, | ||
464 | SoupSieve must be installed if you want to use the select() method. | ||
465 | |||
466 | You don't have to change anything if you installed Beautiful Soup | ||
467 | through pip (SoupSieve will be automatically installed when you | ||
468 | upgrade Beautiful Soup) or if you don't use CSS selectors from | ||
469 | within Beautiful Soup. | ||
470 | |||
471 | SoupSieve documentation: https://facelessuser.github.io/soupsieve/ | ||
472 | |||
473 | * Added the PageElement.extend() method, which works like list.extend(). | ||
474 | [bug=1514970] | ||
475 | |||
476 | * PageElement.insert_before() and insert_after() now take a variable | ||
477 | number of arguments. [bug=1514970] | ||
478 | |||
479 | * Fix a number of problems with the tree builder that caused | ||
480 | trees that were superficially okay, but which fell apart when bits | ||
481 | were extracted. Patch by Isaac Muse. [bug=1782928,1809910] | ||
482 | |||
483 | * Fixed a problem with the tree builder in which elements that | ||
484 | contained no content (such as empty comments and all-whitespace | ||
485 | elements) were not being treated as part of the tree. Patch by Isaac | ||
486 | Muse. [bug=1798699] | ||
487 | |||
488 | * Fixed a problem with multi-valued attributes where the value | ||
489 | contained whitespace. Thanks to Jens Svalgaard for the | ||
490 | fix. [bug=1787453] | ||
491 | |||
492 | * Clarified ambiguous license statements in the source code. Beautiful | ||
493 | Soup is released under the MIT license, and has been since 4.4.0. | ||
494 | |||
495 | * This file has been renamed from NEWS.txt to CHANGELOG. | ||
496 | |||
497 | = 4.6.3 (20180812) | ||
498 | |||
499 | * Exactly the same as 4.6.2. Re-released to make the README file | ||
500 | render properly on PyPI. | ||
501 | |||
502 | = 4.6.2 (20180812) | ||
503 | |||
504 | * Fix an exception when a custom formatter was asked to format a void | ||
505 | element. [bug=1784408] | ||
506 | |||
507 | = 4.6.1 (20180728) | ||
508 | |||
509 | * Stop data loss when encountering an empty numeric entity, and | ||
510 | possibly in other cases. Thanks to tos.kamiya for the fix. [bug=1698503] | ||
511 | |||
512 | * Preserve XML namespaces introduced inside an XML document, not just | ||
513 | the ones introduced at the top level. [bug=1718787] | ||
514 | |||
515 | * Added a new formatter, "html5", which represents void elements | ||
516 | as "<element>" rather than "<element/>". [bug=1716272] | ||
517 | |||
518 | * Fixed a problem where the html.parser tree builder interpreted | ||
519 | a string like "&foo " as the character entity "&foo;" [bug=1728706] | ||
520 | |||
521 | * Correctly handle invalid HTML numeric character entities like &#147; | ||
522 | which reference code points that are not Unicode code points. Note | ||
523 | that this is only fixed when Beautiful Soup is used with the | ||
524 | html.parser parser -- html5lib already worked and I couldn't fix it | ||
525 | with lxml. [bug=1782933] | ||
526 | |||
527 | * Improved the warning given when no parser is specified. [bug=1780571] | ||
528 | |||
529 | * When markup contains duplicate elements, a select() call that | ||
530 | includes multiple match clauses will match all relevant | ||
531 | elements. [bug=1770596] | ||
532 | |||
533 | * Fixed code that was causing deprecation warnings in recent Python 3 | ||
534 | versions. Includes a patch from Ville Skyttä. [bug=1778909] [bug=1689496] | ||
535 | |||
536 | * Fixed a Windows crash in diagnose() when checking whether a long | ||
537 | markup string is a filename. [bug=1737121] | ||
538 | |||
539 | * Stopped HTMLParser from raising an exception in very rare cases of | ||
540 | bad markup. [bug=1708831] | ||
541 | |||
542 | * Fixed a bug where find_all() was not working when asked to find a | ||
543 | tag with a namespaced name in an XML document that was parsed as | ||
544 | HTML. [bug=1723783] | ||
545 | |||
546 | * You can get finer control over formatting by subclassing | ||
547 | bs4.element.Formatter and passing a Formatter instance into (e.g.) | ||
548 | encode(). [bug=1716272] | ||
549 | |||
550 | * You can pass a dictionary of `attrs` into | ||
551 | BeautifulSoup.new_tag. This makes it possible to create a tag with | ||
552 | an attribute like 'name' that would otherwise be masked by another | ||
553 | argument of new_tag. [bug=1779276] | ||
554 | |||
555 | * Clarified the deprecation warning when accessing tag.fooTag, to cover | ||
556 | the possibility that you might really have been looking for a tag | ||
557 | called 'fooTag'. | ||
558 | |||
559 | = 4.6.0 (20170507) = | ||
560 | |||
561 | * Added the `Tag.get_attribute_list` method, which acts like `Tag.get` for | ||
562 | getting the value of an attribute, but which always returns a list, | ||
563 | whether or not the attribute is a multi-value attribute. [bug=1678589] | ||
564 | |||
565 | * It's now possible to use a tag's namespace prefix when searching, | ||
566 | e.g. soup.find('namespace:tag') [bug=1655332] | ||
567 | |||
568 | * Improved the handling of empty-element tags like <br> when using the | ||
569 | html.parser parser. [bug=1676935] | ||
570 | |||
571 | * HTML parsers treat all HTML4 and HTML5 empty element tags (aka void | ||
572 | element tags) correctly. [bug=1656909] | ||
573 | |||
574 | * Namespace prefix is preserved when an XML tag is copied. Thanks | ||
575 | to Vikas for a patch and test. [bug=1685172] | ||
576 | |||
577 | = 4.5.3 (20170102) = | ||
578 | |||
579 | * Fixed foster parenting when html5lib is the tree builder. Thanks to | ||
580 | Geoffrey Sneddon for a patch and test. | ||
581 | |||
582 | * Fixed yet another problem that caused the html5lib tree builder to | ||
583 | create a disconnected parse tree. [bug=1629825] | ||
584 | |||
585 | = 4.5.2 (20170102) = | ||
586 | |||
587 | * Apart from the version number, this release is identical to | ||
588 | 4.5.3. Due to user error, it could not be completely uploaded to | ||
589 | PyPI. Use 4.5.3 instead. | ||
590 | |||
591 | = 4.5.1 (20160802) = | ||
592 | |||
593 | * Fixed a crash when passing Unicode markup that contained a | ||
594 | processing instruction into the lxml HTML parser on Python | ||
595 | 3. [bug=1608048] | ||
596 | |||
597 | = 4.5.0 (20160719) = | ||
598 | |||
599 | * Beautiful Soup is no longer compatible with Python 2.6. This | ||
600 | actually happened a few releases ago, but it's now official. | ||
601 | |||
602 | * Beautiful Soup will now work with versions of html5lib greater than | ||
603 | 0.99999999. [bug=1603299] | ||
604 | |||
605 | * If a search against each individual value of a multi-valued | ||
606 | attribute fails, the search will be run one final time against the | ||
607 | complete attribute value considered as a single string. That is, if | ||
608 | a tag has class="foo bar" and neither "foo" nor "bar" matches, but | ||
609 | "foo bar" does, the tag is now considered a match. | ||
610 | |||
611 | This happened in previous versions, but only when the value being | ||
612 | searched for was a string. Now it also works when that value is | ||
613 | a regular expression, a list of strings, etc. [bug=1476868] | ||
614 | |||
615 | * Fixed a bug that deranged the tree when a whitespace element was | ||
616 | reparented into a tag that contained an identical whitespace | ||
617 | element. [bug=1505351] | ||
618 | |||
619 | * Added support for CSS selector values that contain quoted spaces, | ||
620 | such as tag[style="display: foo"]. [bug=1540588] | ||
621 | |||
622 | * Corrected handling of XML processing instructions. [bug=1504393] | ||
623 | |||
624 | * Corrected an encoding error that happened when a BeautifulSoup | ||
625 | object was copied. [bug=1554439] | ||
626 | |||
627 | * The contents of <textarea> tags will no longer be modified when the | ||
628 | tree is prettified. [bug=1555829] | ||
629 | |||
630 | * When a BeautifulSoup object is pickled but its tree builder cannot | ||
631 | be pickled, its .builder attribute is set to None instead of being | ||
632 | destroyed. This avoids a performance problem once the object is | ||
633 | unpickled. [bug=1523629] | ||
634 | |||
635 | * Specify the file and line number when warning about a | ||
636 | BeautifulSoup object being instantiated without a parser being | ||
637 | specified. [bug=1574647] | ||
638 | |||
639 | * The `limit` argument to `select()` now works correctly, though it's | ||
640 | not implemented very efficiently. [bug=1520530] | ||
641 | |||
642 | * Fixed a Python 3 ByteWarning when a URL was passed in as though it | ||
643 | were markup. Thanks to James Salter for a patch and | ||
644 | test. [bug=1533762] | ||
645 | |||
646 | * We don't run the check for a filename passed in as markup if the | ||
647 | 'filename' contains a less-than character; the less-than character | ||
648 | indicates it's most likely a very small document. [bug=1577864] | ||
649 | |||
650 | = 4.4.1 (20150928) = | ||
651 | |||
652 | * Fixed a bug that deranged the tree when part of it was | ||
653 | removed. Thanks to Eric Weiser for the patch and John Wiseman for a | ||
654 | test. [bug=1481520] | ||
655 | |||
656 | * Fixed a parse bug with the html5lib tree-builder. Thanks to Roel | ||
657 | Kramer for the patch. [bug=1483781] | ||
658 | |||
659 | * Improved the implementation of CSS selector grouping. Thanks to | ||
660 | Orangain for the patch. [bug=1484543] | ||
661 | |||
662 | * Fixed the test_detect_utf8 test so that it works when chardet is | ||
663 | installed. [bug=1471359] | ||
664 | |||
665 | * Corrected the output of Declaration objects. [bug=1477847] | ||
666 | |||
667 | |||
668 | = 4.4.0 (20150703) = | ||
669 | |||
670 | Especially important changes: | ||
671 | |||
672 | * Added a warning when you instantiate a BeautifulSoup object without | ||
673 | explicitly naming a parser. [bug=1398866] | ||
674 | |||
675 | * __repr__ now returns an ASCII bytestring in Python 2, and a Unicode | ||
676 | string in Python 3, instead of a UTF8-encoded bytestring in both | ||
677 | versions. In Python 3, __str__ now returns a Unicode string instead | ||
678 | of a bytestring. [bug=1420131] | ||
679 | |||
680 | * The `text` argument to the find_* methods is now called `string`, | ||
681 | which is more accurate. `text` still works, but `string` is the | ||
682 | argument described in the documentation. `text` may eventually | ||
683 | change its meaning, but not for a very long time. [bug=1366856] | ||
684 | |||
685 | * Changed the way soup objects work under copy.copy(). Copying a | ||
686 | NavigableString or a Tag will give you a new NavigableString that's | ||
687 | equal to the old one but not connected to the parse tree. Patch by | ||
688 | Martijn Peters. [bug=1307490] | ||
689 | |||
690 | * Started using a standard MIT license. [bug=1294662] | ||
691 | |||
692 | * Added a Chinese translation of the documentation by Delong .w. | ||
693 | |||
694 | New features: | ||
695 | |||
696 | * Introduced the select_one() method, which uses a CSS selector but | ||
697 | only returns the first match, instead of a list of | ||
698 | matches. [bug=1349367] | ||
699 | |||
700 | * You can now create a Tag object without specifying a | ||
701 | TreeBuilder. Patch by Martijn Pieters. [bug=1307471] | ||
702 | |||
703 | * You can now create a NavigableString or a subclass just by invoking | ||
704 | the constructor. [bug=1294315] | ||
705 | |||
706 | * Added an `exclude_encodings` argument to UnicodeDammit and to the | ||
707 | Beautiful Soup constructor, which lets you prohibit the detection of | ||
708 | an encoding that you know is wrong. [bug=1469408] | ||
709 | |||
710 | * The select() method now supports selector grouping. Patch by | ||
711 | Francisco Canas [bug=1191917] | ||
712 | |||
713 | Bug fixes: | ||
714 | |||
715 | * Fixed yet another problem that caused the html5lib tree builder to | ||
716 | create a disconnected parse tree. [bug=1237763] | ||
717 | |||
718 | * Force object_was_parsed() to keep the tree intact even when an element | ||
719 | from later in the document is moved into place. [bug=1430633] | ||
720 | |||
721 | * Fixed yet another bug that caused a disconnected tree when html5lib | ||
722 | copied an element from one part of the tree to another. [bug=1270611] | ||
723 | |||
724 | * Fixed a bug where Element.extract() could create an infinite loop in | ||
725 | the remaining tree. | ||
726 | |||
727 | * The select() method can now find tags whose names contain | ||
728 | dashes. Patch by Francisco Canas. [bug=1276211] | ||
729 | |||
730 | * The select() method can now find tags with attributes whose names | ||
731 | contain dashes. Patch by Marek Kapolka. [bug=1304007] | ||
732 | |||
733 | * Improved the lxml tree builder's handling of processing | ||
734 | instructions. [bug=1294645] | ||
735 | |||
736 | * Restored the helpful syntax error that happens when you try to | ||
737 | import the Python 2 edition of Beautiful Soup under Python | ||
738 | 3. [bug=1213387] | ||
739 | |||
740 | * In Python 3.4 and above, set the new convert_charrefs argument to | ||
741 | the html.parser constructor to avoid a warning and future | ||
742 | failures. Patch by Stefano Revera. [bug=1375721] | ||
743 | |||
744 | * The warning when you pass in a filename or URL as markup will now be | ||
745 | displayed correctly even if the filename or URL is a Unicode | ||
746 | string. [bug=1268888] | ||
747 | |||
748 | * If the initial <html> tag contains a CDATA list attribute such as | ||
749 | 'class', the html5lib tree builder will now turn its value into a | ||
750 | list, as it would with any other tag. [bug=1296481] | ||
751 | |||
752 | * Fixed an import error in Python 3.5 caused by the removal of the | ||
753 | HTMLParseError class. [bug=1420063] | ||
754 | |||
755 | * Improved docstring for encode_contents() and | ||
756 | decode_contents(). [bug=1441543] | ||
757 | |||
758 | * Fixed a crash in Unicode, Dammit's encoding detector when the name | ||
759 | of the encoding itself contained invalid bytes. [bug=1360913] | ||
760 | |||
761 | * Improved the exception raised when you call .unwrap() or | ||
762 | .replace_with() on an element that's not attached to a tree. | ||
763 | |||
764 | * Raise a NotImplementedError whenever an unsupported CSS pseudoclass | ||
765 | is used in select(). Previously some cases did not result in a | ||
766 | NotImplementedError. | ||
767 | |||
768 | * It's now possible to pickle a BeautifulSoup object no matter which | ||
769 | tree builder was used to create it. However, the only tree builder | ||
770 | that survives the pickling process is the HTMLParserTreeBuilder | ||
771 | ('html.parser'). If you unpickle a BeautifulSoup object created with | ||
772 | some other tree builder, soup.builder will be None. [bug=1231545] | ||
773 | |||
1 | = 4.3.2 (20131002) = | 774 | = 4.3.2 (20131002) = |
2 | 775 | ||
3 | * Fixed a bug in which short Unicode input was improperly encoded to | 776 | * Fixed a bug in which short Unicode input was improperly encoded to |
@@ -331,7 +1104,7 @@ | |||
331 | * Renamed Tag.nsprefix to Tag.prefix, for consistency with | 1104 | * Renamed Tag.nsprefix to Tag.prefix, for consistency with |
332 | NamespacedAttribute. | 1105 | NamespacedAttribute. |
333 | 1106 | ||
334 | * Fixed a test failure that occured on Python 3.x when chardet was | 1107 | * Fixed a test failure that occurred on Python 3.x when chardet was |
335 | installed. | 1108 | installed. |
336 | 1109 | ||
337 | * Made prettify() return Unicode by default, so it will look nice on | 1110 | * Made prettify() return Unicode by default, so it will look nice on |
@@ -365,7 +1138,7 @@ | |||
365 | 1138 | ||
366 | * Restored compatibility with Python 2.6. | 1139 | * Restored compatibility with Python 2.6. |
367 | 1140 | ||
368 | * The install process no longer installs docs or auxillary text files. | 1141 | * The install process no longer installs docs or auxiliary text files. |
369 | 1142 | ||
370 | * It's now possible to deepcopy a BeautifulSoup object created with | 1143 | * It's now possible to deepcopy a BeautifulSoup object created with |
371 | Python's built-in HTML parser. | 1144 | Python's built-in HTML parser. |
@@ -604,7 +1377,7 @@ Added an import that makes BS work in Python 2.3. | |||
604 | Fixed a UnicodeDecodeError when unpickling documents that contain | 1377 | Fixed a UnicodeDecodeError when unpickling documents that contain |
605 | non-ASCII characters. | 1378 | non-ASCII characters. |
606 | 1379 | ||
607 | Fixed a TypeError that occured in some circumstances when a tag | 1380 | Fixed a TypeError that occurred in some circumstances when a tag |
608 | contained no text. | 1381 | contained no text. |
609 | 1382 | ||
610 | Jump through hoops to avoid the use of chardet, which can be extremely | 1383 | Jump through hoops to avoid the use of chardet, which can be extremely |
diff --git a/bitbake/lib/bs4/COPYING.txt b/bitbake/lib/bs4/LICENSE index d668d13f04..08e3a9cf8c 100644 --- a/bitbake/lib/bs4/COPYING.txt +++ b/bitbake/lib/bs4/LICENSE | |||
@@ -1,6 +1,6 @@ | |||
1 | Beautiful Soup is made available under the MIT license: | 1 | Beautiful Soup is made available under the MIT license: |
2 | 2 | ||
3 | Copyright (c) 2004-2012 Leonard Richardson | 3 | Copyright (c) Leonard Richardson |
4 | 4 | ||
5 | Permission is hereby granted, free of charge, to any person obtaining | 5 | Permission is hereby granted, free of charge, to any person obtaining |
6 | a copy of this software and associated documentation files (the | 6 | a copy of this software and associated documentation files (the |
@@ -20,7 +20,12 @@ Beautiful Soup is made available under the MIT license: | |||
20 | BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN | 20 | BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN |
21 | ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN | 21 | ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN |
22 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | 22 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE |
23 | SOFTWARE, DAMMIT. | 23 | SOFTWARE. |
24 | 24 | ||
25 | Beautiful Soup incorporates code from the html5lib library, which is | 25 | Beautiful Soup incorporates code from the html5lib library, which is |
26 | also made available under the MIT license. | 26 | also made available under the MIT license. Copyright (c) James Graham |
27 | and other contributors | ||
28 | |||
29 | Beautiful Soup has an optional dependency on the soupsieve library, | ||
30 | which is also made available under the MIT license. Copyright (c) | ||
31 | Isaac Muse | ||
diff --git a/bitbake/lib/bs4/__init__.py b/bitbake/lib/bs4/__init__.py index e35725b86e..d8ad5e1dc1 100644 --- a/bitbake/lib/bs4/__init__.py +++ b/bitbake/lib/bs4/__init__.py | |||
@@ -1,65 +1,99 @@ | |||
1 | """Beautiful Soup | 1 | """Beautiful Soup Elixir and Tonic - "The Screen-Scraper's Friend". |
2 | Elixir and Tonic | 2 | |
3 | "The Screen-Scraper's Friend" | ||
4 | http://www.crummy.com/software/BeautifulSoup/ | 3 | http://www.crummy.com/software/BeautifulSoup/ |
5 | 4 | ||
6 | Beautiful Soup uses a pluggable XML or HTML parser to parse a | 5 | Beautiful Soup uses a pluggable XML or HTML parser to parse a |
7 | (possibly invalid) document into a tree representation. Beautiful Soup | 6 | (possibly invalid) document into a tree representation. Beautiful Soup |
8 | provides provides methods and Pythonic idioms that make it easy to | 7 | provides methods and Pythonic idioms that make it easy to navigate, |
9 | navigate, search, and modify the parse tree. | 8 | search, and modify the parse tree. |
10 | 9 | ||
11 | Beautiful Soup works with Python 2.6 and up. It works better if lxml | 10 | Beautiful Soup works with Python 3.6 and up. It works better if lxml |
12 | and/or html5lib is installed. | 11 | and/or html5lib is installed. |
13 | 12 | ||
14 | For more than you ever wanted to know about Beautiful Soup, see the | 13 | For more than you ever wanted to know about Beautiful Soup, see the |
15 | documentation: | 14 | documentation: http://www.crummy.com/software/BeautifulSoup/bs4/doc/ |
16 | http://www.crummy.com/software/BeautifulSoup/bs4/doc/ | ||
17 | """ | 15 | """ |
18 | 16 | ||
19 | __author__ = "Leonard Richardson (leonardr@segfault.org)" | 17 | __author__ = "Leonard Richardson (leonardr@segfault.org)" |
20 | __version__ = "4.4.1" | 18 | __version__ = "4.12.3" |
21 | __copyright__ = "Copyright (c) 2004-2015 Leonard Richardson" | 19 | __copyright__ = "Copyright (c) 2004-2024 Leonard Richardson" |
20 | # Use of this source code is governed by the MIT license. | ||
22 | __license__ = "MIT" | 21 | __license__ = "MIT" |
23 | 22 | ||
24 | __all__ = ['BeautifulSoup'] | 23 | __all__ = ['BeautifulSoup'] |
25 | 24 | ||
25 | from collections import Counter | ||
26 | import os | 26 | import os |
27 | import re | 27 | import re |
28 | import sys | ||
29 | import traceback | ||
28 | import warnings | 30 | import warnings |
29 | 31 | ||
30 | from .builder import builder_registry, ParserRejectedMarkup | 32 | # The very first thing we do is give a useful error if someone is |
33 | # running this code under Python 2. | ||
34 | if sys.version_info.major < 3: | ||
35 | raise ImportError('You are trying to use a Python 3-specific version of Beautiful Soup under Python 2. This will not work. The final version of Beautiful Soup to support Python 2 was 4.9.3.') | ||
36 | |||
37 | from .builder import ( | ||
38 | builder_registry, | ||
39 | ParserRejectedMarkup, | ||
40 | XMLParsedAsHTMLWarning, | ||
41 | HTMLParserTreeBuilder | ||
42 | ) | ||
31 | from .dammit import UnicodeDammit | 43 | from .dammit import UnicodeDammit |
32 | from .element import ( | 44 | from .element import ( |
33 | CData, | 45 | CData, |
34 | Comment, | 46 | Comment, |
47 | CSS, | ||
35 | DEFAULT_OUTPUT_ENCODING, | 48 | DEFAULT_OUTPUT_ENCODING, |
36 | Declaration, | 49 | Declaration, |
37 | Doctype, | 50 | Doctype, |
38 | NavigableString, | 51 | NavigableString, |
39 | PageElement, | 52 | PageElement, |
40 | ProcessingInstruction, | 53 | ProcessingInstruction, |
54 | PYTHON_SPECIFIC_ENCODINGS, | ||
41 | ResultSet, | 55 | ResultSet, |
56 | Script, | ||
57 | Stylesheet, | ||
42 | SoupStrainer, | 58 | SoupStrainer, |
43 | Tag, | 59 | Tag, |
60 | TemplateString, | ||
44 | ) | 61 | ) |
45 | 62 | ||
46 | # The very first thing we do is give a useful error if someone is | 63 | # Define some custom warnings. |
47 | # running this code under Python 3 without converting it. | 64 | class GuessedAtParserWarning(UserWarning): |
48 | 'You are trying to run the Python 2 version of Beautiful Soup under Python 3. This will not work.'!='You need to convert the code, either by installing it (`python setup.py install`) or by running 2to3 (`2to3 -w bs4`).' | 65 | """The warning issued when BeautifulSoup has to guess what parser to |
66 | use -- probably because no parser was specified in the constructor. | ||
67 | """ | ||
49 | 68 | ||
50 | class BeautifulSoup(Tag): | 69 | class MarkupResemblesLocatorWarning(UserWarning): |
70 | """The warning issued when BeautifulSoup is given 'markup' that | ||
71 | actually looks like a resource locator -- a URL or a path to a file | ||
72 | on disk. | ||
51 | """ | 73 | """ |
52 | This class defines the basic interface called by the tree builders. | ||
53 | 74 | ||
54 | These methods will be called by the parser: | 75 | |
55 | reset() | 76 | class BeautifulSoup(Tag): |
56 | feed(markup) | 77 | """A data structure representing a parsed HTML or XML document. |
78 | |||
79 | Most of the methods you'll call on a BeautifulSoup object are inherited from | ||
80 | PageElement or Tag. | ||
81 | |||
82 | Internally, this class defines the basic interface called by the | ||
83 | tree builders when converting an HTML/XML document into a data | ||
84 | structure. The interface abstracts away the differences between | ||
85 | parsers. To write a new tree builder, you'll need to understand | ||
86 | these methods as a whole. | ||
87 | |||
88 | These methods will be called by the BeautifulSoup constructor: | ||
89 | * reset() | ||
90 | * feed(markup) | ||
57 | 91 | ||
58 | The tree builder may call these methods from its feed() implementation: | 92 | The tree builder may call these methods from its feed() implementation: |
59 | handle_starttag(name, attrs) # See note about return value | 93 | * handle_starttag(name, attrs) # See note about return value |
60 | handle_endtag(name) | 94 | * handle_endtag(name) |
61 | handle_data(data) # Appends to the current data node | 95 | * handle_data(data) # Appends to the current data node |
62 | endData(containerClass=NavigableString) # Ends the current data node | 96 | * endData(containerClass) # Ends the current data node |
63 | 97 | ||
64 | No matter how complicated the underlying parser is, you should be | 98 | No matter how complicated the underlying parser is, you should be |
65 | able to build a tree using 'start tag' events, 'end tag' events, | 99 | able to build a tree using 'start tag' events, 'end tag' events, |
@@ -69,24 +103,77 @@ class BeautifulSoup(Tag): | |||
69 | like HTML's <br> tag), call handle_starttag and then | 103 | like HTML's <br> tag), call handle_starttag and then |
70 | handle_endtag. | 104 | handle_endtag. |
71 | """ | 105 | """ |
106 | |||
107 | # Since BeautifulSoup subclasses Tag, it's possible to treat it as | ||
108 | # a Tag with a .name. This name makes it clear the BeautifulSoup | ||
109 | # object isn't a real markup tag. | ||
72 | ROOT_TAG_NAME = '[document]' | 110 | ROOT_TAG_NAME = '[document]' |
73 | 111 | ||
74 | # If the end-user gives no indication which tree builder they | 112 | # If the end-user gives no indication which tree builder they |
75 | # want, look for one with these features. | 113 | # want, look for one with these features. |
76 | DEFAULT_BUILDER_FEATURES = ['html', 'fast'] | 114 | DEFAULT_BUILDER_FEATURES = ['html', 'fast'] |
77 | 115 | ||
116 | # A string containing all ASCII whitespace characters, used in | ||
117 | # endData() to detect data chunks that seem 'empty'. | ||
78 | ASCII_SPACES = '\x20\x0a\x09\x0c\x0d' | 118 | ASCII_SPACES = '\x20\x0a\x09\x0c\x0d' |
79 | 119 | ||
80 | NO_PARSER_SPECIFIED_WARNING = "No parser was explicitly specified, so I'm using the best available %(markup_type)s parser for this system (\"%(parser)s\"). This usually isn't a problem, but if you run this code on another system, or in a different virtual environment, it may use a different parser and behave differently.\n\nTo get rid of this warning, change this:\n\n BeautifulSoup([your markup])\n\nto this:\n\n BeautifulSoup([your markup], \"%(parser)s\")\n" | 120 | NO_PARSER_SPECIFIED_WARNING = "No parser was explicitly specified, so I'm using the best available %(markup_type)s parser for this system (\"%(parser)s\"). This usually isn't a problem, but if you run this code on another system, or in a different virtual environment, it may use a different parser and behave differently.\n\nThe code that caused this warning is on line %(line_number)s of the file %(filename)s. To get rid of this warning, pass the additional argument 'features=\"%(parser)s\"' to the BeautifulSoup constructor.\n" |
81 | 121 | ||
82 | def __init__(self, markup="", features=None, builder=None, | 122 | def __init__(self, markup="", features=None, builder=None, |
83 | parse_only=None, from_encoding=None, exclude_encodings=None, | 123 | parse_only=None, from_encoding=None, exclude_encodings=None, |
84 | **kwargs): | 124 | element_classes=None, **kwargs): |
85 | """The Soup object is initialized as the 'root tag', and the | 125 | """Constructor. |
86 | provided markup (which can be a string or a file-like object) | 126 | |
87 | is fed into the underlying parser.""" | 127 | :param markup: A string or a file-like object representing |
88 | 128 | markup to be parsed. | |
129 | |||
130 | :param features: Desirable features of the parser to be | ||
131 | used. This may be the name of a specific parser ("lxml", | ||
132 | "lxml-xml", "html.parser", or "html5lib") or it may be the | ||
133 | type of markup to be used ("html", "html5", "xml"). It's | ||
134 | recommended that you name a specific parser, so that | ||
135 | Beautiful Soup gives you the same results across platforms | ||
136 | and virtual environments. | ||
137 | |||
138 | :param builder: A TreeBuilder subclass to instantiate (or | ||
139 | instance to use) instead of looking one up based on | ||
140 | `features`. You only need to use this if you've implemented a | ||
141 | custom TreeBuilder. | ||
142 | |||
143 | :param parse_only: A SoupStrainer. Only parts of the document | ||
144 | matching the SoupStrainer will be considered. This is useful | ||
145 | when parsing part of a document that would otherwise be too | ||
146 | large to fit into memory. | ||
147 | |||
148 | :param from_encoding: A string indicating the encoding of the | ||
149 | document to be parsed. Pass this in if Beautiful Soup is | ||
150 | guessing wrongly about the document's encoding. | ||
151 | |||
152 | :param exclude_encodings: A list of strings indicating | ||
153 | encodings known to be wrong. Pass this in if you don't know | ||
154 | the document's encoding but you know Beautiful Soup's guess is | ||
155 | wrong. | ||
156 | |||
157 | :param element_classes: A dictionary mapping BeautifulSoup | ||
158 | classes like Tag and NavigableString, to other classes you'd | ||
159 | like to be instantiated instead as the parse tree is | ||
160 | built. This is useful for subclassing Tag or NavigableString | ||
161 | to modify default behavior. | ||
162 | |||
163 | :param kwargs: For backwards compatibility purposes, the | ||
164 | constructor accepts certain keyword arguments used in | ||
165 | Beautiful Soup 3. None of these arguments do anything in | ||
166 | Beautiful Soup 4; they will result in a warning and then be | ||
167 | ignored. | ||
168 | |||
169 | Apart from this, any keyword arguments passed into the | ||
170 | BeautifulSoup constructor are propagated to the TreeBuilder | ||
171 | constructor. This makes it possible to configure a | ||
172 | TreeBuilder by passing in arguments, not just by saying which | ||
173 | one to use. | ||
174 | """ | ||
89 | if 'convertEntities' in kwargs: | 175 | if 'convertEntities' in kwargs: |
176 | del kwargs['convertEntities'] | ||
90 | warnings.warn( | 177 | warnings.warn( |
91 | "BS4 does not respect the convertEntities argument to the " | 178 | "BS4 does not respect the convertEntities argument to the " |
92 | "BeautifulSoup constructor. Entities are always converted " | 179 | "BeautifulSoup constructor. Entities are always converted " |
@@ -125,10 +212,10 @@ class BeautifulSoup(Tag): | |||
125 | if old_name in kwargs: | 212 | if old_name in kwargs: |
126 | warnings.warn( | 213 | warnings.warn( |
127 | 'The "%s" argument to the BeautifulSoup constructor ' | 214 | 'The "%s" argument to the BeautifulSoup constructor ' |
128 | 'has been renamed to "%s."' % (old_name, new_name)) | 215 | 'has been renamed to "%s."' % (old_name, new_name), |
129 | value = kwargs[old_name] | 216 | DeprecationWarning, stacklevel=3 |
130 | del kwargs[old_name] | 217 | ) |
131 | return value | 218 | return kwargs.pop(old_name) |
132 | return None | 219 | return None |
133 | 220 | ||
134 | parse_only = parse_only or deprecated_argument( | 221 | parse_only = parse_only or deprecated_argument( |
@@ -137,13 +224,23 @@ class BeautifulSoup(Tag): | |||
137 | from_encoding = from_encoding or deprecated_argument( | 224 | from_encoding = from_encoding or deprecated_argument( |
138 | "fromEncoding", "from_encoding") | 225 | "fromEncoding", "from_encoding") |
139 | 226 | ||
140 | if len(kwargs) > 0: | 227 | if from_encoding and isinstance(markup, str): |
141 | arg = list(kwargs.keys()).pop() | 228 | warnings.warn("You provided Unicode markup but also provided a value for from_encoding. Your from_encoding will be ignored.") |
142 | raise TypeError( | 229 | from_encoding = None |
143 | "__init__() got an unexpected keyword argument '%s'" % arg) | 230 | |
144 | 231 | self.element_classes = element_classes or dict() | |
145 | if builder is None: | 232 | |
146 | original_features = features | 233 | # We need this information to track whether or not the builder |
234 | # was specified well enough that we can omit the 'you need to | ||
235 | # specify a parser' warning. | ||
236 | original_builder = builder | ||
237 | original_features = features | ||
238 | |||
239 | if isinstance(builder, type): | ||
240 | # A builder class was passed in; it needs to be instantiated. | ||
241 | builder_class = builder | ||
242 | builder = None | ||
243 | elif builder is None: | ||
147 | if isinstance(features, str): | 244 | if isinstance(features, str): |
148 | features = [features] | 245 | features = [features] |
149 | if features is None or len(features) == 0: | 246 | if features is None or len(features) == 0: |
@@ -154,85 +251,227 @@ class BeautifulSoup(Tag): | |||
154 | "Couldn't find a tree builder with the features you " | 251 | "Couldn't find a tree builder with the features you " |
155 | "requested: %s. Do you need to install a parser library?" | 252 | "requested: %s. Do you need to install a parser library?" |
156 | % ",".join(features)) | 253 | % ",".join(features)) |
157 | builder = builder_class() | 254 | |
158 | if not (original_features == builder.NAME or | 255 | # At this point either we have a TreeBuilder instance in |
159 | original_features in builder.ALTERNATE_NAMES): | 256 | # builder, or we have a builder_class that we can instantiate |
257 | # with the remaining **kwargs. | ||
258 | if builder is None: | ||
259 | builder = builder_class(**kwargs) | ||
260 | if not original_builder and not ( | ||
261 | original_features == builder.NAME or | ||
262 | original_features in builder.ALTERNATE_NAMES | ||
263 | ) and markup: | ||
264 | # The user did not tell us which TreeBuilder to use, | ||
265 | # and we had to guess. Issue a warning. | ||
160 | if builder.is_xml: | 266 | if builder.is_xml: |
161 | markup_type = "XML" | 267 | markup_type = "XML" |
162 | else: | 268 | else: |
163 | markup_type = "HTML" | 269 | markup_type = "HTML" |
164 | warnings.warn(self.NO_PARSER_SPECIFIED_WARNING % dict( | ||
165 | parser=builder.NAME, | ||
166 | markup_type=markup_type)) | ||
167 | 270 | ||
271 | # This code adapted from warnings.py so that we get the same line | ||
272 | # of code as our warnings.warn() call gets, even if the answer is wrong | ||
273 | # (as it may be in a multithreading situation). | ||
274 | caller = None | ||
275 | try: | ||
276 | caller = sys._getframe(1) | ||
277 | except ValueError: | ||
278 | pass | ||
279 | if caller: | ||
280 | globals = caller.f_globals | ||
281 | line_number = caller.f_lineno | ||
282 | else: | ||
283 | globals = sys.__dict__ | ||
284 | line_number= 1 | ||
285 | filename = globals.get('__file__') | ||
286 | if filename: | ||
287 | fnl = filename.lower() | ||
288 | if fnl.endswith((".pyc", ".pyo")): | ||
289 | filename = filename[:-1] | ||
290 | if filename: | ||
291 | # If there is no filename at all, the user is most likely in a REPL, | ||
292 | # and the warning is not necessary. | ||
293 | values = dict( | ||
294 | filename=filename, | ||
295 | line_number=line_number, | ||
296 | parser=builder.NAME, | ||
297 | markup_type=markup_type | ||
298 | ) | ||
299 | warnings.warn( | ||
300 | self.NO_PARSER_SPECIFIED_WARNING % values, | ||
301 | GuessedAtParserWarning, stacklevel=2 | ||
302 | ) | ||
303 | else: | ||
304 | if kwargs: | ||
305 | warnings.warn("Keyword arguments to the BeautifulSoup constructor will be ignored. These would normally be passed into the TreeBuilder constructor, but a TreeBuilder instance was passed in as `builder`.") | ||
306 | |||
168 | self.builder = builder | 307 | self.builder = builder |
169 | self.is_xml = builder.is_xml | 308 | self.is_xml = builder.is_xml |
170 | self.builder.soup = self | 309 | self.known_xml = self.is_xml |
171 | 310 | self._namespaces = dict() | |
172 | self.parse_only = parse_only | 311 | self.parse_only = parse_only |
173 | 312 | ||
174 | if hasattr(markup, 'read'): # It's a file-type object. | 313 | if hasattr(markup, 'read'): # It's a file-type object. |
175 | markup = markup.read() | 314 | markup = markup.read() |
176 | elif len(markup) <= 256: | 315 | elif len(markup) <= 256 and ( |
177 | # Print out warnings for a couple beginner problems | 316 | (isinstance(markup, bytes) and not b'<' in markup) |
317 | or (isinstance(markup, str) and not '<' in markup) | ||
318 | ): | ||
319 | # Issue warnings for a couple beginner problems | ||
178 | # involving passing non-markup to Beautiful Soup. | 320 | # involving passing non-markup to Beautiful Soup. |
179 | # Beautiful Soup will still parse the input as markup, | 321 | # Beautiful Soup will still parse the input as markup, |
180 | # just in case that's what the user really wants. | 322 | # since that is sometimes the intended behavior. |
181 | if (isinstance(markup, str) | 323 | if not self._markup_is_url(markup): |
182 | and not os.path.supports_unicode_filenames): | 324 | self._markup_resembles_filename(markup) |
183 | possible_filename = markup.encode("utf8") | ||
184 | else: | ||
185 | possible_filename = markup | ||
186 | is_file = False | ||
187 | try: | ||
188 | is_file = os.path.exists(possible_filename) | ||
189 | except Exception as e: | ||
190 | # This is almost certainly a problem involving | ||
191 | # characters not valid in filenames on this | ||
192 | # system. Just let it go. | ||
193 | pass | ||
194 | if is_file: | ||
195 | if isinstance(markup, str): | ||
196 | markup = markup.encode("utf8") | ||
197 | warnings.warn( | ||
198 | '"%s" looks like a filename, not markup. You should probably open this file and pass the filehandle into Beautiful Soup.' % markup) | ||
199 | if markup[:5] == "http:" or markup[:6] == "https:": | ||
200 | # TODO: This is ugly but I couldn't get it to work in | ||
201 | # Python 3 otherwise. | ||
202 | if ((isinstance(markup, bytes) and not b' ' in markup) | ||
203 | or (isinstance(markup, str) and not ' ' in markup)): | ||
204 | if isinstance(markup, str): | ||
205 | markup = markup.encode("utf8") | ||
206 | warnings.warn( | ||
207 | '"%s" looks like a URL. Beautiful Soup is not an HTTP client. You should probably use an HTTP client to get the document behind the URL, and feed that document to Beautiful Soup.' % markup) | ||
208 | 325 | ||
326 | rejections = [] | ||
327 | success = False | ||
209 | for (self.markup, self.original_encoding, self.declared_html_encoding, | 328 | for (self.markup, self.original_encoding, self.declared_html_encoding, |
210 | self.contains_replacement_characters) in ( | 329 | self.contains_replacement_characters) in ( |
211 | self.builder.prepare_markup( | 330 | self.builder.prepare_markup( |
212 | markup, from_encoding, exclude_encodings=exclude_encodings)): | 331 | markup, from_encoding, exclude_encodings=exclude_encodings)): |
213 | self.reset() | 332 | self.reset() |
333 | self.builder.initialize_soup(self) | ||
214 | try: | 334 | try: |
215 | self._feed() | 335 | self._feed() |
336 | success = True | ||
216 | break | 337 | break |
217 | except ParserRejectedMarkup: | 338 | except ParserRejectedMarkup as e: |
339 | rejections.append(e) | ||
218 | pass | 340 | pass |
219 | 341 | ||
342 | if not success: | ||
343 | other_exceptions = [str(e) for e in rejections] | ||
344 | raise ParserRejectedMarkup( | ||
345 | "The markup you provided was rejected by the parser. Trying a different parser or a different encoding may help.\n\nOriginal exception(s) from parser:\n " + "\n ".join(other_exceptions) | ||
346 | ) | ||
347 | |||
220 | # Clear out the markup and remove the builder's circular | 348 | # Clear out the markup and remove the builder's circular |
221 | # reference to this object. | 349 | # reference to this object. |
222 | self.markup = None | 350 | self.markup = None |
223 | self.builder.soup = None | 351 | self.builder.soup = None |
224 | 352 | ||
225 | def __copy__(self): | 353 | def _clone(self): |
226 | return type(self)(self.encode(), builder=self.builder) | 354 | """Create a new BeautifulSoup object with the same TreeBuilder, |
355 | but not associated with any markup. | ||
356 | |||
357 | This is the first step of the deepcopy process. | ||
358 | """ | ||
359 | clone = type(self)("", None, self.builder) | ||
227 | 360 | ||
361 | # Keep track of the encoding of the original document, | ||
362 | # since we won't be parsing it again. | ||
363 | clone.original_encoding = self.original_encoding | ||
364 | return clone | ||
365 | |||
228 | def __getstate__(self): | 366 | def __getstate__(self): |
229 | # Frequently a tree builder can't be pickled. | 367 | # Frequently a tree builder can't be pickled. |
230 | d = dict(self.__dict__) | 368 | d = dict(self.__dict__) |
231 | if 'builder' in d and not self.builder.picklable: | 369 | if 'builder' in d and d['builder'] is not None and not self.builder.picklable: |
232 | del d['builder'] | 370 | d['builder'] = type(self.builder) |
371 | # Store the contents as a Unicode string. | ||
372 | d['contents'] = [] | ||
373 | d['markup'] = self.decode() | ||
374 | |||
375 | # If _most_recent_element is present, it's a Tag object left | ||
376 | # over from initial parse. It might not be picklable and we | ||
377 | # don't need it. | ||
378 | if '_most_recent_element' in d: | ||
379 | del d['_most_recent_element'] | ||
233 | return d | 380 | return d |
234 | 381 | ||
382 | def __setstate__(self, state): | ||
383 | # If necessary, restore the TreeBuilder by looking it up. | ||
384 | self.__dict__ = state | ||
385 | if isinstance(self.builder, type): | ||
386 | self.builder = self.builder() | ||
387 | elif not self.builder: | ||
388 | # We don't know which builder was used to build this | ||
389 | # parse tree, so use a default we know is always available. | ||
390 | self.builder = HTMLParserTreeBuilder() | ||
391 | self.builder.soup = self | ||
392 | self.reset() | ||
393 | self._feed() | ||
394 | return state | ||
395 | |||
396 | |||
397 | @classmethod | ||
398 | def _decode_markup(cls, markup): | ||
399 | """Ensure `markup` is bytes so it's safe to send into warnings.warn. | ||
400 | |||
401 | TODO: warnings.warn had this problem back in 2010 but it might not | ||
402 | anymore. | ||
403 | """ | ||
404 | if isinstance(markup, bytes): | ||
405 | decoded = markup.decode('utf-8', 'replace') | ||
406 | else: | ||
407 | decoded = markup | ||
408 | return decoded | ||
409 | |||
410 | @classmethod | ||
411 | def _markup_is_url(cls, markup): | ||
412 | """Error-handling method to raise a warning if incoming markup looks | ||
413 | like a URL. | ||
414 | |||
415 | :param markup: A string. | ||
416 | :return: Whether or not the markup resembles a URL | ||
417 | closely enough to justify a warning. | ||
418 | """ | ||
419 | if isinstance(markup, bytes): | ||
420 | space = b' ' | ||
421 | cant_start_with = (b"http:", b"https:") | ||
422 | elif isinstance(markup, str): | ||
423 | space = ' ' | ||
424 | cant_start_with = ("http:", "https:") | ||
425 | else: | ||
426 | return False | ||
427 | |||
428 | if any(markup.startswith(prefix) for prefix in cant_start_with): | ||
429 | if not space in markup: | ||
430 | warnings.warn( | ||
431 | 'The input looks more like a URL than markup. You may want to use' | ||
432 | ' an HTTP client like requests to get the document behind' | ||
433 | ' the URL, and feed that document to Beautiful Soup.', | ||
434 | MarkupResemblesLocatorWarning, | ||
435 | stacklevel=3 | ||
436 | ) | ||
437 | return True | ||
438 | return False | ||
439 | |||
440 | @classmethod | ||
441 | def _markup_resembles_filename(cls, markup): | ||
442 | """Error-handling method to raise a warning if incoming markup | ||
443 | resembles a filename. | ||
444 | |||
445 | :param markup: A bytestring or string. | ||
446 | :return: Whether or not the markup resembles a filename | ||
447 | closely enough to justify a warning. | ||
448 | """ | ||
449 | path_characters = '/\\' | ||
450 | extensions = ['.html', '.htm', '.xml', '.xhtml', '.txt'] | ||
451 | if isinstance(markup, bytes): | ||
452 | path_characters = path_characters.encode("utf8") | ||
453 | extensions = [x.encode('utf8') for x in extensions] | ||
454 | filelike = False | ||
455 | if any(x in markup for x in path_characters): | ||
456 | filelike = True | ||
457 | else: | ||
458 | lower = markup.lower() | ||
459 | if any(lower.endswith(ext) for ext in extensions): | ||
460 | filelike = True | ||
461 | if filelike: | ||
462 | warnings.warn( | ||
463 | 'The input looks more like a filename than markup. You may' | ||
464 | ' want to open this file and pass the filehandle into' | ||
465 | ' Beautiful Soup.', | ||
466 | MarkupResemblesLocatorWarning, stacklevel=3 | ||
467 | ) | ||
468 | return True | ||
469 | return False | ||
470 | |||
235 | def _feed(self): | 471 | def _feed(self): |
472 | """Internal method that parses previously set markup, creating a large | ||
473 | number of Tag and NavigableString objects. | ||
474 | """ | ||
236 | # Convert the document to Unicode. | 475 | # Convert the document to Unicode. |
237 | self.builder.reset() | 476 | self.builder.reset() |
238 | 477 | ||
@@ -243,48 +482,111 @@ class BeautifulSoup(Tag): | |||
243 | self.popTag() | 482 | self.popTag() |
244 | 483 | ||
245 | def reset(self): | 484 | def reset(self): |
485 | """Reset this object to a state as though it had never parsed any | ||
486 | markup. | ||
487 | """ | ||
246 | Tag.__init__(self, self, self.builder, self.ROOT_TAG_NAME) | 488 | Tag.__init__(self, self, self.builder, self.ROOT_TAG_NAME) |
247 | self.hidden = 1 | 489 | self.hidden = 1 |
248 | self.builder.reset() | 490 | self.builder.reset() |
249 | self.current_data = [] | 491 | self.current_data = [] |
250 | self.currentTag = None | 492 | self.currentTag = None |
251 | self.tagStack = [] | 493 | self.tagStack = [] |
494 | self.open_tag_counter = Counter() | ||
252 | self.preserve_whitespace_tag_stack = [] | 495 | self.preserve_whitespace_tag_stack = [] |
496 | self.string_container_stack = [] | ||
497 | self._most_recent_element = None | ||
253 | self.pushTag(self) | 498 | self.pushTag(self) |
254 | 499 | ||
255 | def new_tag(self, name, namespace=None, nsprefix=None, **attrs): | 500 | def new_tag(self, name, namespace=None, nsprefix=None, attrs={}, |
256 | """Create a new tag associated with this soup.""" | 501 | sourceline=None, sourcepos=None, **kwattrs): |
257 | return Tag(None, self.builder, name, namespace, nsprefix, attrs) | 502 | """Create a new Tag associated with this BeautifulSoup object. |
503 | |||
504 | :param name: The name of the new Tag. | ||
505 | :param namespace: The URI of the new Tag's XML namespace, if any. | ||
506 | :param prefix: The prefix for the new Tag's XML namespace, if any. | ||
507 | :param attrs: A dictionary of this Tag's attribute values; can | ||
508 | be used instead of `kwattrs` for attributes like 'class' | ||
509 | that are reserved words in Python. | ||
510 | :param sourceline: The line number where this tag was | ||
511 | (purportedly) found in its source document. | ||
512 | :param sourcepos: The character position within `sourceline` where this | ||
513 | tag was (purportedly) found. | ||
514 | :param kwattrs: Keyword arguments for the new Tag's attribute values. | ||
258 | 515 | ||
259 | def new_string(self, s, subclass=NavigableString): | 516 | """ |
260 | """Create a new NavigableString associated with this soup.""" | 517 | kwattrs.update(attrs) |
261 | return subclass(s) | 518 | return self.element_classes.get(Tag, Tag)( |
519 | None, self.builder, name, namespace, nsprefix, kwattrs, | ||
520 | sourceline=sourceline, sourcepos=sourcepos | ||
521 | ) | ||
522 | |||
523 | def string_container(self, base_class=None): | ||
524 | container = base_class or NavigableString | ||
525 | |||
526 | # There may be a general override of NavigableString. | ||
527 | container = self.element_classes.get( | ||
528 | container, container | ||
529 | ) | ||
530 | |||
531 | # On top of that, we may be inside a tag that needs a special | ||
532 | # container class. | ||
533 | if self.string_container_stack and container is NavigableString: | ||
534 | container = self.builder.string_containers.get( | ||
535 | self.string_container_stack[-1].name, container | ||
536 | ) | ||
537 | return container | ||
538 | |||
539 | def new_string(self, s, subclass=None): | ||
540 | """Create a new NavigableString associated with this BeautifulSoup | ||
541 | object. | ||
542 | """ | ||
543 | container = self.string_container(subclass) | ||
544 | return container(s) | ||
262 | 545 | ||
263 | def insert_before(self, successor): | 546 | def insert_before(self, *args): |
547 | """This method is part of the PageElement API, but `BeautifulSoup` doesn't implement | ||
548 | it because there is nothing before or after it in the parse tree. | ||
549 | """ | ||
264 | raise NotImplementedError("BeautifulSoup objects don't support insert_before().") | 550 | raise NotImplementedError("BeautifulSoup objects don't support insert_before().") |
265 | 551 | ||
266 | def insert_after(self, successor): | 552 | def insert_after(self, *args): |
553 | """This method is part of the PageElement API, but `BeautifulSoup` doesn't implement | ||
554 | it because there is nothing before or after it in the parse tree. | ||
555 | """ | ||
267 | raise NotImplementedError("BeautifulSoup objects don't support insert_after().") | 556 | raise NotImplementedError("BeautifulSoup objects don't support insert_after().") |
268 | 557 | ||
269 | def popTag(self): | 558 | def popTag(self): |
559 | """Internal method called by _popToTag when a tag is closed.""" | ||
270 | tag = self.tagStack.pop() | 560 | tag = self.tagStack.pop() |
561 | if tag.name in self.open_tag_counter: | ||
562 | self.open_tag_counter[tag.name] -= 1 | ||
271 | if self.preserve_whitespace_tag_stack and tag == self.preserve_whitespace_tag_stack[-1]: | 563 | if self.preserve_whitespace_tag_stack and tag == self.preserve_whitespace_tag_stack[-1]: |
272 | self.preserve_whitespace_tag_stack.pop() | 564 | self.preserve_whitespace_tag_stack.pop() |
273 | #print "Pop", tag.name | 565 | if self.string_container_stack and tag == self.string_container_stack[-1]: |
566 | self.string_container_stack.pop() | ||
567 | #print("Pop", tag.name) | ||
274 | if self.tagStack: | 568 | if self.tagStack: |
275 | self.currentTag = self.tagStack[-1] | 569 | self.currentTag = self.tagStack[-1] |
276 | return self.currentTag | 570 | return self.currentTag |
277 | 571 | ||
278 | def pushTag(self, tag): | 572 | def pushTag(self, tag): |
279 | #print "Push", tag.name | 573 | """Internal method called by handle_starttag when a tag is opened.""" |
280 | if self.currentTag: | 574 | #print("Push", tag.name) |
575 | if self.currentTag is not None: | ||
281 | self.currentTag.contents.append(tag) | 576 | self.currentTag.contents.append(tag) |
282 | self.tagStack.append(tag) | 577 | self.tagStack.append(tag) |
283 | self.currentTag = self.tagStack[-1] | 578 | self.currentTag = self.tagStack[-1] |
579 | if tag.name != self.ROOT_TAG_NAME: | ||
580 | self.open_tag_counter[tag.name] += 1 | ||
284 | if tag.name in self.builder.preserve_whitespace_tags: | 581 | if tag.name in self.builder.preserve_whitespace_tags: |
285 | self.preserve_whitespace_tag_stack.append(tag) | 582 | self.preserve_whitespace_tag_stack.append(tag) |
583 | if tag.name in self.builder.string_containers: | ||
584 | self.string_container_stack.append(tag) | ||
286 | 585 | ||
287 | def endData(self, containerClass=NavigableString): | 586 | def endData(self, containerClass=None): |
587 | """Method called by the TreeBuilder when the end of a data segment | ||
588 | occurs. | ||
589 | """ | ||
288 | if self.current_data: | 590 | if self.current_data: |
289 | current_data = ''.join(self.current_data) | 591 | current_data = ''.join(self.current_data) |
290 | # If whitespace is not preserved, and this string contains | 592 | # If whitespace is not preserved, and this string contains |
@@ -311,61 +613,93 @@ class BeautifulSoup(Tag): | |||
311 | not self.parse_only.search(current_data)): | 613 | not self.parse_only.search(current_data)): |
312 | return | 614 | return |
313 | 615 | ||
616 | containerClass = self.string_container(containerClass) | ||
314 | o = containerClass(current_data) | 617 | o = containerClass(current_data) |
315 | self.object_was_parsed(o) | 618 | self.object_was_parsed(o) |
316 | 619 | ||
317 | def object_was_parsed(self, o, parent=None, most_recent_element=None): | 620 | def object_was_parsed(self, o, parent=None, most_recent_element=None): |
318 | """Add an object to the parse tree.""" | 621 | """Method called by the TreeBuilder to integrate an object into the parse tree.""" |
319 | parent = parent or self.currentTag | 622 | if parent is None: |
320 | previous_element = most_recent_element or self._most_recent_element | 623 | parent = self.currentTag |
624 | if most_recent_element is not None: | ||
625 | previous_element = most_recent_element | ||
626 | else: | ||
627 | previous_element = self._most_recent_element | ||
321 | 628 | ||
322 | next_element = previous_sibling = next_sibling = None | 629 | next_element = previous_sibling = next_sibling = None |
323 | if isinstance(o, Tag): | 630 | if isinstance(o, Tag): |
324 | next_element = o.next_element | 631 | next_element = o.next_element |
325 | next_sibling = o.next_sibling | 632 | next_sibling = o.next_sibling |
326 | previous_sibling = o.previous_sibling | 633 | previous_sibling = o.previous_sibling |
327 | if not previous_element: | 634 | if previous_element is None: |
328 | previous_element = o.previous_element | 635 | previous_element = o.previous_element |
329 | 636 | ||
637 | fix = parent.next_element is not None | ||
638 | |||
330 | o.setup(parent, previous_element, next_element, previous_sibling, next_sibling) | 639 | o.setup(parent, previous_element, next_element, previous_sibling, next_sibling) |
331 | 640 | ||
332 | self._most_recent_element = o | 641 | self._most_recent_element = o |
333 | parent.contents.append(o) | 642 | parent.contents.append(o) |
334 | 643 | ||
335 | if parent.next_sibling: | 644 | # Check if we are inserting into an already parsed node. |
336 | # This node is being inserted into an element that has | 645 | if fix: |
337 | # already been parsed. Deal with any dangling references. | 646 | self._linkage_fixer(parent) |
338 | index = parent.contents.index(o) | 647 | |
339 | if index == 0: | 648 | def _linkage_fixer(self, el): |
340 | previous_element = parent | 649 | """Make sure linkage of this fragment is sound.""" |
341 | previous_sibling = None | 650 | |
342 | else: | 651 | first = el.contents[0] |
343 | previous_element = previous_sibling = parent.contents[index-1] | 652 | child = el.contents[-1] |
344 | if index == len(parent.contents)-1: | 653 | descendant = child |
345 | next_element = parent.next_sibling | 654 | |
346 | next_sibling = None | 655 | if child is first and el.parent is not None: |
347 | else: | 656 | # Parent should be linked to first child |
348 | next_element = next_sibling = parent.contents[index+1] | 657 | el.next_element = child |
349 | 658 | # We are no longer linked to whatever this element is | |
350 | o.previous_element = previous_element | 659 | prev_el = child.previous_element |
351 | if previous_element: | 660 | if prev_el is not None and prev_el is not el: |
352 | previous_element.next_element = o | 661 | prev_el.next_element = None |
353 | o.next_element = next_element | 662 | # First child should be linked to the parent, and no previous siblings. |
354 | if next_element: | 663 | child.previous_element = el |
355 | next_element.previous_element = o | 664 | child.previous_sibling = None |
356 | o.next_sibling = next_sibling | 665 | |
357 | if next_sibling: | 666 | # We have no sibling as we've been appended as the last. |
358 | next_sibling.previous_sibling = o | 667 | child.next_sibling = None |
359 | o.previous_sibling = previous_sibling | 668 | |
360 | if previous_sibling: | 669 | # This index is a tag, dig deeper for a "last descendant" |
361 | previous_sibling.next_sibling = o | 670 | if isinstance(child, Tag) and child.contents: |
671 | descendant = child._last_descendant(False) | ||
672 | |||
673 | # As the final step, link last descendant. It should be linked | ||
674 | # to the parent's next sibling (if found), else walk up the chain | ||
675 | # and find a parent with a sibling. It should have no next sibling. | ||
676 | descendant.next_element = None | ||
677 | descendant.next_sibling = None | ||
678 | target = el | ||
679 | while True: | ||
680 | if target is None: | ||
681 | break | ||
682 | elif target.next_sibling is not None: | ||
683 | descendant.next_element = target.next_sibling | ||
684 | target.next_sibling.previous_element = child | ||
685 | break | ||
686 | target = target.parent | ||
362 | 687 | ||
363 | def _popToTag(self, name, nsprefix=None, inclusivePop=True): | 688 | def _popToTag(self, name, nsprefix=None, inclusivePop=True): |
364 | """Pops the tag stack up to and including the most recent | 689 | """Pops the tag stack up to and including the most recent |
365 | instance of the given tag. If inclusivePop is false, pops the tag | 690 | instance of the given tag. |
366 | stack up to but *not* including the most recent instqance of | 691 | |
367 | the given tag.""" | 692 | If there are no open tags with the given name, nothing will be |
368 | #print "Popping to %s" % name | 693 | popped. |
694 | |||
695 | :param name: Pop up to the most recent tag with this name. | ||
696 | :param nsprefix: The namespace prefix that goes with `name`. | ||
697 | :param inclusivePop: If this is false, pops the tag stack up | ||
698 | to but *not* including the most recent instance of the | ||
699 | given tag. | ||
700 | |||
701 | """ | ||
702 | #print("Popping to %s" % name) | ||
369 | if name == self.ROOT_TAG_NAME: | 703 | if name == self.ROOT_TAG_NAME: |
370 | # The BeautifulSoup object itself can never be popped. | 704 | # The BeautifulSoup object itself can never be popped. |
371 | return | 705 | return |
@@ -374,6 +708,8 @@ class BeautifulSoup(Tag): | |||
374 | 708 | ||
375 | stack_size = len(self.tagStack) | 709 | stack_size = len(self.tagStack) |
376 | for i in range(stack_size - 1, 0, -1): | 710 | for i in range(stack_size - 1, 0, -1): |
711 | if not self.open_tag_counter.get(name): | ||
712 | break | ||
377 | t = self.tagStack[i] | 713 | t = self.tagStack[i] |
378 | if (name == t.name and nsprefix == t.prefix): | 714 | if (name == t.name and nsprefix == t.prefix): |
379 | if inclusivePop: | 715 | if inclusivePop: |
@@ -383,16 +719,26 @@ class BeautifulSoup(Tag): | |||
383 | 719 | ||
384 | return most_recently_popped | 720 | return most_recently_popped |
385 | 721 | ||
386 | def handle_starttag(self, name, namespace, nsprefix, attrs): | 722 | def handle_starttag(self, name, namespace, nsprefix, attrs, sourceline=None, |
387 | """Push a start tag on to the stack. | 723 | sourcepos=None, namespaces=None): |
388 | 724 | """Called by the tree builder when a new tag is encountered. | |
389 | If this method returns None, the tag was rejected by the | 725 | |
390 | SoupStrainer. You should proceed as if the tag had not occured | 726 | :param name: Name of the tag. |
727 | :param nsprefix: Namespace prefix for the tag. | ||
728 | :param attrs: A dictionary of attribute values. | ||
729 | :param sourceline: The line number where this tag was found in its | ||
730 | source document. | ||
731 | :param sourcepos: The character position within `sourceline` where this | ||
732 | tag was found. | ||
733 | :param namespaces: A dictionary of all namespace prefix mappings | ||
734 | currently in scope in the document. | ||
735 | |||
736 | If this method returns None, the tag was rejected by an active | ||
737 | SoupStrainer. You should proceed as if the tag had not occurred | ||
391 | in the document. For instance, if this was a self-closing tag, | 738 | in the document. For instance, if this was a self-closing tag, |
392 | don't call handle_endtag. | 739 | don't call handle_endtag. |
393 | """ | 740 | """ |
394 | 741 | # print("Start tag %s: %s" % (name, attrs)) | |
395 | # print "Start tag %s: %s" % (name, attrs) | ||
396 | self.endData() | 742 | self.endData() |
397 | 743 | ||
398 | if (self.parse_only and len(self.tagStack) <= 1 | 744 | if (self.parse_only and len(self.tagStack) <= 1 |
@@ -400,34 +746,54 @@ class BeautifulSoup(Tag): | |||
400 | or not self.parse_only.search_tag(name, attrs))): | 746 | or not self.parse_only.search_tag(name, attrs))): |
401 | return None | 747 | return None |
402 | 748 | ||
403 | tag = Tag(self, self.builder, name, namespace, nsprefix, attrs, | 749 | tag = self.element_classes.get(Tag, Tag)( |
404 | self.currentTag, self._most_recent_element) | 750 | self, self.builder, name, namespace, nsprefix, attrs, |
751 | self.currentTag, self._most_recent_element, | ||
752 | sourceline=sourceline, sourcepos=sourcepos, | ||
753 | namespaces=namespaces | ||
754 | ) | ||
405 | if tag is None: | 755 | if tag is None: |
406 | return tag | 756 | return tag |
407 | if self._most_recent_element: | 757 | if self._most_recent_element is not None: |
408 | self._most_recent_element.next_element = tag | 758 | self._most_recent_element.next_element = tag |
409 | self._most_recent_element = tag | 759 | self._most_recent_element = tag |
410 | self.pushTag(tag) | 760 | self.pushTag(tag) |
411 | return tag | 761 | return tag |
412 | 762 | ||
413 | def handle_endtag(self, name, nsprefix=None): | 763 | def handle_endtag(self, name, nsprefix=None): |
414 | #print "End tag: " + name | 764 | """Called by the tree builder when an ending tag is encountered. |
765 | |||
766 | :param name: Name of the tag. | ||
767 | :param nsprefix: Namespace prefix for the tag. | ||
768 | """ | ||
769 | #print("End tag: " + name) | ||
415 | self.endData() | 770 | self.endData() |
416 | self._popToTag(name, nsprefix) | 771 | self._popToTag(name, nsprefix) |
417 | 772 | ||
418 | def handle_data(self, data): | 773 | def handle_data(self, data): |
774 | """Called by the tree builder when a chunk of textual data is encountered.""" | ||
419 | self.current_data.append(data) | 775 | self.current_data.append(data) |
420 | 776 | ||
421 | def decode(self, pretty_print=False, | 777 | def decode(self, pretty_print=False, |
422 | eventual_encoding=DEFAULT_OUTPUT_ENCODING, | 778 | eventual_encoding=DEFAULT_OUTPUT_ENCODING, |
423 | formatter="minimal"): | 779 | formatter="minimal", iterator=None): |
424 | """Returns a string or Unicode representation of this document. | 780 | """Returns a string or Unicode representation of the parse tree |
425 | To get Unicode, pass None for encoding.""" | 781 | as an HTML or XML document. |
426 | 782 | ||
783 | :param pretty_print: If this is True, indentation will be used to | ||
784 | make the document more readable. | ||
785 | :param eventual_encoding: The encoding of the final document. | ||
786 | If this is None, the document will be a Unicode string. | ||
787 | """ | ||
427 | if self.is_xml: | 788 | if self.is_xml: |
428 | # Print the XML declaration | 789 | # Print the XML declaration |
429 | encoding_part = '' | 790 | encoding_part = '' |
430 | if eventual_encoding is not None: | 791 | if eventual_encoding in PYTHON_SPECIFIC_ENCODINGS: |
792 | # This is a special Python encoding; it can't actually | ||
793 | # go into an XML document because it means nothing | ||
794 | # outside of Python. | ||
795 | eventual_encoding = None | ||
796 | if eventual_encoding != None: | ||
431 | encoding_part = ' encoding="%s"' % eventual_encoding | 797 | encoding_part = ' encoding="%s"' % eventual_encoding |
432 | prefix = '<?xml version="1.0"%s?>\n' % encoding_part | 798 | prefix = '<?xml version="1.0"%s?>\n' % encoding_part |
433 | else: | 799 | else: |
@@ -437,9 +803,9 @@ class BeautifulSoup(Tag): | |||
437 | else: | 803 | else: |
438 | indent_level = 0 | 804 | indent_level = 0 |
439 | return prefix + super(BeautifulSoup, self).decode( | 805 | return prefix + super(BeautifulSoup, self).decode( |
440 | indent_level, eventual_encoding, formatter) | 806 | indent_level, eventual_encoding, formatter, iterator) |
441 | 807 | ||
442 | # Alias to make it easier to type import: 'from bs4 import _soup' | 808 | # Aliases to make it easier to get started quickly, e.g. 'from bs4 import _soup' |
443 | _s = BeautifulSoup | 809 | _s = BeautifulSoup |
444 | _soup = BeautifulSoup | 810 | _soup = BeautifulSoup |
445 | 811 | ||
@@ -450,19 +816,25 @@ class BeautifulStoneSoup(BeautifulSoup): | |||
450 | kwargs['features'] = 'xml' | 816 | kwargs['features'] = 'xml' |
451 | warnings.warn( | 817 | warnings.warn( |
452 | 'The BeautifulStoneSoup class is deprecated. Instead of using ' | 818 | 'The BeautifulStoneSoup class is deprecated. Instead of using ' |
453 | 'it, pass features="xml" into the BeautifulSoup constructor.') | 819 | 'it, pass features="xml" into the BeautifulSoup constructor.', |
820 | DeprecationWarning, stacklevel=2 | ||
821 | ) | ||
454 | super(BeautifulStoneSoup, self).__init__(*args, **kwargs) | 822 | super(BeautifulStoneSoup, self).__init__(*args, **kwargs) |
455 | 823 | ||
456 | 824 | ||
457 | class StopParsing(Exception): | 825 | class StopParsing(Exception): |
826 | """Exception raised by a TreeBuilder if it's unable to continue parsing.""" | ||
458 | pass | 827 | pass |
459 | 828 | ||
460 | class FeatureNotFound(ValueError): | 829 | class FeatureNotFound(ValueError): |
830 | """Exception raised by the BeautifulSoup constructor if no parser with the | ||
831 | requested features is found. | ||
832 | """ | ||
461 | pass | 833 | pass |
462 | 834 | ||
463 | 835 | ||
464 | #By default, act as an HTML pretty-printer. | 836 | #If this file is run as a script, act as an HTML pretty-printer. |
465 | if __name__ == '__main__': | 837 | if __name__ == '__main__': |
466 | import sys | 838 | import sys |
467 | soup = BeautifulSoup(sys.stdin) | 839 | soup = BeautifulSoup(sys.stdin) |
468 | print(soup.prettify()) | 840 | print((soup.prettify())) |
diff --git a/bitbake/lib/bs4/builder/__init__.py b/bitbake/lib/bs4/builder/__init__.py index 6ccd4d23d6..ffb31fc25e 100644 --- a/bitbake/lib/bs4/builder/__init__.py +++ b/bitbake/lib/bs4/builder/__init__.py | |||
@@ -1,11 +1,21 @@ | |||
1 | # Use of this source code is governed by the MIT license. | ||
2 | __license__ = "MIT" | ||
3 | |||
1 | from collections import defaultdict | 4 | from collections import defaultdict |
2 | import itertools | 5 | import itertools |
6 | import re | ||
7 | import warnings | ||
3 | import sys | 8 | import sys |
4 | from bs4.element import ( | 9 | from bs4.element import ( |
5 | CharsetMetaAttributeValue, | 10 | CharsetMetaAttributeValue, |
6 | ContentMetaAttributeValue, | 11 | ContentMetaAttributeValue, |
7 | whitespace_re | 12 | RubyParenthesisString, |
8 | ) | 13 | RubyTextString, |
14 | Stylesheet, | ||
15 | Script, | ||
16 | TemplateString, | ||
17 | nonwhitespace_re | ||
18 | ) | ||
9 | 19 | ||
10 | __all__ = [ | 20 | __all__ = [ |
11 | 'HTMLTreeBuilder', | 21 | 'HTMLTreeBuilder', |
@@ -22,20 +32,41 @@ XML = 'xml' | |||
22 | HTML = 'html' | 32 | HTML = 'html' |
23 | HTML_5 = 'html5' | 33 | HTML_5 = 'html5' |
24 | 34 | ||
35 | class XMLParsedAsHTMLWarning(UserWarning): | ||
36 | """The warning issued when an HTML parser is used to parse | ||
37 | XML that is not XHTML. | ||
38 | """ | ||
39 | MESSAGE = """It looks like you're parsing an XML document using an HTML parser. If this really is an HTML document (maybe it's XHTML?), you can ignore or filter this warning. If it's XML, you should know that using an XML parser will be more reliable. To parse this document as XML, make sure you have the lxml package installed, and pass the keyword argument `features="xml"` into the BeautifulSoup constructor.""" | ||
40 | |||
25 | 41 | ||
26 | class TreeBuilderRegistry(object): | 42 | class TreeBuilderRegistry(object): |
27 | 43 | """A way of looking up TreeBuilder subclasses by their name or by desired | |
44 | features. | ||
45 | """ | ||
46 | |||
28 | def __init__(self): | 47 | def __init__(self): |
29 | self.builders_for_feature = defaultdict(list) | 48 | self.builders_for_feature = defaultdict(list) |
30 | self.builders = [] | 49 | self.builders = [] |
31 | 50 | ||
32 | def register(self, treebuilder_class): | 51 | def register(self, treebuilder_class): |
33 | """Register a treebuilder based on its advertised features.""" | 52 | """Register a treebuilder based on its advertised features. |
53 | |||
54 | :param treebuilder_class: A subclass of TreeBuilder. Its .features | ||
55 | attribute should list its features. | ||
56 | """ | ||
34 | for feature in treebuilder_class.features: | 57 | for feature in treebuilder_class.features: |
35 | self.builders_for_feature[feature].insert(0, treebuilder_class) | 58 | self.builders_for_feature[feature].insert(0, treebuilder_class) |
36 | self.builders.insert(0, treebuilder_class) | 59 | self.builders.insert(0, treebuilder_class) |
37 | 60 | ||
38 | def lookup(self, *features): | 61 | def lookup(self, *features): |
62 | """Look up a TreeBuilder subclass with the desired features. | ||
63 | |||
64 | :param features: A list of features to look for. If none are | ||
65 | provided, the most recently registered TreeBuilder subclass | ||
66 | will be used. | ||
67 | :return: A TreeBuilder subclass, or None if there's no | ||
68 | registered subclass with all the requested features. | ||
69 | """ | ||
39 | if len(self.builders) == 0: | 70 | if len(self.builders) == 0: |
40 | # There are no builders at all. | 71 | # There are no builders at all. |
41 | return None | 72 | return None |
@@ -78,7 +109,7 @@ class TreeBuilderRegistry(object): | |||
78 | builder_registry = TreeBuilderRegistry() | 109 | builder_registry = TreeBuilderRegistry() |
79 | 110 | ||
80 | class TreeBuilder(object): | 111 | class TreeBuilder(object): |
81 | """Turn a document into a Beautiful Soup object tree.""" | 112 | """Turn a textual document into a Beautiful Soup object tree.""" |
82 | 113 | ||
83 | NAME = "[Unknown tree builder]" | 114 | NAME = "[Unknown tree builder]" |
84 | ALTERNATE_NAMES = [] | 115 | ALTERNATE_NAMES = [] |
@@ -86,19 +117,89 @@ class TreeBuilder(object): | |||
86 | 117 | ||
87 | is_xml = False | 118 | is_xml = False |
88 | picklable = False | 119 | picklable = False |
89 | preserve_whitespace_tags = set() | ||
90 | empty_element_tags = None # A tag will be considered an empty-element | 120 | empty_element_tags = None # A tag will be considered an empty-element |
91 | # tag when and only when it has no contents. | 121 | # tag when and only when it has no contents. |
92 | 122 | ||
93 | # A value for these tag/attribute combinations is a space- or | 123 | # A value for these tag/attribute combinations is a space- or |
94 | # comma-separated list of CDATA, rather than a single CDATA. | 124 | # comma-separated list of CDATA, rather than a single CDATA. |
95 | cdata_list_attributes = {} | 125 | DEFAULT_CDATA_LIST_ATTRIBUTES = defaultdict(list) |
96 | 126 | ||
97 | 127 | # Whitespace should be preserved inside these tags. | |
98 | def __init__(self): | 128 | DEFAULT_PRESERVE_WHITESPACE_TAGS = set() |
129 | |||
130 | # The textual contents of tags with these names should be | ||
131 | # instantiated with some class other than NavigableString. | ||
132 | DEFAULT_STRING_CONTAINERS = {} | ||
133 | |||
134 | USE_DEFAULT = object() | ||
135 | |||
136 | # Most parsers don't keep track of line numbers. | ||
137 | TRACKS_LINE_NUMBERS = False | ||
138 | |||
139 | def __init__(self, multi_valued_attributes=USE_DEFAULT, | ||
140 | preserve_whitespace_tags=USE_DEFAULT, | ||
141 | store_line_numbers=USE_DEFAULT, | ||
142 | string_containers=USE_DEFAULT, | ||
143 | ): | ||
144 | """Constructor. | ||
145 | |||
146 | :param multi_valued_attributes: If this is set to None, the | ||
147 | TreeBuilder will not turn any values for attributes like | ||
148 | 'class' into lists. Setting this to a dictionary will | ||
149 | customize this behavior; look at DEFAULT_CDATA_LIST_ATTRIBUTES | ||
150 | for an example. | ||
151 | |||
152 | Internally, these are called "CDATA list attributes", but that | ||
153 | probably doesn't make sense to an end-user, so the argument name | ||
154 | is `multi_valued_attributes`. | ||
155 | |||
156 | :param preserve_whitespace_tags: A list of tags to treat | ||
157 | the way <pre> tags are treated in HTML. Tags in this list | ||
158 | are immune from pretty-printing; their contents will always be | ||
159 | output as-is. | ||
160 | |||
161 | :param string_containers: A dictionary mapping tag names to | ||
162 | the classes that should be instantiated to contain the textual | ||
163 | contents of those tags. The default is to use NavigableString | ||
164 | for every tag, no matter what the name. You can override the | ||
165 | default by changing DEFAULT_STRING_CONTAINERS. | ||
166 | |||
167 | :param store_line_numbers: If the parser keeps track of the | ||
168 | line numbers and positions of the original markup, that | ||
169 | information will, by default, be stored in each corresponding | ||
170 | `Tag` object. You can turn this off by passing | ||
171 | store_line_numbers=False. If the parser you're using doesn't | ||
172 | keep track of this information, then setting store_line_numbers=True | ||
173 | will do nothing. | ||
174 | """ | ||
99 | self.soup = None | 175 | self.soup = None |
100 | 176 | if multi_valued_attributes is self.USE_DEFAULT: | |
177 | multi_valued_attributes = self.DEFAULT_CDATA_LIST_ATTRIBUTES | ||
178 | self.cdata_list_attributes = multi_valued_attributes | ||
179 | if preserve_whitespace_tags is self.USE_DEFAULT: | ||
180 | preserve_whitespace_tags = self.DEFAULT_PRESERVE_WHITESPACE_TAGS | ||
181 | self.preserve_whitespace_tags = preserve_whitespace_tags | ||
182 | if store_line_numbers == self.USE_DEFAULT: | ||
183 | store_line_numbers = self.TRACKS_LINE_NUMBERS | ||
184 | self.store_line_numbers = store_line_numbers | ||
185 | if string_containers == self.USE_DEFAULT: | ||
186 | string_containers = self.DEFAULT_STRING_CONTAINERS | ||
187 | self.string_containers = string_containers | ||
188 | |||
189 | def initialize_soup(self, soup): | ||
190 | """The BeautifulSoup object has been initialized and is now | ||
191 | being associated with the TreeBuilder. | ||
192 | |||
193 | :param soup: A BeautifulSoup object. | ||
194 | """ | ||
195 | self.soup = soup | ||
196 | |||
101 | def reset(self): | 197 | def reset(self): |
198 | """Do any work necessary to reset the underlying parser | ||
199 | for a new document. | ||
200 | |||
201 | By default, this does nothing. | ||
202 | """ | ||
102 | pass | 203 | pass |
103 | 204 | ||
104 | def can_be_empty_element(self, tag_name): | 205 | def can_be_empty_element(self, tag_name): |
@@ -110,24 +211,58 @@ class TreeBuilder(object): | |||
110 | For instance: an HTMLBuilder does not consider a <p> tag to be | 211 | For instance: an HTMLBuilder does not consider a <p> tag to be |
111 | an empty-element tag (it's not in | 212 | an empty-element tag (it's not in |
112 | HTMLBuilder.empty_element_tags). This means an empty <p> tag | 213 | HTMLBuilder.empty_element_tags). This means an empty <p> tag |
113 | will be presented as "<p></p>", not "<p />". | 214 | will be presented as "<p></p>", not "<p/>" or "<p>". |
114 | 215 | ||
115 | The default implementation has no opinion about which tags are | 216 | The default implementation has no opinion about which tags are |
116 | empty-element tags, so a tag will be presented as an | 217 | empty-element tags, so a tag will be presented as an |
117 | empty-element tag if and only if it has no contents. | 218 | empty-element tag if and only if it has no children. |
118 | "<foo></foo>" will become "<foo />", and "<foo>bar</foo>" will | 219 | "<foo></foo>" will become "<foo/>", and "<foo>bar</foo>" will |
119 | be left alone. | 220 | be left alone. |
221 | |||
222 | :param tag_name: The name of a markup tag. | ||
120 | """ | 223 | """ |
121 | if self.empty_element_tags is None: | 224 | if self.empty_element_tags is None: |
122 | return True | 225 | return True |
123 | return tag_name in self.empty_element_tags | 226 | return tag_name in self.empty_element_tags |
124 | 227 | ||
125 | def feed(self, markup): | 228 | def feed(self, markup): |
229 | """Run some incoming markup through some parsing process, | ||
230 | populating the `BeautifulSoup` object in self.soup. | ||
231 | |||
232 | This method is not implemented in TreeBuilder; it must be | ||
233 | implemented in subclasses. | ||
234 | |||
235 | :return: None. | ||
236 | """ | ||
126 | raise NotImplementedError() | 237 | raise NotImplementedError() |
127 | 238 | ||
128 | def prepare_markup(self, markup, user_specified_encoding=None, | 239 | def prepare_markup(self, markup, user_specified_encoding=None, |
129 | document_declared_encoding=None): | 240 | document_declared_encoding=None, exclude_encodings=None): |
130 | return markup, None, None, False | 241 | """Run any preliminary steps necessary to make incoming markup |
242 | acceptable to the parser. | ||
243 | |||
244 | :param markup: Some markup -- probably a bytestring. | ||
245 | :param user_specified_encoding: The user asked to try this encoding. | ||
246 | :param document_declared_encoding: The markup itself claims to be | ||
247 | in this encoding. NOTE: This argument is not used by the | ||
248 | calling code and can probably be removed. | ||
249 | :param exclude_encodings: The user asked _not_ to try any of | ||
250 | these encodings. | ||
251 | |||
252 | :yield: A series of 4-tuples: | ||
253 | (markup, encoding, declared encoding, | ||
254 | has undergone character replacement) | ||
255 | |||
256 | Each 4-tuple represents a strategy for converting the | ||
257 | document to Unicode and parsing it. Each strategy will be tried | ||
258 | in turn. | ||
259 | |||
260 | By default, the only strategy is to parse the markup | ||
261 | as-is. See `LXMLTreeBuilderForXML` and | ||
262 | `HTMLParserTreeBuilder` for implementations that take into | ||
263 | account the quirks of particular parsers. | ||
264 | """ | ||
265 | yield markup, None, None, False | ||
131 | 266 | ||
132 | def test_fragment_to_document(self, fragment): | 267 | def test_fragment_to_document(self, fragment): |
133 | """Wrap an HTML fragment to make it look like a document. | 268 | """Wrap an HTML fragment to make it look like a document. |
@@ -139,16 +274,36 @@ class TreeBuilder(object): | |||
139 | results against other HTML fragments. | 274 | results against other HTML fragments. |
140 | 275 | ||
141 | This method should not be used outside of tests. | 276 | This method should not be used outside of tests. |
277 | |||
278 | :param fragment: A string -- fragment of HTML. | ||
279 | :return: A string -- a full HTML document. | ||
142 | """ | 280 | """ |
143 | return fragment | 281 | return fragment |
144 | 282 | ||
145 | def set_up_substitutions(self, tag): | 283 | def set_up_substitutions(self, tag): |
284 | """Set up any substitutions that will need to be performed on | ||
285 | a `Tag` when it's output as a string. | ||
286 | |||
287 | By default, this does nothing. See `HTMLTreeBuilder` for a | ||
288 | case where this is used. | ||
289 | |||
290 | :param tag: A `Tag` | ||
291 | :return: Whether or not a substitution was performed. | ||
292 | """ | ||
146 | return False | 293 | return False |
147 | 294 | ||
148 | def _replace_cdata_list_attribute_values(self, tag_name, attrs): | 295 | def _replace_cdata_list_attribute_values(self, tag_name, attrs): |
149 | """Replaces class="foo bar" with class=["foo", "bar"] | 296 | """When an attribute value is associated with a tag that can |
297 | have multiple values for that attribute, convert the string | ||
298 | value to a list of strings. | ||
150 | 299 | ||
151 | Modifies its input in place. | 300 | Basically, replaces class="foo bar" with class=["foo", "bar"] |
301 | |||
302 | NOTE: This method modifies its input in place. | ||
303 | |||
304 | :param tag_name: The name of a tag. | ||
305 | :param attrs: A dictionary containing the tag's attributes. | ||
306 | Any appropriate attribute values will be modified in place. | ||
152 | """ | 307 | """ |
153 | if not attrs: | 308 | if not attrs: |
154 | return attrs | 309 | return attrs |
@@ -163,7 +318,7 @@ class TreeBuilder(object): | |||
163 | # values. Split it into a list. | 318 | # values. Split it into a list. |
164 | value = attrs[attr] | 319 | value = attrs[attr] |
165 | if isinstance(value, str): | 320 | if isinstance(value, str): |
166 | values = whitespace_re.split(value) | 321 | values = nonwhitespace_re.findall(value) |
167 | else: | 322 | else: |
168 | # html5lib sometimes calls setAttributes twice | 323 | # html5lib sometimes calls setAttributes twice |
169 | # for the same tag when rearranging the parse | 324 | # for the same tag when rearranging the parse |
@@ -174,9 +329,13 @@ class TreeBuilder(object): | |||
174 | values = value | 329 | values = value |
175 | attrs[attr] = values | 330 | attrs[attr] = values |
176 | return attrs | 331 | return attrs |
177 | 332 | ||
178 | class SAXTreeBuilder(TreeBuilder): | 333 | class SAXTreeBuilder(TreeBuilder): |
179 | """A Beautiful Soup treebuilder that listens for SAX events.""" | 334 | """A Beautiful Soup treebuilder that listens for SAX events. |
335 | |||
336 | This is not currently used for anything, but it demonstrates | ||
337 | how a simple TreeBuilder would work. | ||
338 | """ | ||
180 | 339 | ||
181 | def feed(self, markup): | 340 | def feed(self, markup): |
182 | raise NotImplementedError() | 341 | raise NotImplementedError() |
@@ -186,11 +345,11 @@ class SAXTreeBuilder(TreeBuilder): | |||
186 | 345 | ||
187 | def startElement(self, name, attrs): | 346 | def startElement(self, name, attrs): |
188 | attrs = dict((key[1], value) for key, value in list(attrs.items())) | 347 | attrs = dict((key[1], value) for key, value in list(attrs.items())) |
189 | #print "Start %s, %r" % (name, attrs) | 348 | #print("Start %s, %r" % (name, attrs)) |
190 | self.soup.handle_starttag(name, attrs) | 349 | self.soup.handle_starttag(name, attrs) |
191 | 350 | ||
192 | def endElement(self, name): | 351 | def endElement(self, name): |
193 | #print "End %s" % name | 352 | #print("End %s" % name) |
194 | self.soup.handle_endtag(name) | 353 | self.soup.handle_endtag(name) |
195 | 354 | ||
196 | def startElementNS(self, nsTuple, nodeName, attrs): | 355 | def startElementNS(self, nsTuple, nodeName, attrs): |
@@ -227,10 +386,44 @@ class HTMLTreeBuilder(TreeBuilder): | |||
227 | Such as which tags are empty-element tags. | 386 | Such as which tags are empty-element tags. |
228 | """ | 387 | """ |
229 | 388 | ||
230 | preserve_whitespace_tags = set(['pre', 'textarea']) | 389 | empty_element_tags = set([ |
231 | empty_element_tags = set(['br' , 'hr', 'input', 'img', 'meta', | 390 | # These are from HTML5. |
232 | 'spacer', 'link', 'frame', 'base']) | 391 | 'area', 'base', 'br', 'col', 'embed', 'hr', 'img', 'input', 'keygen', 'link', 'menuitem', 'meta', 'param', 'source', 'track', 'wbr', |
233 | 392 | ||
393 | # These are from earlier versions of HTML and are removed in HTML5. | ||
394 | 'basefont', 'bgsound', 'command', 'frame', 'image', 'isindex', 'nextid', 'spacer' | ||
395 | ]) | ||
396 | |||
397 | # The HTML standard defines these as block-level elements. Beautiful | ||
398 | # Soup does not treat these elements differently from other elements, | ||
399 | # but it may do so eventually, and this information is available if | ||
400 | # you need to use it. | ||
401 | block_elements = set(["address", "article", "aside", "blockquote", "canvas", "dd", "div", "dl", "dt", "fieldset", "figcaption", "figure", "footer", "form", "h1", "h2", "h3", "h4", "h5", "h6", "header", "hr", "li", "main", "nav", "noscript", "ol", "output", "p", "pre", "section", "table", "tfoot", "ul", "video"]) | ||
402 | |||
403 | # These HTML tags need special treatment so they can be | ||
404 | # represented by a string class other than NavigableString. | ||
405 | # | ||
406 | # For some of these tags, it's because the HTML standard defines | ||
407 | # an unusual content model for them. I made this list by going | ||
408 | # through the HTML spec | ||
409 | # (https://html.spec.whatwg.org/#metadata-content) and looking for | ||
410 | # "metadata content" elements that can contain strings. | ||
411 | # | ||
412 | # The Ruby tags (<rt> and <rp>) are here despite being normal | ||
413 | # "phrasing content" tags, because the content they contain is | ||
414 | # qualitatively different from other text in the document, and it | ||
415 | # can be useful to be able to distinguish it. | ||
416 | # | ||
417 | # TODO: Arguably <noscript> could go here but it seems | ||
418 | # qualitatively different from the other tags. | ||
419 | DEFAULT_STRING_CONTAINERS = { | ||
420 | 'rt' : RubyTextString, | ||
421 | 'rp' : RubyParenthesisString, | ||
422 | 'style': Stylesheet, | ||
423 | 'script': Script, | ||
424 | 'template': TemplateString, | ||
425 | } | ||
426 | |||
234 | # The HTML standard defines these attributes as containing a | 427 | # The HTML standard defines these attributes as containing a |
235 | # space-separated list of values, not a single value. That is, | 428 | # space-separated list of values, not a single value. That is, |
236 | # class="foo bar" means that the 'class' attribute has two values, | 429 | # class="foo bar" means that the 'class' attribute has two values, |
@@ -238,7 +431,7 @@ class HTMLTreeBuilder(TreeBuilder): | |||
238 | # encounter one of these attributes, we will parse its value into | 431 | # encounter one of these attributes, we will parse its value into |
239 | # a list of values if possible. Upon output, the list will be | 432 | # a list of values if possible. Upon output, the list will be |
240 | # converted back into a string. | 433 | # converted back into a string. |
241 | cdata_list_attributes = { | 434 | DEFAULT_CDATA_LIST_ATTRIBUTES = { |
242 | "*" : ['class', 'accesskey', 'dropzone'], | 435 | "*" : ['class', 'accesskey', 'dropzone'], |
243 | "a" : ['rel', 'rev'], | 436 | "a" : ['rel', 'rev'], |
244 | "link" : ['rel', 'rev'], | 437 | "link" : ['rel', 'rev'], |
@@ -255,7 +448,19 @@ class HTMLTreeBuilder(TreeBuilder): | |||
255 | "output" : ["for"], | 448 | "output" : ["for"], |
256 | } | 449 | } |
257 | 450 | ||
451 | DEFAULT_PRESERVE_WHITESPACE_TAGS = set(['pre', 'textarea']) | ||
452 | |||
258 | def set_up_substitutions(self, tag): | 453 | def set_up_substitutions(self, tag): |
454 | """Replace the declared encoding in a <meta> tag with a placeholder, | ||
455 | to be substituted when the tag is output to a string. | ||
456 | |||
457 | An HTML document may come in to Beautiful Soup as one | ||
458 | encoding, but exit in a different encoding, and the <meta> tag | ||
459 | needs to be changed to reflect this. | ||
460 | |||
461 | :param tag: A `Tag` | ||
462 | :return: Whether or not a substitution was performed. | ||
463 | """ | ||
259 | # We are only interested in <meta> tags | 464 | # We are only interested in <meta> tags |
260 | if tag.name != 'meta': | 465 | if tag.name != 'meta': |
261 | return False | 466 | return False |
@@ -288,10 +493,107 @@ class HTMLTreeBuilder(TreeBuilder): | |||
288 | 493 | ||
289 | return (meta_encoding is not None) | 494 | return (meta_encoding is not None) |
290 | 495 | ||
496 | class DetectsXMLParsedAsHTML(object): | ||
497 | """A mixin class for any class (a TreeBuilder, or some class used by a | ||
498 | TreeBuilder) that's in a position to detect whether an XML | ||
499 | document is being incorrectly parsed as HTML, and issue an | ||
500 | appropriate warning. | ||
501 | |||
502 | This requires being able to observe an incoming processing | ||
503 | instruction that might be an XML declaration, and also able to | ||
504 | observe tags as they're opened. If you can't do that for a given | ||
505 | TreeBuilder, there's a less reliable implementation based on | ||
506 | examining the raw markup. | ||
507 | """ | ||
508 | |||
509 | # Regular expression for seeing if markup has an <html> tag. | ||
510 | LOOKS_LIKE_HTML = re.compile("<[^ +]html", re.I) | ||
511 | LOOKS_LIKE_HTML_B = re.compile(b"<[^ +]html", re.I) | ||
512 | |||
513 | XML_PREFIX = '<?xml' | ||
514 | XML_PREFIX_B = b'<?xml' | ||
515 | |||
516 | @classmethod | ||
517 | def warn_if_markup_looks_like_xml(cls, markup, stacklevel=3): | ||
518 | """Perform a check on some markup to see if it looks like XML | ||
519 | that's not XHTML. If so, issue a warning. | ||
520 | |||
521 | This is much less reliable than doing the check while parsing, | ||
522 | but some of the tree builders can't do that. | ||
523 | |||
524 | :param stacklevel: The stacklevel of the code calling this | ||
525 | function. | ||
526 | |||
527 | :return: True if the markup looks like non-XHTML XML, False | ||
528 | otherwise. | ||
529 | |||
530 | """ | ||
531 | if isinstance(markup, bytes): | ||
532 | prefix = cls.XML_PREFIX_B | ||
533 | looks_like_html = cls.LOOKS_LIKE_HTML_B | ||
534 | else: | ||
535 | prefix = cls.XML_PREFIX | ||
536 | looks_like_html = cls.LOOKS_LIKE_HTML | ||
537 | |||
538 | if (markup is not None | ||
539 | and markup.startswith(prefix) | ||
540 | and not looks_like_html.search(markup[:500]) | ||
541 | ): | ||
542 | cls._warn(stacklevel=stacklevel+2) | ||
543 | return True | ||
544 | return False | ||
545 | |||
546 | @classmethod | ||
547 | def _warn(cls, stacklevel=5): | ||
548 | """Issue a warning about XML being parsed as HTML.""" | ||
549 | warnings.warn( | ||
550 | XMLParsedAsHTMLWarning.MESSAGE, XMLParsedAsHTMLWarning, | ||
551 | stacklevel=stacklevel | ||
552 | ) | ||
553 | |||
554 | def _initialize_xml_detector(self): | ||
555 | """Call this method before parsing a document.""" | ||
556 | self._first_processing_instruction = None | ||
557 | self._root_tag = None | ||
558 | |||
559 | def _document_might_be_xml(self, processing_instruction): | ||
560 | """Call this method when encountering an XML declaration, or a | ||
561 | "processing instruction" that might be an XML declaration. | ||
562 | """ | ||
563 | if (self._first_processing_instruction is not None | ||
564 | or self._root_tag is not None): | ||
565 | # The document has already started. Don't bother checking | ||
566 | # anymore. | ||
567 | return | ||
568 | |||
569 | self._first_processing_instruction = processing_instruction | ||
570 | |||
571 | # We won't know until we encounter the first tag whether or | ||
572 | # not this is actually a problem. | ||
573 | |||
574 | def _root_tag_encountered(self, name): | ||
575 | """Call this when you encounter the document's root tag. | ||
576 | |||
577 | This is where we actually check whether an XML document is | ||
578 | being incorrectly parsed as HTML, and issue the warning. | ||
579 | """ | ||
580 | if self._root_tag is not None: | ||
581 | # This method was incorrectly called multiple times. Do | ||
582 | # nothing. | ||
583 | return | ||
584 | |||
585 | self._root_tag = name | ||
586 | if (name != 'html' and self._first_processing_instruction is not None | ||
587 | and self._first_processing_instruction.lower().startswith('xml ')): | ||
588 | # We encountered an XML declaration and then a tag other | ||
589 | # than 'html'. This is a reliable indicator that a | ||
590 | # non-XHTML document is being parsed as XML. | ||
591 | self._warn() | ||
592 | |||
593 | |||
291 | def register_treebuilders_from(module): | 594 | def register_treebuilders_from(module): |
292 | """Copy TreeBuilders from the given module into this module.""" | 595 | """Copy TreeBuilders from the given module into this module.""" |
293 | # I'm fairly sure this is not the best way to do this. | 596 | this_module = sys.modules[__name__] |
294 | this_module = sys.modules['bs4.builder'] | ||
295 | for name in module.__all__: | 597 | for name in module.__all__: |
296 | obj = getattr(module, name) | 598 | obj = getattr(module, name) |
297 | 599 | ||
@@ -302,12 +604,22 @@ def register_treebuilders_from(module): | |||
302 | this_module.builder_registry.register(obj) | 604 | this_module.builder_registry.register(obj) |
303 | 605 | ||
304 | class ParserRejectedMarkup(Exception): | 606 | class ParserRejectedMarkup(Exception): |
305 | pass | 607 | """An Exception to be raised when the underlying parser simply |
306 | 608 | refuses to parse the given markup. | |
609 | """ | ||
610 | def __init__(self, message_or_exception): | ||
611 | """Explain why the parser rejected the given markup, either | ||
612 | with a textual explanation or another exception. | ||
613 | """ | ||
614 | if isinstance(message_or_exception, Exception): | ||
615 | e = message_or_exception | ||
616 | message_or_exception = "%s: %s" % (e.__class__.__name__, str(e)) | ||
617 | super(ParserRejectedMarkup, self).__init__(message_or_exception) | ||
618 | |||
307 | # Builders are registered in reverse order of priority, so that custom | 619 | # Builders are registered in reverse order of priority, so that custom |
308 | # builder registrations will take precedence. In general, we want lxml | 620 | # builder registrations will take precedence. In general, we want lxml |
309 | # to take precedence over html5lib, because it's faster. And we only | 621 | # to take precedence over html5lib, because it's faster. And we only |
310 | # want to use HTMLParser as a last result. | 622 | # want to use HTMLParser as a last resort. |
311 | from . import _htmlparser | 623 | from . import _htmlparser |
312 | register_treebuilders_from(_htmlparser) | 624 | register_treebuilders_from(_htmlparser) |
313 | try: | 625 | try: |
diff --git a/bitbake/lib/bs4/builder/_html5lib.py b/bitbake/lib/bs4/builder/_html5lib.py index 9e9216ef9c..7c46a85118 100644 --- a/bitbake/lib/bs4/builder/_html5lib.py +++ b/bitbake/lib/bs4/builder/_html5lib.py | |||
@@ -1,9 +1,14 @@ | |||
1 | # Use of this source code is governed by the MIT license. | ||
2 | __license__ = "MIT" | ||
3 | |||
1 | __all__ = [ | 4 | __all__ = [ |
2 | 'HTML5TreeBuilder', | 5 | 'HTML5TreeBuilder', |
3 | ] | 6 | ] |
4 | 7 | ||
5 | import warnings | 8 | import warnings |
9 | import re | ||
6 | from bs4.builder import ( | 10 | from bs4.builder import ( |
11 | DetectsXMLParsedAsHTML, | ||
7 | PERMISSIVE, | 12 | PERMISSIVE, |
8 | HTML, | 13 | HTML, |
9 | HTML_5, | 14 | HTML_5, |
@@ -11,17 +16,13 @@ from bs4.builder import ( | |||
11 | ) | 16 | ) |
12 | from bs4.element import ( | 17 | from bs4.element import ( |
13 | NamespacedAttribute, | 18 | NamespacedAttribute, |
14 | whitespace_re, | 19 | nonwhitespace_re, |
15 | ) | 20 | ) |
16 | import html5lib | 21 | import html5lib |
17 | try: | 22 | from html5lib.constants import ( |
18 | # html5lib >= 0.99999999/1.0b9 | 23 | namespaces, |
19 | from html5lib.treebuilders import base as treebuildersbase | 24 | prefixes, |
20 | except ImportError: | 25 | ) |
21 | # html5lib <= 0.9999999/1.0b8 | ||
22 | from html5lib.treebuilders import _base as treebuildersbase | ||
23 | from html5lib.constants import namespaces | ||
24 | |||
25 | from bs4.element import ( | 26 | from bs4.element import ( |
26 | Comment, | 27 | Comment, |
27 | Doctype, | 28 | Doctype, |
@@ -29,13 +30,37 @@ from bs4.element import ( | |||
29 | Tag, | 30 | Tag, |
30 | ) | 31 | ) |
31 | 32 | ||
33 | try: | ||
34 | # Pre-0.99999999 | ||
35 | from html5lib.treebuilders import _base as treebuilder_base | ||
36 | new_html5lib = False | ||
37 | except ImportError as e: | ||
38 | # 0.99999999 and up | ||
39 | from html5lib.treebuilders import base as treebuilder_base | ||
40 | new_html5lib = True | ||
41 | |||
32 | class HTML5TreeBuilder(HTMLTreeBuilder): | 42 | class HTML5TreeBuilder(HTMLTreeBuilder): |
33 | """Use html5lib to build a tree.""" | 43 | """Use html5lib to build a tree. |
44 | |||
45 | Note that this TreeBuilder does not support some features common | ||
46 | to HTML TreeBuilders. Some of these features could theoretically | ||
47 | be implemented, but at the very least it's quite difficult, | ||
48 | because html5lib moves the parse tree around as it's being built. | ||
49 | |||
50 | * This TreeBuilder doesn't use different subclasses of NavigableString | ||
51 | based on the name of the tag in which the string was found. | ||
52 | |||
53 | * You can't use a SoupStrainer to parse only part of a document. | ||
54 | """ | ||
34 | 55 | ||
35 | NAME = "html5lib" | 56 | NAME = "html5lib" |
36 | 57 | ||
37 | features = [NAME, PERMISSIVE, HTML_5, HTML] | 58 | features = [NAME, PERMISSIVE, HTML_5, HTML] |
38 | 59 | ||
60 | # html5lib can tell us which line number and position in the | ||
61 | # original file is the source of an element. | ||
62 | TRACKS_LINE_NUMBERS = True | ||
63 | |||
39 | def prepare_markup(self, markup, user_specified_encoding, | 64 | def prepare_markup(self, markup, user_specified_encoding, |
40 | document_declared_encoding=None, exclude_encodings=None): | 65 | document_declared_encoding=None, exclude_encodings=None): |
41 | # Store the user-specified encoding for use later on. | 66 | # Store the user-specified encoding for use later on. |
@@ -45,27 +70,56 @@ class HTML5TreeBuilder(HTMLTreeBuilder): | |||
45 | # ATM because the html5lib TreeBuilder doesn't use | 70 | # ATM because the html5lib TreeBuilder doesn't use |
46 | # UnicodeDammit. | 71 | # UnicodeDammit. |
47 | if exclude_encodings: | 72 | if exclude_encodings: |
48 | warnings.warn("You provided a value for exclude_encoding, but the html5lib tree builder doesn't support exclude_encoding.") | 73 | warnings.warn( |
74 | "You provided a value for exclude_encoding, but the html5lib tree builder doesn't support exclude_encoding.", | ||
75 | stacklevel=3 | ||
76 | ) | ||
77 | |||
78 | # html5lib only parses HTML, so if it's given XML that's worth | ||
79 | # noting. | ||
80 | DetectsXMLParsedAsHTML.warn_if_markup_looks_like_xml( | ||
81 | markup, stacklevel=3 | ||
82 | ) | ||
83 | |||
49 | yield (markup, None, None, False) | 84 | yield (markup, None, None, False) |
50 | 85 | ||
51 | # These methods are defined by Beautiful Soup. | 86 | # These methods are defined by Beautiful Soup. |
52 | def feed(self, markup): | 87 | def feed(self, markup): |
53 | if self.soup.parse_only is not None: | 88 | if self.soup.parse_only is not None: |
54 | warnings.warn("You provided a value for parse_only, but the html5lib tree builder doesn't support parse_only. The entire document will be parsed.") | 89 | warnings.warn( |
90 | "You provided a value for parse_only, but the html5lib tree builder doesn't support parse_only. The entire document will be parsed.", | ||
91 | stacklevel=4 | ||
92 | ) | ||
55 | parser = html5lib.HTMLParser(tree=self.create_treebuilder) | 93 | parser = html5lib.HTMLParser(tree=self.create_treebuilder) |
56 | doc = parser.parse(markup, encoding=self.user_specified_encoding) | 94 | self.underlying_builder.parser = parser |
57 | 95 | extra_kwargs = dict() | |
96 | if not isinstance(markup, str): | ||
97 | if new_html5lib: | ||
98 | extra_kwargs['override_encoding'] = self.user_specified_encoding | ||
99 | else: | ||
100 | extra_kwargs['encoding'] = self.user_specified_encoding | ||
101 | doc = parser.parse(markup, **extra_kwargs) | ||
102 | |||
58 | # Set the character encoding detected by the tokenizer. | 103 | # Set the character encoding detected by the tokenizer. |
59 | if isinstance(markup, str): | 104 | if isinstance(markup, str): |
60 | # We need to special-case this because html5lib sets | 105 | # We need to special-case this because html5lib sets |
61 | # charEncoding to UTF-8 if it gets Unicode input. | 106 | # charEncoding to UTF-8 if it gets Unicode input. |
62 | doc.original_encoding = None | 107 | doc.original_encoding = None |
63 | else: | 108 | else: |
64 | doc.original_encoding = parser.tokenizer.stream.charEncoding[0] | 109 | original_encoding = parser.tokenizer.stream.charEncoding[0] |
65 | 110 | if not isinstance(original_encoding, str): | |
111 | # In 0.99999999 and up, the encoding is an html5lib | ||
112 | # Encoding object. We want to use a string for compatibility | ||
113 | # with other tree builders. | ||
114 | original_encoding = original_encoding.name | ||
115 | doc.original_encoding = original_encoding | ||
116 | self.underlying_builder.parser = None | ||
117 | |||
66 | def create_treebuilder(self, namespaceHTMLElements): | 118 | def create_treebuilder(self, namespaceHTMLElements): |
67 | self.underlying_builder = TreeBuilderForHtml5lib( | 119 | self.underlying_builder = TreeBuilderForHtml5lib( |
68 | self.soup, namespaceHTMLElements) | 120 | namespaceHTMLElements, self.soup, |
121 | store_line_numbers=self.store_line_numbers | ||
122 | ) | ||
69 | return self.underlying_builder | 123 | return self.underlying_builder |
70 | 124 | ||
71 | def test_fragment_to_document(self, fragment): | 125 | def test_fragment_to_document(self, fragment): |
@@ -73,12 +127,30 @@ class HTML5TreeBuilder(HTMLTreeBuilder): | |||
73 | return '<html><head></head><body>%s</body></html>' % fragment | 127 | return '<html><head></head><body>%s</body></html>' % fragment |
74 | 128 | ||
75 | 129 | ||
76 | class TreeBuilderForHtml5lib(treebuildersbase.TreeBuilder): | 130 | class TreeBuilderForHtml5lib(treebuilder_base.TreeBuilder): |
77 | 131 | ||
78 | def __init__(self, soup, namespaceHTMLElements): | 132 | def __init__(self, namespaceHTMLElements, soup=None, |
79 | self.soup = soup | 133 | store_line_numbers=True, **kwargs): |
134 | if soup: | ||
135 | self.soup = soup | ||
136 | else: | ||
137 | from bs4 import BeautifulSoup | ||
138 | # TODO: Why is the parser 'html.parser' here? To avoid an | ||
139 | # infinite loop? | ||
140 | self.soup = BeautifulSoup( | ||
141 | "", "html.parser", store_line_numbers=store_line_numbers, | ||
142 | **kwargs | ||
143 | ) | ||
144 | # TODO: What are **kwargs exactly? Should they be passed in | ||
145 | # here in addition to/instead of being passed to the BeautifulSoup | ||
146 | # constructor? | ||
80 | super(TreeBuilderForHtml5lib, self).__init__(namespaceHTMLElements) | 147 | super(TreeBuilderForHtml5lib, self).__init__(namespaceHTMLElements) |
81 | 148 | ||
149 | # This will be set later to an html5lib.html5parser.HTMLParser | ||
150 | # object, which we can use to track the current line number. | ||
151 | self.parser = None | ||
152 | self.store_line_numbers = store_line_numbers | ||
153 | |||
82 | def documentClass(self): | 154 | def documentClass(self): |
83 | self.soup.reset() | 155 | self.soup.reset() |
84 | return Element(self.soup, self.soup, None) | 156 | return Element(self.soup, self.soup, None) |
@@ -92,14 +164,26 @@ class TreeBuilderForHtml5lib(treebuildersbase.TreeBuilder): | |||
92 | self.soup.object_was_parsed(doctype) | 164 | self.soup.object_was_parsed(doctype) |
93 | 165 | ||
94 | def elementClass(self, name, namespace): | 166 | def elementClass(self, name, namespace): |
95 | tag = self.soup.new_tag(name, namespace) | 167 | kwargs = {} |
168 | if self.parser and self.store_line_numbers: | ||
169 | # This represents the point immediately after the end of the | ||
170 | # tag. We don't know when the tag started, but we do know | ||
171 | # where it ended -- the character just before this one. | ||
172 | sourceline, sourcepos = self.parser.tokenizer.stream.position() | ||
173 | kwargs['sourceline'] = sourceline | ||
174 | kwargs['sourcepos'] = sourcepos-1 | ||
175 | tag = self.soup.new_tag(name, namespace, **kwargs) | ||
176 | |||
96 | return Element(tag, self.soup, namespace) | 177 | return Element(tag, self.soup, namespace) |
97 | 178 | ||
98 | def commentClass(self, data): | 179 | def commentClass(self, data): |
99 | return TextNode(Comment(data), self.soup) | 180 | return TextNode(Comment(data), self.soup) |
100 | 181 | ||
101 | def fragmentClass(self): | 182 | def fragmentClass(self): |
102 | self.soup = BeautifulSoup("") | 183 | from bs4 import BeautifulSoup |
184 | # TODO: Why is the parser 'html.parser' here? To avoid an | ||
185 | # infinite loop? | ||
186 | self.soup = BeautifulSoup("", "html.parser") | ||
103 | self.soup.name = "[document_fragment]" | 187 | self.soup.name = "[document_fragment]" |
104 | return Element(self.soup, self.soup, None) | 188 | return Element(self.soup, self.soup, None) |
105 | 189 | ||
@@ -111,7 +195,57 @@ class TreeBuilderForHtml5lib(treebuildersbase.TreeBuilder): | |||
111 | return self.soup | 195 | return self.soup |
112 | 196 | ||
113 | def getFragment(self): | 197 | def getFragment(self): |
114 | return treebuildersbase.TreeBuilder.getFragment(self).element | 198 | return treebuilder_base.TreeBuilder.getFragment(self).element |
199 | |||
200 | def testSerializer(self, element): | ||
201 | from bs4 import BeautifulSoup | ||
202 | rv = [] | ||
203 | doctype_re = re.compile(r'^(.*?)(?: PUBLIC "(.*?)"(?: "(.*?)")?| SYSTEM "(.*?)")?$') | ||
204 | |||
205 | def serializeElement(element, indent=0): | ||
206 | if isinstance(element, BeautifulSoup): | ||
207 | pass | ||
208 | if isinstance(element, Doctype): | ||
209 | m = doctype_re.match(element) | ||
210 | if m: | ||
211 | name = m.group(1) | ||
212 | if m.lastindex > 1: | ||
213 | publicId = m.group(2) or "" | ||
214 | systemId = m.group(3) or m.group(4) or "" | ||
215 | rv.append("""|%s<!DOCTYPE %s "%s" "%s">""" % | ||
216 | (' ' * indent, name, publicId, systemId)) | ||
217 | else: | ||
218 | rv.append("|%s<!DOCTYPE %s>" % (' ' * indent, name)) | ||
219 | else: | ||
220 | rv.append("|%s<!DOCTYPE >" % (' ' * indent,)) | ||
221 | elif isinstance(element, Comment): | ||
222 | rv.append("|%s<!-- %s -->" % (' ' * indent, element)) | ||
223 | elif isinstance(element, NavigableString): | ||
224 | rv.append("|%s\"%s\"" % (' ' * indent, element)) | ||
225 | else: | ||
226 | if element.namespace: | ||
227 | name = "%s %s" % (prefixes[element.namespace], | ||
228 | element.name) | ||
229 | else: | ||
230 | name = element.name | ||
231 | rv.append("|%s<%s>" % (' ' * indent, name)) | ||
232 | if element.attrs: | ||
233 | attributes = [] | ||
234 | for name, value in list(element.attrs.items()): | ||
235 | if isinstance(name, NamespacedAttribute): | ||
236 | name = "%s %s" % (prefixes[name.namespace], name.name) | ||
237 | if isinstance(value, list): | ||
238 | value = " ".join(value) | ||
239 | attributes.append((name, value)) | ||
240 | |||
241 | for name, value in sorted(attributes): | ||
242 | rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value)) | ||
243 | indent += 2 | ||
244 | for child in element.children: | ||
245 | serializeElement(child, indent) | ||
246 | serializeElement(element, 0) | ||
247 | |||
248 | return "\n".join(rv) | ||
115 | 249 | ||
116 | class AttrList(object): | 250 | class AttrList(object): |
117 | def __init__(self, element): | 251 | def __init__(self, element): |
@@ -122,14 +256,14 @@ class AttrList(object): | |||
122 | def __setitem__(self, name, value): | 256 | def __setitem__(self, name, value): |
123 | # If this attribute is a multi-valued attribute for this element, | 257 | # If this attribute is a multi-valued attribute for this element, |
124 | # turn its value into a list. | 258 | # turn its value into a list. |
125 | list_attr = HTML5TreeBuilder.cdata_list_attributes | 259 | list_attr = self.element.cdata_list_attributes or {} |
126 | if (name in list_attr['*'] | 260 | if (name in list_attr.get('*', []) |
127 | or (self.element.name in list_attr | 261 | or (self.element.name in list_attr |
128 | and name in list_attr[self.element.name])): | 262 | and name in list_attr.get(self.element.name, []))): |
129 | # A node that is being cloned may have already undergone | 263 | # A node that is being cloned may have already undergone |
130 | # this procedure. | 264 | # this procedure. |
131 | if not isinstance(value, list): | 265 | if not isinstance(value, list): |
132 | value = whitespace_re.split(value) | 266 | value = nonwhitespace_re.findall(value) |
133 | self.element[name] = value | 267 | self.element[name] = value |
134 | def items(self): | 268 | def items(self): |
135 | return list(self.attrs.items()) | 269 | return list(self.attrs.items()) |
@@ -143,9 +277,9 @@ class AttrList(object): | |||
143 | return name in list(self.attrs.keys()) | 277 | return name in list(self.attrs.keys()) |
144 | 278 | ||
145 | 279 | ||
146 | class Element(treebuildersbase.Node): | 280 | class Element(treebuilder_base.Node): |
147 | def __init__(self, element, soup, namespace): | 281 | def __init__(self, element, soup, namespace): |
148 | treebuildersbase.Node.__init__(self, element.name) | 282 | treebuilder_base.Node.__init__(self, element.name) |
149 | self.element = element | 283 | self.element = element |
150 | self.soup = soup | 284 | self.soup = soup |
151 | self.namespace = namespace | 285 | self.namespace = namespace |
@@ -164,13 +298,15 @@ class Element(treebuildersbase.Node): | |||
164 | child = node | 298 | child = node |
165 | elif node.element.__class__ == NavigableString: | 299 | elif node.element.__class__ == NavigableString: |
166 | string_child = child = node.element | 300 | string_child = child = node.element |
301 | node.parent = self | ||
167 | else: | 302 | else: |
168 | child = node.element | 303 | child = node.element |
304 | node.parent = self | ||
169 | 305 | ||
170 | if not isinstance(child, str) and child.parent is not None: | 306 | if not isinstance(child, str) and child.parent is not None: |
171 | node.element.extract() | 307 | node.element.extract() |
172 | 308 | ||
173 | if (string_child and self.element.contents | 309 | if (string_child is not None and self.element.contents |
174 | and self.element.contents[-1].__class__ == NavigableString): | 310 | and self.element.contents[-1].__class__ == NavigableString): |
175 | # We are appending a string onto another string. | 311 | # We are appending a string onto another string. |
176 | # TODO This has O(n^2) performance, for input like | 312 | # TODO This has O(n^2) performance, for input like |
@@ -203,12 +339,12 @@ class Element(treebuildersbase.Node): | |||
203 | most_recent_element=most_recent_element) | 339 | most_recent_element=most_recent_element) |
204 | 340 | ||
205 | def getAttributes(self): | 341 | def getAttributes(self): |
342 | if isinstance(self.element, Comment): | ||
343 | return {} | ||
206 | return AttrList(self.element) | 344 | return AttrList(self.element) |
207 | 345 | ||
208 | def setAttributes(self, attributes): | 346 | def setAttributes(self, attributes): |
209 | |||
210 | if attributes is not None and len(attributes) > 0: | 347 | if attributes is not None and len(attributes) > 0: |
211 | |||
212 | converted_attributes = [] | 348 | converted_attributes = [] |
213 | for name, value in list(attributes.items()): | 349 | for name, value in list(attributes.items()): |
214 | if isinstance(name, tuple): | 350 | if isinstance(name, tuple): |
@@ -230,11 +366,11 @@ class Element(treebuildersbase.Node): | |||
230 | attributes = property(getAttributes, setAttributes) | 366 | attributes = property(getAttributes, setAttributes) |
231 | 367 | ||
232 | def insertText(self, data, insertBefore=None): | 368 | def insertText(self, data, insertBefore=None): |
369 | text = TextNode(self.soup.new_string(data), self.soup) | ||
233 | if insertBefore: | 370 | if insertBefore: |
234 | text = TextNode(self.soup.new_string(data), self.soup) | 371 | self.insertBefore(text, insertBefore) |
235 | self.insertBefore(data, insertBefore) | ||
236 | else: | 372 | else: |
237 | self.appendChild(data) | 373 | self.appendChild(text) |
238 | 374 | ||
239 | def insertBefore(self, node, refNode): | 375 | def insertBefore(self, node, refNode): |
240 | index = self.element.index(refNode.element) | 376 | index = self.element.index(refNode.element) |
@@ -253,9 +389,10 @@ class Element(treebuildersbase.Node): | |||
253 | 389 | ||
254 | def reparentChildren(self, new_parent): | 390 | def reparentChildren(self, new_parent): |
255 | """Move all of this tag's children into another tag.""" | 391 | """Move all of this tag's children into another tag.""" |
256 | # print "MOVE", self.element.contents | 392 | # print("MOVE", self.element.contents) |
257 | # print "FROM", self.element | 393 | # print("FROM", self.element) |
258 | # print "TO", new_parent.element | 394 | # print("TO", new_parent.element) |
395 | |||
259 | element = self.element | 396 | element = self.element |
260 | new_parent_element = new_parent.element | 397 | new_parent_element = new_parent.element |
261 | # Determine what this tag's next_element will be once all the children | 398 | # Determine what this tag's next_element will be once all the children |
@@ -274,29 +411,35 @@ class Element(treebuildersbase.Node): | |||
274 | new_parents_last_descendant_next_element = new_parent_element.next_element | 411 | new_parents_last_descendant_next_element = new_parent_element.next_element |
275 | 412 | ||
276 | to_append = element.contents | 413 | to_append = element.contents |
277 | append_after = new_parent_element.contents | ||
278 | if len(to_append) > 0: | 414 | if len(to_append) > 0: |
279 | # Set the first child's previous_element and previous_sibling | 415 | # Set the first child's previous_element and previous_sibling |
280 | # to elements within the new parent | 416 | # to elements within the new parent |
281 | first_child = to_append[0] | 417 | first_child = to_append[0] |
282 | if new_parents_last_descendant: | 418 | if new_parents_last_descendant is not None: |
283 | first_child.previous_element = new_parents_last_descendant | 419 | first_child.previous_element = new_parents_last_descendant |
284 | else: | 420 | else: |
285 | first_child.previous_element = new_parent_element | 421 | first_child.previous_element = new_parent_element |
286 | first_child.previous_sibling = new_parents_last_child | 422 | first_child.previous_sibling = new_parents_last_child |
287 | if new_parents_last_descendant: | 423 | if new_parents_last_descendant is not None: |
288 | new_parents_last_descendant.next_element = first_child | 424 | new_parents_last_descendant.next_element = first_child |
289 | else: | 425 | else: |
290 | new_parent_element.next_element = first_child | 426 | new_parent_element.next_element = first_child |
291 | if new_parents_last_child: | 427 | if new_parents_last_child is not None: |
292 | new_parents_last_child.next_sibling = first_child | 428 | new_parents_last_child.next_sibling = first_child |
293 | 429 | ||
294 | # Fix the last child's next_element and next_sibling | 430 | # Find the very last element being moved. It is now the |
295 | last_child = to_append[-1] | 431 | # parent's last descendant. It has no .next_sibling and |
296 | last_child.next_element = new_parents_last_descendant_next_element | 432 | # its .next_element is whatever the previous last |
297 | if new_parents_last_descendant_next_element: | 433 | # descendant had. |
298 | new_parents_last_descendant_next_element.previous_element = last_child | 434 | last_childs_last_descendant = to_append[-1]._last_descendant(False, True) |
299 | last_child.next_sibling = None | 435 | |
436 | last_childs_last_descendant.next_element = new_parents_last_descendant_next_element | ||
437 | if new_parents_last_descendant_next_element is not None: | ||
438 | # TODO: This code has no test coverage and I'm not sure | ||
439 | # how to get html5lib to go through this path, but it's | ||
440 | # just the other side of the previous line. | ||
441 | new_parents_last_descendant_next_element.previous_element = last_childs_last_descendant | ||
442 | last_childs_last_descendant.next_sibling = None | ||
300 | 443 | ||
301 | for child in to_append: | 444 | for child in to_append: |
302 | child.parent = new_parent_element | 445 | child.parent = new_parent_element |
@@ -306,9 +449,9 @@ class Element(treebuildersbase.Node): | |||
306 | element.contents = [] | 449 | element.contents = [] |
307 | element.next_element = final_next_element | 450 | element.next_element = final_next_element |
308 | 451 | ||
309 | # print "DONE WITH MOVE" | 452 | # print("DONE WITH MOVE") |
310 | # print "FROM", self.element | 453 | # print("FROM", self.element) |
311 | # print "TO", new_parent_element | 454 | # print("TO", new_parent_element) |
312 | 455 | ||
313 | def cloneNode(self): | 456 | def cloneNode(self): |
314 | tag = self.soup.new_tag(self.element.name, self.namespace) | 457 | tag = self.soup.new_tag(self.element.name, self.namespace) |
@@ -321,7 +464,7 @@ class Element(treebuildersbase.Node): | |||
321 | return self.element.contents | 464 | return self.element.contents |
322 | 465 | ||
323 | def getNameTuple(self): | 466 | def getNameTuple(self): |
324 | if self.namespace is None: | 467 | if self.namespace == None: |
325 | return namespaces["html"], self.name | 468 | return namespaces["html"], self.name |
326 | else: | 469 | else: |
327 | return self.namespace, self.name | 470 | return self.namespace, self.name |
@@ -330,7 +473,7 @@ class Element(treebuildersbase.Node): | |||
330 | 473 | ||
331 | class TextNode(Element): | 474 | class TextNode(Element): |
332 | def __init__(self, element, soup): | 475 | def __init__(self, element, soup): |
333 | treebuildersbase.Node.__init__(self, None) | 476 | treebuilder_base.Node.__init__(self, None) |
334 | self.element = element | 477 | self.element = element |
335 | self.soup = soup | 478 | self.soup = soup |
336 | 479 | ||
diff --git a/bitbake/lib/bs4/builder/_htmlparser.py b/bitbake/lib/bs4/builder/_htmlparser.py index bb0a63f2f3..3cc187f892 100644 --- a/bitbake/lib/bs4/builder/_htmlparser.py +++ b/bitbake/lib/bs4/builder/_htmlparser.py | |||
@@ -1,35 +1,18 @@ | |||
1 | # encoding: utf-8 | ||
1 | """Use the HTMLParser library to parse HTML files that aren't too bad.""" | 2 | """Use the HTMLParser library to parse HTML files that aren't too bad.""" |
2 | 3 | ||
4 | # Use of this source code is governed by the MIT license. | ||
5 | __license__ = "MIT" | ||
6 | |||
3 | __all__ = [ | 7 | __all__ = [ |
4 | 'HTMLParserTreeBuilder', | 8 | 'HTMLParserTreeBuilder', |
5 | ] | 9 | ] |
6 | 10 | ||
7 | from html.parser import HTMLParser | 11 | from html.parser import HTMLParser |
8 | 12 | ||
9 | try: | ||
10 | from html.parser import HTMLParseError | ||
11 | except ImportError as e: | ||
12 | # HTMLParseError is removed in Python 3.5. Since it can never be | ||
13 | # thrown in 3.5, we can just define our own class as a placeholder. | ||
14 | class HTMLParseError(Exception): | ||
15 | pass | ||
16 | |||
17 | import sys | 13 | import sys |
18 | import warnings | 14 | import warnings |
19 | 15 | ||
20 | # Starting in Python 3.2, the HTMLParser constructor takes a 'strict' | ||
21 | # argument, which we'd like to set to False. Unfortunately, | ||
22 | # http://bugs.python.org/issue13273 makes strict=True a better bet | ||
23 | # before Python 3.2.3. | ||
24 | # | ||
25 | # At the end of this file, we monkeypatch HTMLParser so that | ||
26 | # strict=True works well on Python 3.2.2. | ||
27 | major, minor, release = sys.version_info[:3] | ||
28 | CONSTRUCTOR_TAKES_STRICT = major == 3 and minor == 2 and release >= 3 | ||
29 | CONSTRUCTOR_STRICT_IS_DEPRECATED = major == 3 and minor == 3 | ||
30 | CONSTRUCTOR_TAKES_CONVERT_CHARREFS = major == 3 and minor >= 4 | ||
31 | |||
32 | |||
33 | from bs4.element import ( | 16 | from bs4.element import ( |
34 | CData, | 17 | CData, |
35 | Comment, | 18 | Comment, |
@@ -40,6 +23,8 @@ from bs4.element import ( | |||
40 | from bs4.dammit import EntitySubstitution, UnicodeDammit | 23 | from bs4.dammit import EntitySubstitution, UnicodeDammit |
41 | 24 | ||
42 | from bs4.builder import ( | 25 | from bs4.builder import ( |
26 | DetectsXMLParsedAsHTML, | ||
27 | ParserRejectedMarkup, | ||
43 | HTML, | 28 | HTML, |
44 | HTMLTreeBuilder, | 29 | HTMLTreeBuilder, |
45 | STRICT, | 30 | STRICT, |
@@ -48,8 +33,84 @@ from bs4.builder import ( | |||
48 | 33 | ||
49 | HTMLPARSER = 'html.parser' | 34 | HTMLPARSER = 'html.parser' |
50 | 35 | ||
51 | class BeautifulSoupHTMLParser(HTMLParser): | 36 | class BeautifulSoupHTMLParser(HTMLParser, DetectsXMLParsedAsHTML): |
52 | def handle_starttag(self, name, attrs): | 37 | """A subclass of the Python standard library's HTMLParser class, which |
38 | listens for HTMLParser events and translates them into calls | ||
39 | to Beautiful Soup's tree construction API. | ||
40 | """ | ||
41 | |||
42 | # Strategies for handling duplicate attributes | ||
43 | IGNORE = 'ignore' | ||
44 | REPLACE = 'replace' | ||
45 | |||
46 | def __init__(self, *args, **kwargs): | ||
47 | """Constructor. | ||
48 | |||
49 | :param on_duplicate_attribute: A strategy for what to do if a | ||
50 | tag includes the same attribute more than once. Accepted | ||
51 | values are: REPLACE (replace earlier values with later | ||
52 | ones, the default), IGNORE (keep the earliest value | ||
53 | encountered), or a callable. A callable must take three | ||
54 | arguments: the dictionary of attributes already processed, | ||
55 | the name of the duplicate attribute, and the most recent value | ||
56 | encountered. | ||
57 | """ | ||
58 | self.on_duplicate_attribute = kwargs.pop( | ||
59 | 'on_duplicate_attribute', self.REPLACE | ||
60 | ) | ||
61 | HTMLParser.__init__(self, *args, **kwargs) | ||
62 | |||
63 | # Keep a list of empty-element tags that were encountered | ||
64 | # without an explicit closing tag. If we encounter a closing tag | ||
65 | # of this type, we'll associate it with one of those entries. | ||
66 | # | ||
67 | # This isn't a stack because we don't care about the | ||
68 | # order. It's a list of closing tags we've already handled and | ||
69 | # will ignore, assuming they ever show up. | ||
70 | self.already_closed_empty_element = [] | ||
71 | |||
72 | self._initialize_xml_detector() | ||
73 | |||
74 | def error(self, message): | ||
75 | # NOTE: This method is required so long as Python 3.9 is | ||
76 | # supported. The corresponding code is removed from HTMLParser | ||
77 | # in 3.5, but not removed from ParserBase until 3.10. | ||
78 | # https://github.com/python/cpython/issues/76025 | ||
79 | # | ||
80 | # The original implementation turned the error into a warning, | ||
81 | # but in every case I discovered, this made HTMLParser | ||
82 | # immediately crash with an error message that was less | ||
83 | # helpful than the warning. The new implementation makes it | ||
84 | # more clear that html.parser just can't parse this | ||
85 | # markup. The 3.10 implementation does the same, though it | ||
86 | # raises AssertionError rather than calling a method. (We | ||
87 | # catch this error and wrap it in a ParserRejectedMarkup.) | ||
88 | raise ParserRejectedMarkup(message) | ||
89 | |||
90 | def handle_startendtag(self, name, attrs): | ||
91 | """Handle an incoming empty-element tag. | ||
92 | |||
93 | This is only called when the markup looks like <tag/>. | ||
94 | |||
95 | :param name: Name of the tag. | ||
96 | :param attrs: Dictionary of the tag's attributes. | ||
97 | """ | ||
98 | # is_startend() tells handle_starttag not to close the tag | ||
99 | # just because its name matches a known empty-element tag. We | ||
100 | # know that this is an empty-element tag and we want to call | ||
101 | # handle_endtag ourselves. | ||
102 | tag = self.handle_starttag(name, attrs, handle_empty_element=False) | ||
103 | self.handle_endtag(name) | ||
104 | |||
105 | def handle_starttag(self, name, attrs, handle_empty_element=True): | ||
106 | """Handle an opening tag, e.g. '<tag>' | ||
107 | |||
108 | :param name: Name of the tag. | ||
109 | :param attrs: Dictionary of the tag's attributes. | ||
110 | :param handle_empty_element: True if this tag is known to be | ||
111 | an empty-element tag (i.e. there is not expected to be any | ||
112 | closing tag). | ||
113 | """ | ||
53 | # XXX namespace | 114 | # XXX namespace |
54 | attr_dict = {} | 115 | attr_dict = {} |
55 | for key, value in attrs: | 116 | for key, value in attrs: |
@@ -57,20 +118,78 @@ class BeautifulSoupHTMLParser(HTMLParser): | |||
57 | # for consistency with the other tree builders. | 118 | # for consistency with the other tree builders. |
58 | if value is None: | 119 | if value is None: |
59 | value = '' | 120 | value = '' |
60 | attr_dict[key] = value | 121 | if key in attr_dict: |
122 | # A single attribute shows up multiple times in this | ||
123 | # tag. How to handle it depends on the | ||
124 | # on_duplicate_attribute setting. | ||
125 | on_dupe = self.on_duplicate_attribute | ||
126 | if on_dupe == self.IGNORE: | ||
127 | pass | ||
128 | elif on_dupe in (None, self.REPLACE): | ||
129 | attr_dict[key] = value | ||
130 | else: | ||
131 | on_dupe(attr_dict, key, value) | ||
132 | else: | ||
133 | attr_dict[key] = value | ||
61 | attrvalue = '""' | 134 | attrvalue = '""' |
62 | self.soup.handle_starttag(name, None, None, attr_dict) | 135 | #print("START", name) |
63 | 136 | sourceline, sourcepos = self.getpos() | |
64 | def handle_endtag(self, name): | 137 | tag = self.soup.handle_starttag( |
65 | self.soup.handle_endtag(name) | 138 | name, None, None, attr_dict, sourceline=sourceline, |
66 | 139 | sourcepos=sourcepos | |
140 | ) | ||
141 | if tag and tag.is_empty_element and handle_empty_element: | ||
142 | # Unlike other parsers, html.parser doesn't send separate end tag | ||
143 | # events for empty-element tags. (It's handled in | ||
144 | # handle_startendtag, but only if the original markup looked like | ||
145 | # <tag/>.) | ||
146 | # | ||
147 | # So we need to call handle_endtag() ourselves. Since we | ||
148 | # know the start event is identical to the end event, we | ||
149 | # don't want handle_endtag() to cross off any previous end | ||
150 | # events for tags of this name. | ||
151 | self.handle_endtag(name, check_already_closed=False) | ||
152 | |||
153 | # But we might encounter an explicit closing tag for this tag | ||
154 | # later on. If so, we want to ignore it. | ||
155 | self.already_closed_empty_element.append(name) | ||
156 | |||
157 | if self._root_tag is None: | ||
158 | self._root_tag_encountered(name) | ||
159 | |||
160 | def handle_endtag(self, name, check_already_closed=True): | ||
161 | """Handle a closing tag, e.g. '</tag>' | ||
162 | |||
163 | :param name: A tag name. | ||
164 | :param check_already_closed: True if this tag is expected to | ||
165 | be the closing portion of an empty-element tag, | ||
166 | e.g. '<tag></tag>'. | ||
167 | """ | ||
168 | #print("END", name) | ||
169 | if check_already_closed and name in self.already_closed_empty_element: | ||
170 | # This is a redundant end tag for an empty-element tag. | ||
171 | # We've already called handle_endtag() for it, so just | ||
172 | # check it off the list. | ||
173 | #print("ALREADY CLOSED", name) | ||
174 | self.already_closed_empty_element.remove(name) | ||
175 | else: | ||
176 | self.soup.handle_endtag(name) | ||
177 | |||
67 | def handle_data(self, data): | 178 | def handle_data(self, data): |
179 | """Handle some textual data that shows up between tags.""" | ||
68 | self.soup.handle_data(data) | 180 | self.soup.handle_data(data) |
69 | 181 | ||
70 | def handle_charref(self, name): | 182 | def handle_charref(self, name): |
71 | # XXX workaround for a bug in HTMLParser. Remove this once | 183 | """Handle a numeric character reference by converting it to the |
72 | # it's fixed in all supported versions. | 184 | corresponding Unicode character and treating it as textual |
73 | # http://bugs.python.org/issue13633 | 185 | data. |
186 | |||
187 | :param name: Character number, possibly in hexadecimal. | ||
188 | """ | ||
189 | # TODO: This was originally a workaround for a bug in | ||
190 | # HTMLParser. (http://bugs.python.org/issue13633) The bug has | ||
191 | # been fixed, but removing this code still makes some | ||
192 | # Beautiful Soup tests fail. This needs investigation. | ||
74 | if name.startswith('x'): | 193 | if name.startswith('x'): |
75 | real_name = int(name.lstrip('x'), 16) | 194 | real_name = int(name.lstrip('x'), 16) |
76 | elif name.startswith('X'): | 195 | elif name.startswith('X'): |
@@ -78,37 +197,71 @@ class BeautifulSoupHTMLParser(HTMLParser): | |||
78 | else: | 197 | else: |
79 | real_name = int(name) | 198 | real_name = int(name) |
80 | 199 | ||
81 | try: | 200 | data = None |
82 | data = chr(real_name) | 201 | if real_name < 256: |
83 | except (ValueError, OverflowError) as e: | 202 | # HTML numeric entities are supposed to reference Unicode |
84 | data = "\N{REPLACEMENT CHARACTER}" | 203 | # code points, but sometimes they reference code points in |
85 | 204 | # some other encoding (ahem, Windows-1252). E.g. “ | |
205 | # instead of É for LEFT DOUBLE QUOTATION MARK. This | ||
206 | # code tries to detect this situation and compensate. | ||
207 | for encoding in (self.soup.original_encoding, 'windows-1252'): | ||
208 | if not encoding: | ||
209 | continue | ||
210 | try: | ||
211 | data = bytearray([real_name]).decode(encoding) | ||
212 | except UnicodeDecodeError as e: | ||
213 | pass | ||
214 | if not data: | ||
215 | try: | ||
216 | data = chr(real_name) | ||
217 | except (ValueError, OverflowError) as e: | ||
218 | pass | ||
219 | data = data or "\N{REPLACEMENT CHARACTER}" | ||
86 | self.handle_data(data) | 220 | self.handle_data(data) |
87 | 221 | ||
88 | def handle_entityref(self, name): | 222 | def handle_entityref(self, name): |
223 | """Handle a named entity reference by converting it to the | ||
224 | corresponding Unicode character(s) and treating it as textual | ||
225 | data. | ||
226 | |||
227 | :param name: Name of the entity reference. | ||
228 | """ | ||
89 | character = EntitySubstitution.HTML_ENTITY_TO_CHARACTER.get(name) | 229 | character = EntitySubstitution.HTML_ENTITY_TO_CHARACTER.get(name) |
90 | if character is not None: | 230 | if character is not None: |
91 | data = character | 231 | data = character |
92 | else: | 232 | else: |
93 | data = "&%s;" % name | 233 | # If this were XML, it would be ambiguous whether "&foo" |
234 | # was an character entity reference with a missing | ||
235 | # semicolon or the literal string "&foo". Since this is | ||
236 | # HTML, we have a complete list of all character entity references, | ||
237 | # and this one wasn't found, so assume it's the literal string "&foo". | ||
238 | data = "&%s" % name | ||
94 | self.handle_data(data) | 239 | self.handle_data(data) |
95 | 240 | ||
96 | def handle_comment(self, data): | 241 | def handle_comment(self, data): |
242 | """Handle an HTML comment. | ||
243 | |||
244 | :param data: The text of the comment. | ||
245 | """ | ||
97 | self.soup.endData() | 246 | self.soup.endData() |
98 | self.soup.handle_data(data) | 247 | self.soup.handle_data(data) |
99 | self.soup.endData(Comment) | 248 | self.soup.endData(Comment) |
100 | 249 | ||
101 | def handle_decl(self, data): | 250 | def handle_decl(self, data): |
251 | """Handle a DOCTYPE declaration. | ||
252 | |||
253 | :param data: The text of the declaration. | ||
254 | """ | ||
102 | self.soup.endData() | 255 | self.soup.endData() |
103 | if data.startswith("DOCTYPE "): | 256 | data = data[len("DOCTYPE "):] |
104 | data = data[len("DOCTYPE "):] | ||
105 | elif data == 'DOCTYPE': | ||
106 | # i.e. "<!DOCTYPE>" | ||
107 | data = '' | ||
108 | self.soup.handle_data(data) | 257 | self.soup.handle_data(data) |
109 | self.soup.endData(Doctype) | 258 | self.soup.endData(Doctype) |
110 | 259 | ||
111 | def unknown_decl(self, data): | 260 | def unknown_decl(self, data): |
261 | """Handle a declaration of unknown type -- probably a CDATA block. | ||
262 | |||
263 | :param data: The text of the declaration. | ||
264 | """ | ||
112 | if data.upper().startswith('CDATA['): | 265 | if data.upper().startswith('CDATA['): |
113 | cls = CData | 266 | cls = CData |
114 | data = data[len('CDATA['):] | 267 | data = data[len('CDATA['):] |
@@ -119,144 +272,116 @@ class BeautifulSoupHTMLParser(HTMLParser): | |||
119 | self.soup.endData(cls) | 272 | self.soup.endData(cls) |
120 | 273 | ||
121 | def handle_pi(self, data): | 274 | def handle_pi(self, data): |
275 | """Handle a processing instruction. | ||
276 | |||
277 | :param data: The text of the instruction. | ||
278 | """ | ||
122 | self.soup.endData() | 279 | self.soup.endData() |
123 | self.soup.handle_data(data) | 280 | self.soup.handle_data(data) |
281 | self._document_might_be_xml(data) | ||
124 | self.soup.endData(ProcessingInstruction) | 282 | self.soup.endData(ProcessingInstruction) |
125 | 283 | ||
126 | 284 | ||
127 | class HTMLParserTreeBuilder(HTMLTreeBuilder): | 285 | class HTMLParserTreeBuilder(HTMLTreeBuilder): |
128 | 286 | """A Beautiful Soup `TreeBuilder` that uses the `HTMLParser` parser, | |
287 | found in the Python standard library. | ||
288 | """ | ||
129 | is_xml = False | 289 | is_xml = False |
130 | picklable = True | 290 | picklable = True |
131 | NAME = HTMLPARSER | 291 | NAME = HTMLPARSER |
132 | features = [NAME, HTML, STRICT] | 292 | features = [NAME, HTML, STRICT] |
133 | 293 | ||
134 | def __init__(self, *args, **kwargs): | 294 | # The html.parser knows which line number and position in the |
135 | if CONSTRUCTOR_TAKES_STRICT and not CONSTRUCTOR_STRICT_IS_DEPRECATED: | 295 | # original file is the source of an element. |
136 | kwargs['strict'] = False | 296 | TRACKS_LINE_NUMBERS = True |
137 | if CONSTRUCTOR_TAKES_CONVERT_CHARREFS: | ||
138 | kwargs['convert_charrefs'] = False | ||
139 | self.parser_args = (args, kwargs) | ||
140 | 297 | ||
298 | def __init__(self, parser_args=None, parser_kwargs=None, **kwargs): | ||
299 | """Constructor. | ||
300 | |||
301 | :param parser_args: Positional arguments to pass into | ||
302 | the BeautifulSoupHTMLParser constructor, once it's | ||
303 | invoked. | ||
304 | :param parser_kwargs: Keyword arguments to pass into | ||
305 | the BeautifulSoupHTMLParser constructor, once it's | ||
306 | invoked. | ||
307 | :param kwargs: Keyword arguments for the superclass constructor. | ||
308 | """ | ||
309 | # Some keyword arguments will be pulled out of kwargs and placed | ||
310 | # into parser_kwargs. | ||
311 | extra_parser_kwargs = dict() | ||
312 | for arg in ('on_duplicate_attribute',): | ||
313 | if arg in kwargs: | ||
314 | value = kwargs.pop(arg) | ||
315 | extra_parser_kwargs[arg] = value | ||
316 | super(HTMLParserTreeBuilder, self).__init__(**kwargs) | ||
317 | parser_args = parser_args or [] | ||
318 | parser_kwargs = parser_kwargs or {} | ||
319 | parser_kwargs.update(extra_parser_kwargs) | ||
320 | parser_kwargs['convert_charrefs'] = False | ||
321 | self.parser_args = (parser_args, parser_kwargs) | ||
322 | |||
141 | def prepare_markup(self, markup, user_specified_encoding=None, | 323 | def prepare_markup(self, markup, user_specified_encoding=None, |
142 | document_declared_encoding=None, exclude_encodings=None): | 324 | document_declared_encoding=None, exclude_encodings=None): |
143 | """ | 325 | |
144 | :return: A 4-tuple (markup, original encoding, encoding | 326 | """Run any preliminary steps necessary to make incoming markup |
145 | declared within markup, whether any characters had to be | 327 | acceptable to the parser. |
146 | replaced with REPLACEMENT CHARACTER). | 328 | |
329 | :param markup: Some markup -- probably a bytestring. | ||
330 | :param user_specified_encoding: The user asked to try this encoding. | ||
331 | :param document_declared_encoding: The markup itself claims to be | ||
332 | in this encoding. | ||
333 | :param exclude_encodings: The user asked _not_ to try any of | ||
334 | these encodings. | ||
335 | |||
336 | :yield: A series of 4-tuples: | ||
337 | (markup, encoding, declared encoding, | ||
338 | has undergone character replacement) | ||
339 | |||
340 | Each 4-tuple represents a strategy for converting the | ||
341 | document to Unicode and parsing it. Each strategy will be tried | ||
342 | in turn. | ||
147 | """ | 343 | """ |
148 | if isinstance(markup, str): | 344 | if isinstance(markup, str): |
345 | # Parse Unicode as-is. | ||
149 | yield (markup, None, None, False) | 346 | yield (markup, None, None, False) |
150 | return | 347 | return |
151 | 348 | ||
349 | # Ask UnicodeDammit to sniff the most likely encoding. | ||
350 | |||
351 | # This was provided by the end-user; treat it as a known | ||
352 | # definite encoding per the algorithm laid out in the HTML5 | ||
353 | # spec. (See the EncodingDetector class for details.) | ||
354 | known_definite_encodings = [user_specified_encoding] | ||
355 | |||
356 | # This was found in the document; treat it as a slightly lower-priority | ||
357 | # user encoding. | ||
358 | user_encodings = [document_declared_encoding] | ||
359 | |||
152 | try_encodings = [user_specified_encoding, document_declared_encoding] | 360 | try_encodings = [user_specified_encoding, document_declared_encoding] |
153 | dammit = UnicodeDammit(markup, try_encodings, is_html=True, | 361 | dammit = UnicodeDammit( |
154 | exclude_encodings=exclude_encodings) | 362 | markup, |
363 | known_definite_encodings=known_definite_encodings, | ||
364 | user_encodings=user_encodings, | ||
365 | is_html=True, | ||
366 | exclude_encodings=exclude_encodings | ||
367 | ) | ||
155 | yield (dammit.markup, dammit.original_encoding, | 368 | yield (dammit.markup, dammit.original_encoding, |
156 | dammit.declared_html_encoding, | 369 | dammit.declared_html_encoding, |
157 | dammit.contains_replacement_characters) | 370 | dammit.contains_replacement_characters) |
158 | 371 | ||
159 | def feed(self, markup): | 372 | def feed(self, markup): |
373 | """Run some incoming markup through some parsing process, | ||
374 | populating the `BeautifulSoup` object in self.soup. | ||
375 | """ | ||
160 | args, kwargs = self.parser_args | 376 | args, kwargs = self.parser_args |
161 | parser = BeautifulSoupHTMLParser(*args, **kwargs) | 377 | parser = BeautifulSoupHTMLParser(*args, **kwargs) |
162 | parser.soup = self.soup | 378 | parser.soup = self.soup |
163 | try: | 379 | try: |
164 | parser.feed(markup) | 380 | parser.feed(markup) |
165 | except HTMLParseError as e: | 381 | parser.close() |
166 | warnings.warn(RuntimeWarning( | 382 | except AssertionError as e: |
167 | "Python's built-in HTMLParser cannot parse the given document. This is not a bug in Beautiful Soup. The best solution is to install an external parser (lxml or html5lib), and use Beautiful Soup with that parser. See http://www.crummy.com/software/BeautifulSoup/bs4/doc/#installing-a-parser for help.")) | 383 | # html.parser raises AssertionError in rare cases to |
168 | raise e | 384 | # indicate a fatal problem with the markup, especially |
169 | 385 | # when there's an error in the doctype declaration. | |
170 | # Patch 3.2 versions of HTMLParser earlier than 3.2.3 to use some | 386 | raise ParserRejectedMarkup(e) |
171 | # 3.2.3 code. This ensures they don't treat markup like <p></p> as a | 387 | parser.already_closed_empty_element = [] |
172 | # string. | ||
173 | # | ||
174 | # XXX This code can be removed once most Python 3 users are on 3.2.3. | ||
175 | if major == 3 and minor == 2 and not CONSTRUCTOR_TAKES_STRICT: | ||
176 | import re | ||
177 | attrfind_tolerant = re.compile( | ||
178 | r'\s*((?<=[\'"\s])[^\s/>][^\s/=>]*)(\s*=+\s*' | ||
179 | r'(\'[^\']*\'|"[^"]*"|(?![\'"])[^>\s]*))?') | ||
180 | HTMLParserTreeBuilder.attrfind_tolerant = attrfind_tolerant | ||
181 | |||
182 | locatestarttagend = re.compile(r""" | ||
183 | <[a-zA-Z][-.a-zA-Z0-9:_]* # tag name | ||
184 | (?:\s+ # whitespace before attribute name | ||
185 | (?:[a-zA-Z_][-.:a-zA-Z0-9_]* # attribute name | ||
186 | (?:\s*=\s* # value indicator | ||
187 | (?:'[^']*' # LITA-enclosed value | ||
188 | |\"[^\"]*\" # LIT-enclosed value | ||
189 | |[^'\">\s]+ # bare value | ||
190 | ) | ||
191 | )? | ||
192 | ) | ||
193 | )* | ||
194 | \s* # trailing whitespace | ||
195 | """, re.VERBOSE) | ||
196 | BeautifulSoupHTMLParser.locatestarttagend = locatestarttagend | ||
197 | |||
198 | from html.parser import tagfind, attrfind | ||
199 | |||
200 | def parse_starttag(self, i): | ||
201 | self.__starttag_text = None | ||
202 | endpos = self.check_for_whole_start_tag(i) | ||
203 | if endpos < 0: | ||
204 | return endpos | ||
205 | rawdata = self.rawdata | ||
206 | self.__starttag_text = rawdata[i:endpos] | ||
207 | |||
208 | # Now parse the data between i+1 and j into a tag and attrs | ||
209 | attrs = [] | ||
210 | match = tagfind.match(rawdata, i+1) | ||
211 | assert match, 'unexpected call to parse_starttag()' | ||
212 | k = match.end() | ||
213 | self.lasttag = tag = rawdata[i+1:k].lower() | ||
214 | while k < endpos: | ||
215 | if self.strict: | ||
216 | m = attrfind.match(rawdata, k) | ||
217 | else: | ||
218 | m = attrfind_tolerant.match(rawdata, k) | ||
219 | if not m: | ||
220 | break | ||
221 | attrname, rest, attrvalue = m.group(1, 2, 3) | ||
222 | if not rest: | ||
223 | attrvalue = None | ||
224 | elif attrvalue[:1] == '\'' == attrvalue[-1:] or \ | ||
225 | attrvalue[:1] == '"' == attrvalue[-1:]: | ||
226 | attrvalue = attrvalue[1:-1] | ||
227 | if attrvalue: | ||
228 | attrvalue = self.unescape(attrvalue) | ||
229 | attrs.append((attrname.lower(), attrvalue)) | ||
230 | k = m.end() | ||
231 | |||
232 | end = rawdata[k:endpos].strip() | ||
233 | if end not in (">", "/>"): | ||
234 | lineno, offset = self.getpos() | ||
235 | if "\n" in self.__starttag_text: | ||
236 | lineno = lineno + self.__starttag_text.count("\n") | ||
237 | offset = len(self.__starttag_text) \ | ||
238 | - self.__starttag_text.rfind("\n") | ||
239 | else: | ||
240 | offset = offset + len(self.__starttag_text) | ||
241 | if self.strict: | ||
242 | self.error("junk characters in start tag: %r" | ||
243 | % (rawdata[k:endpos][:20],)) | ||
244 | self.handle_data(rawdata[i:endpos]) | ||
245 | return endpos | ||
246 | if end.endswith('/>'): | ||
247 | # XHTML-style empty tag: <span attr="value" /> | ||
248 | self.handle_startendtag(tag, attrs) | ||
249 | else: | ||
250 | self.handle_starttag(tag, attrs) | ||
251 | if tag in self.CDATA_CONTENT_ELEMENTS: | ||
252 | self.set_cdata_mode(tag) | ||
253 | return endpos | ||
254 | |||
255 | def set_cdata_mode(self, elem): | ||
256 | self.cdata_elem = elem.lower() | ||
257 | self.interesting = re.compile(r'</\s*%s\s*>' % self.cdata_elem, re.I) | ||
258 | |||
259 | BeautifulSoupHTMLParser.parse_starttag = parse_starttag | ||
260 | BeautifulSoupHTMLParser.set_cdata_mode = set_cdata_mode | ||
261 | |||
262 | CONSTRUCTOR_TAKES_STRICT = True | ||
diff --git a/bitbake/lib/bs4/builder/_lxml.py b/bitbake/lib/bs4/builder/_lxml.py index 9c6c14ee65..4f7cf74681 100644 --- a/bitbake/lib/bs4/builder/_lxml.py +++ b/bitbake/lib/bs4/builder/_lxml.py | |||
@@ -1,19 +1,28 @@ | |||
1 | # Use of this source code is governed by the MIT license. | ||
2 | __license__ = "MIT" | ||
3 | |||
1 | __all__ = [ | 4 | __all__ = [ |
2 | 'LXMLTreeBuilderForXML', | 5 | 'LXMLTreeBuilderForXML', |
3 | 'LXMLTreeBuilder', | 6 | 'LXMLTreeBuilder', |
4 | ] | 7 | ] |
5 | 8 | ||
9 | try: | ||
10 | from collections.abc import Callable # Python 3.6 | ||
11 | except ImportError as e: | ||
12 | from collections import Callable | ||
13 | |||
6 | from io import BytesIO | 14 | from io import BytesIO |
7 | from io import StringIO | 15 | from io import StringIO |
8 | import collections | ||
9 | from lxml import etree | 16 | from lxml import etree |
10 | from bs4.element import ( | 17 | from bs4.element import ( |
11 | Comment, | 18 | Comment, |
12 | Doctype, | 19 | Doctype, |
13 | NamespacedAttribute, | 20 | NamespacedAttribute, |
14 | ProcessingInstruction, | 21 | ProcessingInstruction, |
22 | XMLProcessingInstruction, | ||
15 | ) | 23 | ) |
16 | from bs4.builder import ( | 24 | from bs4.builder import ( |
25 | DetectsXMLParsedAsHTML, | ||
17 | FAST, | 26 | FAST, |
18 | HTML, | 27 | HTML, |
19 | HTMLTreeBuilder, | 28 | HTMLTreeBuilder, |
@@ -25,10 +34,15 @@ from bs4.dammit import EncodingDetector | |||
25 | 34 | ||
26 | LXML = 'lxml' | 35 | LXML = 'lxml' |
27 | 36 | ||
37 | def _invert(d): | ||
38 | "Invert a dictionary." | ||
39 | return dict((v,k) for k, v in list(d.items())) | ||
40 | |||
28 | class LXMLTreeBuilderForXML(TreeBuilder): | 41 | class LXMLTreeBuilderForXML(TreeBuilder): |
29 | DEFAULT_PARSER_CLASS = etree.XMLParser | 42 | DEFAULT_PARSER_CLASS = etree.XMLParser |
30 | 43 | ||
31 | is_xml = True | 44 | is_xml = True |
45 | processing_instruction_class = XMLProcessingInstruction | ||
32 | 46 | ||
33 | NAME = "lxml-xml" | 47 | NAME = "lxml-xml" |
34 | ALTERNATE_NAMES = ["xml"] | 48 | ALTERNATE_NAMES = ["xml"] |
@@ -40,26 +54,79 @@ class LXMLTreeBuilderForXML(TreeBuilder): | |||
40 | 54 | ||
41 | # This namespace mapping is specified in the XML Namespace | 55 | # This namespace mapping is specified in the XML Namespace |
42 | # standard. | 56 | # standard. |
43 | DEFAULT_NSMAPS = {'http://www.w3.org/XML/1998/namespace' : "xml"} | 57 | DEFAULT_NSMAPS = dict(xml='http://www.w3.org/XML/1998/namespace') |
58 | |||
59 | DEFAULT_NSMAPS_INVERTED = _invert(DEFAULT_NSMAPS) | ||
60 | |||
61 | # NOTE: If we parsed Element objects and looked at .sourceline, | ||
62 | # we'd be able to see the line numbers from the original document. | ||
63 | # But instead we build an XMLParser or HTMLParser object to serve | ||
64 | # as the target of parse messages, and those messages don't include | ||
65 | # line numbers. | ||
66 | # See: https://bugs.launchpad.net/lxml/+bug/1846906 | ||
67 | |||
68 | def initialize_soup(self, soup): | ||
69 | """Let the BeautifulSoup object know about the standard namespace | ||
70 | mapping. | ||
71 | |||
72 | :param soup: A `BeautifulSoup`. | ||
73 | """ | ||
74 | super(LXMLTreeBuilderForXML, self).initialize_soup(soup) | ||
75 | self._register_namespaces(self.DEFAULT_NSMAPS) | ||
76 | |||
77 | def _register_namespaces(self, mapping): | ||
78 | """Let the BeautifulSoup object know about namespaces encountered | ||
79 | while parsing the document. | ||
80 | |||
81 | This might be useful later on when creating CSS selectors. | ||
82 | |||
83 | This will track (almost) all namespaces, even ones that were | ||
84 | only in scope for part of the document. If two namespaces have | ||
85 | the same prefix, only the first one encountered will be | ||
86 | tracked. Un-prefixed namespaces are not tracked. | ||
44 | 87 | ||
88 | :param mapping: A dictionary mapping namespace prefixes to URIs. | ||
89 | """ | ||
90 | for key, value in list(mapping.items()): | ||
91 | # This is 'if key' and not 'if key is not None' because we | ||
92 | # don't track un-prefixed namespaces. Soupselect will | ||
93 | # treat an un-prefixed namespace as the default, which | ||
94 | # causes confusion in some cases. | ||
95 | if key and key not in self.soup._namespaces: | ||
96 | # Let the BeautifulSoup object know about a new namespace. | ||
97 | # If there are multiple namespaces defined with the same | ||
98 | # prefix, the first one in the document takes precedence. | ||
99 | self.soup._namespaces[key] = value | ||
100 | |||
45 | def default_parser(self, encoding): | 101 | def default_parser(self, encoding): |
46 | # This can either return a parser object or a class, which | 102 | """Find the default parser for the given encoding. |
47 | # will be instantiated with default arguments. | 103 | |
104 | :param encoding: A string. | ||
105 | :return: Either a parser object or a class, which | ||
106 | will be instantiated with default arguments. | ||
107 | """ | ||
48 | if self._default_parser is not None: | 108 | if self._default_parser is not None: |
49 | return self._default_parser | 109 | return self._default_parser |
50 | return etree.XMLParser( | 110 | return etree.XMLParser( |
51 | target=self, strip_cdata=False, recover=True, encoding=encoding) | 111 | target=self, strip_cdata=False, recover=True, encoding=encoding) |
52 | 112 | ||
53 | def parser_for(self, encoding): | 113 | def parser_for(self, encoding): |
114 | """Instantiate an appropriate parser for the given encoding. | ||
115 | |||
116 | :param encoding: A string. | ||
117 | :return: A parser object such as an `etree.XMLParser`. | ||
118 | """ | ||
54 | # Use the default parser. | 119 | # Use the default parser. |
55 | parser = self.default_parser(encoding) | 120 | parser = self.default_parser(encoding) |
56 | 121 | ||
57 | if isinstance(parser, collections.Callable): | 122 | if isinstance(parser, Callable): |
58 | # Instantiate the parser with default arguments | 123 | # Instantiate the parser with default arguments |
59 | parser = parser(target=self, strip_cdata=False, encoding=encoding) | 124 | parser = parser( |
125 | target=self, strip_cdata=False, recover=True, encoding=encoding | ||
126 | ) | ||
60 | return parser | 127 | return parser |
61 | 128 | ||
62 | def __init__(self, parser=None, empty_element_tags=None): | 129 | def __init__(self, parser=None, empty_element_tags=None, **kwargs): |
63 | # TODO: Issue a warning if parser is present but not a | 130 | # TODO: Issue a warning if parser is present but not a |
64 | # callable, since that means there's no way to create new | 131 | # callable, since that means there's no way to create new |
65 | # parsers for different encodings. | 132 | # parsers for different encodings. |
@@ -67,8 +134,10 @@ class LXMLTreeBuilderForXML(TreeBuilder): | |||
67 | if empty_element_tags is not None: | 134 | if empty_element_tags is not None: |
68 | self.empty_element_tags = set(empty_element_tags) | 135 | self.empty_element_tags = set(empty_element_tags) |
69 | self.soup = None | 136 | self.soup = None |
70 | self.nsmaps = [self.DEFAULT_NSMAPS] | 137 | self.nsmaps = [self.DEFAULT_NSMAPS_INVERTED] |
71 | 138 | self.active_namespace_prefixes = [dict(self.DEFAULT_NSMAPS)] | |
139 | super(LXMLTreeBuilderForXML, self).__init__(**kwargs) | ||
140 | |||
72 | def _getNsTag(self, tag): | 141 | def _getNsTag(self, tag): |
73 | # Split the namespace URL out of a fully-qualified lxml tag | 142 | # Split the namespace URL out of a fully-qualified lxml tag |
74 | # name. Copied from lxml's src/lxml/sax.py. | 143 | # name. Copied from lxml's src/lxml/sax.py. |
@@ -80,16 +149,51 @@ class LXMLTreeBuilderForXML(TreeBuilder): | |||
80 | def prepare_markup(self, markup, user_specified_encoding=None, | 149 | def prepare_markup(self, markup, user_specified_encoding=None, |
81 | exclude_encodings=None, | 150 | exclude_encodings=None, |
82 | document_declared_encoding=None): | 151 | document_declared_encoding=None): |
83 | """ | 152 | """Run any preliminary steps necessary to make incoming markup |
84 | :yield: A series of 4-tuples. | 153 | acceptable to the parser. |
154 | |||
155 | lxml really wants to get a bytestring and convert it to | ||
156 | Unicode itself. So instead of using UnicodeDammit to convert | ||
157 | the bytestring to Unicode using different encodings, this | ||
158 | implementation uses EncodingDetector to iterate over the | ||
159 | encodings, and tell lxml to try to parse the document as each | ||
160 | one in turn. | ||
161 | |||
162 | :param markup: Some markup -- hopefully a bytestring. | ||
163 | :param user_specified_encoding: The user asked to try this encoding. | ||
164 | :param document_declared_encoding: The markup itself claims to be | ||
165 | in this encoding. | ||
166 | :param exclude_encodings: The user asked _not_ to try any of | ||
167 | these encodings. | ||
168 | |||
169 | :yield: A series of 4-tuples: | ||
85 | (markup, encoding, declared encoding, | 170 | (markup, encoding, declared encoding, |
86 | has undergone character replacement) | 171 | has undergone character replacement) |
87 | 172 | ||
88 | Each 4-tuple represents a strategy for parsing the document. | 173 | Each 4-tuple represents a strategy for converting the |
174 | document to Unicode and parsing it. Each strategy will be tried | ||
175 | in turn. | ||
89 | """ | 176 | """ |
177 | is_html = not self.is_xml | ||
178 | if is_html: | ||
179 | self.processing_instruction_class = ProcessingInstruction | ||
180 | # We're in HTML mode, so if we're given XML, that's worth | ||
181 | # noting. | ||
182 | DetectsXMLParsedAsHTML.warn_if_markup_looks_like_xml( | ||
183 | markup, stacklevel=3 | ||
184 | ) | ||
185 | else: | ||
186 | self.processing_instruction_class = XMLProcessingInstruction | ||
187 | |||
90 | if isinstance(markup, str): | 188 | if isinstance(markup, str): |
91 | # We were given Unicode. Maybe lxml can parse Unicode on | 189 | # We were given Unicode. Maybe lxml can parse Unicode on |
92 | # this system? | 190 | # this system? |
191 | |||
192 | # TODO: This is a workaround for | ||
193 | # https://bugs.launchpad.net/lxml/+bug/1948551. | ||
194 | # We can remove it once the upstream issue is fixed. | ||
195 | if len(markup) > 0 and markup[0] == u'\N{BYTE ORDER MARK}': | ||
196 | markup = markup[1:] | ||
93 | yield markup, None, document_declared_encoding, False | 197 | yield markup, None, document_declared_encoding, False |
94 | 198 | ||
95 | if isinstance(markup, str): | 199 | if isinstance(markup, str): |
@@ -98,14 +202,19 @@ class LXMLTreeBuilderForXML(TreeBuilder): | |||
98 | yield (markup.encode("utf8"), "utf8", | 202 | yield (markup.encode("utf8"), "utf8", |
99 | document_declared_encoding, False) | 203 | document_declared_encoding, False) |
100 | 204 | ||
101 | # Instead of using UnicodeDammit to convert the bytestring to | 205 | # This was provided by the end-user; treat it as a known |
102 | # Unicode using different encodings, use EncodingDetector to | 206 | # definite encoding per the algorithm laid out in the HTML5 |
103 | # iterate over the encodings, and tell lxml to try to parse | 207 | # spec. (See the EncodingDetector class for details.) |
104 | # the document as each one in turn. | 208 | known_definite_encodings = [user_specified_encoding] |
105 | is_html = not self.is_xml | 209 | |
106 | try_encodings = [user_specified_encoding, document_declared_encoding] | 210 | # This was found in the document; treat it as a slightly lower-priority |
211 | # user encoding. | ||
212 | user_encodings = [document_declared_encoding] | ||
107 | detector = EncodingDetector( | 213 | detector = EncodingDetector( |
108 | markup, try_encodings, is_html, exclude_encodings) | 214 | markup, known_definite_encodings=known_definite_encodings, |
215 | user_encodings=user_encodings, is_html=is_html, | ||
216 | exclude_encodings=exclude_encodings | ||
217 | ) | ||
109 | for encoding in detector.encodings: | 218 | for encoding in detector.encodings: |
110 | yield (detector.markup, encoding, document_declared_encoding, False) | 219 | yield (detector.markup, encoding, document_declared_encoding, False) |
111 | 220 | ||
@@ -128,25 +237,45 @@ class LXMLTreeBuilderForXML(TreeBuilder): | |||
128 | self.parser.feed(data) | 237 | self.parser.feed(data) |
129 | self.parser.close() | 238 | self.parser.close() |
130 | except (UnicodeDecodeError, LookupError, etree.ParserError) as e: | 239 | except (UnicodeDecodeError, LookupError, etree.ParserError) as e: |
131 | raise ParserRejectedMarkup(str(e)) | 240 | raise ParserRejectedMarkup(e) |
132 | 241 | ||
133 | def close(self): | 242 | def close(self): |
134 | self.nsmaps = [self.DEFAULT_NSMAPS] | 243 | self.nsmaps = [self.DEFAULT_NSMAPS_INVERTED] |
135 | 244 | ||
136 | def start(self, name, attrs, nsmap={}): | 245 | def start(self, name, attrs, nsmap={}): |
137 | # Make sure attrs is a mutable dict--lxml may send an immutable dictproxy. | 246 | # Make sure attrs is a mutable dict--lxml may send an immutable dictproxy. |
138 | attrs = dict(attrs) | 247 | attrs = dict(attrs) |
139 | nsprefix = None | 248 | nsprefix = None |
140 | # Invert each namespace map as it comes in. | 249 | # Invert each namespace map as it comes in. |
141 | if len(self.nsmaps) > 1: | 250 | if len(nsmap) == 0 and len(self.nsmaps) > 1: |
142 | # There are no new namespaces for this tag, but | 251 | # There are no new namespaces for this tag, but |
143 | # non-default namespaces are in play, so we need a | 252 | # non-default namespaces are in play, so we need a |
144 | # separate tag stack to know when they end. | 253 | # separate tag stack to know when they end. |
145 | self.nsmaps.append(None) | 254 | self.nsmaps.append(None) |
146 | elif len(nsmap) > 0: | 255 | elif len(nsmap) > 0: |
147 | # A new namespace mapping has come into play. | 256 | # A new namespace mapping has come into play. |
148 | inverted_nsmap = dict((value, key) for key, value in list(nsmap.items())) | 257 | |
149 | self.nsmaps.append(inverted_nsmap) | 258 | # First, Let the BeautifulSoup object know about it. |
259 | self._register_namespaces(nsmap) | ||
260 | |||
261 | # Then, add it to our running list of inverted namespace | ||
262 | # mappings. | ||
263 | self.nsmaps.append(_invert(nsmap)) | ||
264 | |||
265 | # The currently active namespace prefixes have | ||
266 | # changed. Calculate the new mapping so it can be stored | ||
267 | # with all Tag objects created while these prefixes are in | ||
268 | # scope. | ||
269 | current_mapping = dict(self.active_namespace_prefixes[-1]) | ||
270 | current_mapping.update(nsmap) | ||
271 | |||
272 | # We should not track un-prefixed namespaces as we can only hold one | ||
273 | # and it will be recognized as the default namespace by soupsieve, | ||
274 | # which may be confusing in some situations. | ||
275 | if '' in current_mapping: | ||
276 | del current_mapping[''] | ||
277 | self.active_namespace_prefixes.append(current_mapping) | ||
278 | |||
150 | # Also treat the namespace mapping as a set of attributes on the | 279 | # Also treat the namespace mapping as a set of attributes on the |
151 | # tag, so we can recreate it later. | 280 | # tag, so we can recreate it later. |
152 | attrs = attrs.copy() | 281 | attrs = attrs.copy() |
@@ -171,8 +300,11 @@ class LXMLTreeBuilderForXML(TreeBuilder): | |||
171 | 300 | ||
172 | namespace, name = self._getNsTag(name) | 301 | namespace, name = self._getNsTag(name) |
173 | nsprefix = self._prefix_for_namespace(namespace) | 302 | nsprefix = self._prefix_for_namespace(namespace) |
174 | self.soup.handle_starttag(name, namespace, nsprefix, attrs) | 303 | self.soup.handle_starttag( |
175 | 304 | name, namespace, nsprefix, attrs, | |
305 | namespaces=self.active_namespace_prefixes[-1] | ||
306 | ) | ||
307 | |||
176 | def _prefix_for_namespace(self, namespace): | 308 | def _prefix_for_namespace(self, namespace): |
177 | """Find the currently active prefix for the given namespace.""" | 309 | """Find the currently active prefix for the given namespace.""" |
178 | if namespace is None: | 310 | if namespace is None: |
@@ -196,13 +328,20 @@ class LXMLTreeBuilderForXML(TreeBuilder): | |||
196 | if len(self.nsmaps) > 1: | 328 | if len(self.nsmaps) > 1: |
197 | # This tag, or one of its parents, introduced a namespace | 329 | # This tag, or one of its parents, introduced a namespace |
198 | # mapping, so pop it off the stack. | 330 | # mapping, so pop it off the stack. |
199 | self.nsmaps.pop() | 331 | out_of_scope_nsmap = self.nsmaps.pop() |
200 | 332 | ||
333 | if out_of_scope_nsmap is not None: | ||
334 | # This tag introduced a namespace mapping which is no | ||
335 | # longer in scope. Recalculate the currently active | ||
336 | # namespace prefixes. | ||
337 | self.active_namespace_prefixes.pop() | ||
338 | |||
201 | def pi(self, target, data): | 339 | def pi(self, target, data): |
202 | self.soup.endData() | 340 | self.soup.endData() |
203 | self.soup.handle_data(target + ' ' + data) | 341 | data = target + ' ' + data |
204 | self.soup.endData(ProcessingInstruction) | 342 | self.soup.handle_data(data) |
205 | 343 | self.soup.endData(self.processing_instruction_class) | |
344 | |||
206 | def data(self, content): | 345 | def data(self, content): |
207 | self.soup.handle_data(content) | 346 | self.soup.handle_data(content) |
208 | 347 | ||
@@ -229,6 +368,7 @@ class LXMLTreeBuilder(HTMLTreeBuilder, LXMLTreeBuilderForXML): | |||
229 | 368 | ||
230 | features = ALTERNATE_NAMES + [NAME, HTML, FAST, PERMISSIVE] | 369 | features = ALTERNATE_NAMES + [NAME, HTML, FAST, PERMISSIVE] |
231 | is_xml = False | 370 | is_xml = False |
371 | processing_instruction_class = ProcessingInstruction | ||
232 | 372 | ||
233 | def default_parser(self, encoding): | 373 | def default_parser(self, encoding): |
234 | return etree.HTMLParser | 374 | return etree.HTMLParser |
@@ -240,7 +380,7 @@ class LXMLTreeBuilder(HTMLTreeBuilder, LXMLTreeBuilderForXML): | |||
240 | self.parser.feed(markup) | 380 | self.parser.feed(markup) |
241 | self.parser.close() | 381 | self.parser.close() |
242 | except (UnicodeDecodeError, LookupError, etree.ParserError) as e: | 382 | except (UnicodeDecodeError, LookupError, etree.ParserError) as e: |
243 | raise ParserRejectedMarkup(str(e)) | 383 | raise ParserRejectedMarkup(e) |
244 | 384 | ||
245 | 385 | ||
246 | def test_fragment_to_document(self, fragment): | 386 | def test_fragment_to_document(self, fragment): |
diff --git a/bitbake/lib/bs4/css.py b/bitbake/lib/bs4/css.py new file mode 100644 index 0000000000..cd1fd2df88 --- /dev/null +++ b/bitbake/lib/bs4/css.py | |||
@@ -0,0 +1,274 @@ | |||
1 | """Integration code for CSS selectors using Soup Sieve (pypi: soupsieve).""" | ||
2 | |||
3 | # We don't use soupsieve | ||
4 | soupsieve = None | ||
5 | |||
6 | |||
7 | class CSS(object): | ||
8 | """A proxy object against the soupsieve library, to simplify its | ||
9 | CSS selector API. | ||
10 | |||
11 | Acquire this object through the .css attribute on the | ||
12 | BeautifulSoup object, or on the Tag you want to use as the | ||
13 | starting point for a CSS selector. | ||
14 | |||
15 | The main advantage of doing this is that the tag to be selected | ||
16 | against doesn't need to be explicitly specified in the function | ||
17 | calls, since it's already scoped to a tag. | ||
18 | """ | ||
19 | |||
20 | def __init__(self, tag, api=soupsieve): | ||
21 | """Constructor. | ||
22 | |||
23 | You don't need to instantiate this class yourself; instead, | ||
24 | access the .css attribute on the BeautifulSoup object, or on | ||
25 | the Tag you want to use as the starting point for your CSS | ||
26 | selector. | ||
27 | |||
28 | :param tag: All CSS selectors will use this as their starting | ||
29 | point. | ||
30 | |||
31 | :param api: A plug-in replacement for the soupsieve module, | ||
32 | designed mainly for use in tests. | ||
33 | """ | ||
34 | if api is None: | ||
35 | raise NotImplementedError( | ||
36 | "Cannot execute CSS selectors because the soupsieve package is not installed." | ||
37 | ) | ||
38 | self.api = api | ||
39 | self.tag = tag | ||
40 | |||
41 | def escape(self, ident): | ||
42 | """Escape a CSS identifier. | ||
43 | |||
44 | This is a simple wrapper around soupsieve.escape(). See the | ||
45 | documentation for that function for more information. | ||
46 | """ | ||
47 | if soupsieve is None: | ||
48 | raise NotImplementedError( | ||
49 | "Cannot escape CSS identifiers because the soupsieve package is not installed." | ||
50 | ) | ||
51 | return self.api.escape(ident) | ||
52 | |||
53 | def _ns(self, ns, select): | ||
54 | """Normalize a dictionary of namespaces.""" | ||
55 | if not isinstance(select, self.api.SoupSieve) and ns is None: | ||
56 | # If the selector is a precompiled pattern, it already has | ||
57 | # a namespace context compiled in, which cannot be | ||
58 | # replaced. | ||
59 | ns = self.tag._namespaces | ||
60 | return ns | ||
61 | |||
62 | def _rs(self, results): | ||
63 | """Normalize a list of results to a ResultSet. | ||
64 | |||
65 | A ResultSet is more consistent with the rest of Beautiful | ||
66 | Soup's API, and ResultSet.__getattr__ has a helpful error | ||
67 | message if you try to treat a list of results as a single | ||
68 | result (a common mistake). | ||
69 | """ | ||
70 | # Import here to avoid circular import | ||
71 | from bs4.element import ResultSet | ||
72 | return ResultSet(None, results) | ||
73 | |||
74 | def compile(self, select, namespaces=None, flags=0, **kwargs): | ||
75 | """Pre-compile a selector and return the compiled object. | ||
76 | |||
77 | :param selector: A CSS selector. | ||
78 | |||
79 | :param namespaces: A dictionary mapping namespace prefixes | ||
80 | used in the CSS selector to namespace URIs. By default, | ||
81 | Beautiful Soup will use the prefixes it encountered while | ||
82 | parsing the document. | ||
83 | |||
84 | :param flags: Flags to be passed into Soup Sieve's | ||
85 | soupsieve.compile() method. | ||
86 | |||
87 | :param kwargs: Keyword arguments to be passed into SoupSieve's | ||
88 | soupsieve.compile() method. | ||
89 | |||
90 | :return: A precompiled selector object. | ||
91 | :rtype: soupsieve.SoupSieve | ||
92 | """ | ||
93 | return self.api.compile( | ||
94 | select, self._ns(namespaces, select), flags, **kwargs | ||
95 | ) | ||
96 | |||
97 | def select_one(self, select, namespaces=None, flags=0, **kwargs): | ||
98 | """Perform a CSS selection operation on the current Tag and return the | ||
99 | first result. | ||
100 | |||
101 | This uses the Soup Sieve library. For more information, see | ||
102 | that library's documentation for the soupsieve.select_one() | ||
103 | method. | ||
104 | |||
105 | :param selector: A CSS selector. | ||
106 | |||
107 | :param namespaces: A dictionary mapping namespace prefixes | ||
108 | used in the CSS selector to namespace URIs. By default, | ||
109 | Beautiful Soup will use the prefixes it encountered while | ||
110 | parsing the document. | ||
111 | |||
112 | :param flags: Flags to be passed into Soup Sieve's | ||
113 | soupsieve.select_one() method. | ||
114 | |||
115 | :param kwargs: Keyword arguments to be passed into SoupSieve's | ||
116 | soupsieve.select_one() method. | ||
117 | |||
118 | :return: A Tag, or None if the selector has no match. | ||
119 | :rtype: bs4.element.Tag | ||
120 | |||
121 | """ | ||
122 | return self.api.select_one( | ||
123 | select, self.tag, self._ns(namespaces, select), flags, **kwargs | ||
124 | ) | ||
125 | |||
126 | def select(self, select, namespaces=None, limit=0, flags=0, **kwargs): | ||
127 | """Perform a CSS selection operation on the current Tag. | ||
128 | |||
129 | This uses the Soup Sieve library. For more information, see | ||
130 | that library's documentation for the soupsieve.select() | ||
131 | method. | ||
132 | |||
133 | :param selector: A string containing a CSS selector. | ||
134 | |||
135 | :param namespaces: A dictionary mapping namespace prefixes | ||
136 | used in the CSS selector to namespace URIs. By default, | ||
137 | Beautiful Soup will pass in the prefixes it encountered while | ||
138 | parsing the document. | ||
139 | |||
140 | :param limit: After finding this number of results, stop looking. | ||
141 | |||
142 | :param flags: Flags to be passed into Soup Sieve's | ||
143 | soupsieve.select() method. | ||
144 | |||
145 | :param kwargs: Keyword arguments to be passed into SoupSieve's | ||
146 | soupsieve.select() method. | ||
147 | |||
148 | :return: A ResultSet of Tag objects. | ||
149 | :rtype: bs4.element.ResultSet | ||
150 | |||
151 | """ | ||
152 | if limit is None: | ||
153 | limit = 0 | ||
154 | |||
155 | return self._rs( | ||
156 | self.api.select( | ||
157 | select, self.tag, self._ns(namespaces, select), limit, flags, | ||
158 | **kwargs | ||
159 | ) | ||
160 | ) | ||
161 | |||
162 | def iselect(self, select, namespaces=None, limit=0, flags=0, **kwargs): | ||
163 | """Perform a CSS selection operation on the current Tag. | ||
164 | |||
165 | This uses the Soup Sieve library. For more information, see | ||
166 | that library's documentation for the soupsieve.iselect() | ||
167 | method. It is the same as select(), but it returns a generator | ||
168 | instead of a list. | ||
169 | |||
170 | :param selector: A string containing a CSS selector. | ||
171 | |||
172 | :param namespaces: A dictionary mapping namespace prefixes | ||
173 | used in the CSS selector to namespace URIs. By default, | ||
174 | Beautiful Soup will pass in the prefixes it encountered while | ||
175 | parsing the document. | ||
176 | |||
177 | :param limit: After finding this number of results, stop looking. | ||
178 | |||
179 | :param flags: Flags to be passed into Soup Sieve's | ||
180 | soupsieve.iselect() method. | ||
181 | |||
182 | :param kwargs: Keyword arguments to be passed into SoupSieve's | ||
183 | soupsieve.iselect() method. | ||
184 | |||
185 | :return: A generator | ||
186 | :rtype: types.GeneratorType | ||
187 | """ | ||
188 | return self.api.iselect( | ||
189 | select, self.tag, self._ns(namespaces, select), limit, flags, **kwargs | ||
190 | ) | ||
191 | |||
192 | def closest(self, select, namespaces=None, flags=0, **kwargs): | ||
193 | """Find the Tag closest to this one that matches the given selector. | ||
194 | |||
195 | This uses the Soup Sieve library. For more information, see | ||
196 | that library's documentation for the soupsieve.closest() | ||
197 | method. | ||
198 | |||
199 | :param selector: A string containing a CSS selector. | ||
200 | |||
201 | :param namespaces: A dictionary mapping namespace prefixes | ||
202 | used in the CSS selector to namespace URIs. By default, | ||
203 | Beautiful Soup will pass in the prefixes it encountered while | ||
204 | parsing the document. | ||
205 | |||
206 | :param flags: Flags to be passed into Soup Sieve's | ||
207 | soupsieve.closest() method. | ||
208 | |||
209 | :param kwargs: Keyword arguments to be passed into SoupSieve's | ||
210 | soupsieve.closest() method. | ||
211 | |||
212 | :return: A Tag, or None if there is no match. | ||
213 | :rtype: bs4.Tag | ||
214 | |||
215 | """ | ||
216 | return self.api.closest( | ||
217 | select, self.tag, self._ns(namespaces, select), flags, **kwargs | ||
218 | ) | ||
219 | |||
220 | def match(self, select, namespaces=None, flags=0, **kwargs): | ||
221 | """Check whether this Tag matches the given CSS selector. | ||
222 | |||
223 | This uses the Soup Sieve library. For more information, see | ||
224 | that library's documentation for the soupsieve.match() | ||
225 | method. | ||
226 | |||
227 | :param selector: A CSS selector. | ||
228 | |||
229 | :param namespaces: A dictionary mapping namespace prefixes | ||
230 | used in the CSS selector to namespace URIs. By default, | ||
231 | Beautiful Soup will pass in the prefixes it encountered while | ||
232 | parsing the document. | ||
233 | |||
234 | :param flags: Flags to be passed into Soup Sieve's | ||
235 | soupsieve.match() method. | ||
236 | |||
237 | :param kwargs: Keyword arguments to be passed into SoupSieve's | ||
238 | soupsieve.match() method. | ||
239 | |||
240 | :return: True if this Tag matches the selector; False otherwise. | ||
241 | :rtype: bool | ||
242 | """ | ||
243 | return self.api.match( | ||
244 | select, self.tag, self._ns(namespaces, select), flags, **kwargs | ||
245 | ) | ||
246 | |||
247 | def filter(self, select, namespaces=None, flags=0, **kwargs): | ||
248 | """Filter this Tag's direct children based on the given CSS selector. | ||
249 | |||
250 | This uses the Soup Sieve library. It works the same way as | ||
251 | passing this Tag into that library's soupsieve.filter() | ||
252 | method. For more information, see the | ||
253 | documentation for soupsieve.filter(). | ||
254 | |||
255 | :param namespaces: A dictionary mapping namespace prefixes | ||
256 | used in the CSS selector to namespace URIs. By default, | ||
257 | Beautiful Soup will pass in the prefixes it encountered while | ||
258 | parsing the document. | ||
259 | |||
260 | :param flags: Flags to be passed into Soup Sieve's | ||
261 | soupsieve.filter() method. | ||
262 | |||
263 | :param kwargs: Keyword arguments to be passed into SoupSieve's | ||
264 | soupsieve.filter() method. | ||
265 | |||
266 | :return: A ResultSet of Tag objects. | ||
267 | :rtype: bs4.element.ResultSet | ||
268 | |||
269 | """ | ||
270 | return self._rs( | ||
271 | self.api.filter( | ||
272 | select, self.tag, self._ns(namespaces, select), flags, **kwargs | ||
273 | ) | ||
274 | ) | ||
diff --git a/bitbake/lib/bs4/dammit.py b/bitbake/lib/bs4/dammit.py index 7ad9e0dd1e..692433c57a 100644 --- a/bitbake/lib/bs4/dammit.py +++ b/bitbake/lib/bs4/dammit.py | |||
@@ -6,61 +6,185 @@ necessary. It is heavily based on code from Mark Pilgrim's Universal | |||
6 | Feed Parser. It works best on XML and HTML, but it does not rewrite the | 6 | Feed Parser. It works best on XML and HTML, but it does not rewrite the |
7 | XML or HTML to reflect a new encoding; that's the tree builder's job. | 7 | XML or HTML to reflect a new encoding; that's the tree builder's job. |
8 | """ | 8 | """ |
9 | # Use of this source code is governed by the MIT license. | ||
9 | __license__ = "MIT" | 10 | __license__ = "MIT" |
10 | 11 | ||
11 | import codecs | ||
12 | from html.entities import codepoint2name | 12 | from html.entities import codepoint2name |
13 | from collections import defaultdict | ||
14 | import codecs | ||
13 | import re | 15 | import re |
14 | import logging | 16 | import logging |
15 | 17 | import string | |
16 | # Import a library to autodetect character encodings. | 18 | |
17 | chardet_type = None | 19 | # Import a library to autodetect character encodings. We'll support |
20 | # any of a number of libraries that all support the same API: | ||
21 | # | ||
22 | # * cchardet | ||
23 | # * chardet | ||
24 | # * charset-normalizer | ||
25 | chardet_module = None | ||
18 | try: | 26 | try: |
19 | # First try the fast C implementation. | ||
20 | # PyPI package: cchardet | 27 | # PyPI package: cchardet |
21 | import cchardet | 28 | import cchardet as chardet_module |
22 | def chardet_dammit(s): | ||
23 | return cchardet.detect(s)['encoding'] | ||
24 | except ImportError: | 29 | except ImportError: |
25 | try: | 30 | try: |
26 | # Fall back to the pure Python implementation | ||
27 | # Debian package: python-chardet | 31 | # Debian package: python-chardet |
28 | # PyPI package: chardet | 32 | # PyPI package: chardet |
29 | import chardet | 33 | import chardet as chardet_module |
30 | def chardet_dammit(s): | ||
31 | return chardet.detect(s)['encoding'] | ||
32 | #import chardet.constants | ||
33 | #chardet.constants._debug = 1 | ||
34 | except ImportError: | 34 | except ImportError: |
35 | # No chardet available. | 35 | try: |
36 | def chardet_dammit(s): | 36 | # PyPI package: charset-normalizer |
37 | import charset_normalizer as chardet_module | ||
38 | except ImportError: | ||
39 | # No chardet available. | ||
40 | chardet_module = None | ||
41 | |||
42 | if chardet_module: | ||
43 | def chardet_dammit(s): | ||
44 | if isinstance(s, str): | ||
37 | return None | 45 | return None |
46 | return chardet_module.detect(s)['encoding'] | ||
47 | else: | ||
48 | def chardet_dammit(s): | ||
49 | return None | ||
38 | 50 | ||
39 | xml_encoding_re = re.compile( | 51 | # Build bytestring and Unicode versions of regular expressions for finding |
40 | r'^<\?.*encoding=[\'"](.*?)[\'"].*\?>'.encode(), re.I) | 52 | # a declared encoding inside an XML or HTML document. |
41 | html_meta_re = re.compile( | 53 | xml_encoding = '^\\s*<\\?.*encoding=[\'"](.*?)[\'"].*\\?>' |
42 | r'<\s*meta[^>]+charset\s*=\s*["\']?([^>]*?)[ /;\'">]'.encode(), re.I) | 54 | html_meta = '<\\s*meta[^>]+charset\\s*=\\s*["\']?([^>]*?)[ /;\'">]' |
55 | encoding_res = dict() | ||
56 | encoding_res[bytes] = { | ||
57 | 'html' : re.compile(html_meta.encode("ascii"), re.I), | ||
58 | 'xml' : re.compile(xml_encoding.encode("ascii"), re.I), | ||
59 | } | ||
60 | encoding_res[str] = { | ||
61 | 'html' : re.compile(html_meta, re.I), | ||
62 | 'xml' : re.compile(xml_encoding, re.I) | ||
63 | } | ||
64 | |||
65 | from html.entities import html5 | ||
43 | 66 | ||
44 | class EntitySubstitution(object): | 67 | class EntitySubstitution(object): |
45 | 68 | """The ability to substitute XML or HTML entities for certain characters.""" | |
46 | """Substitute XML or HTML entities for the corresponding characters.""" | ||
47 | 69 | ||
48 | def _populate_class_variables(): | 70 | def _populate_class_variables(): |
49 | lookup = {} | 71 | """Initialize variables used by this class to manage the plethora of |
50 | reverse_lookup = {} | 72 | HTML5 named entities. |
51 | characters_for_re = [] | 73 | |
74 | This function returns a 3-tuple containing two dictionaries | ||
75 | and a regular expression: | ||
76 | |||
77 | unicode_to_name - A mapping of Unicode strings like "⦨" to | ||
78 | entity names like "angmsdaa". When a single Unicode string has | ||
79 | multiple entity names, we try to choose the most commonly-used | ||
80 | name. | ||
81 | |||
82 | name_to_unicode: A mapping of entity names like "angmsdaa" to | ||
83 | Unicode strings like "⦨". | ||
84 | |||
85 | named_entity_re: A regular expression matching (almost) any | ||
86 | Unicode string that corresponds to an HTML5 named entity. | ||
87 | """ | ||
88 | unicode_to_name = {} | ||
89 | name_to_unicode = {} | ||
90 | |||
91 | short_entities = set() | ||
92 | long_entities_by_first_character = defaultdict(set) | ||
93 | |||
94 | for name_with_semicolon, character in sorted(html5.items()): | ||
95 | # "It is intentional, for legacy compatibility, that many | ||
96 | # code points have multiple character reference names. For | ||
97 | # example, some appear both with and without the trailing | ||
98 | # semicolon, or with different capitalizations." | ||
99 | # - https://html.spec.whatwg.org/multipage/named-characters.html#named-character-references | ||
100 | # | ||
101 | # The parsers are in charge of handling (or not) character | ||
102 | # references with no trailing semicolon, so we remove the | ||
103 | # semicolon whenever it appears. | ||
104 | if name_with_semicolon.endswith(';'): | ||
105 | name = name_with_semicolon[:-1] | ||
106 | else: | ||
107 | name = name_with_semicolon | ||
108 | |||
109 | # When parsing HTML, we want to recognize any known named | ||
110 | # entity and convert it to a sequence of Unicode | ||
111 | # characters. | ||
112 | if name not in name_to_unicode: | ||
113 | name_to_unicode[name] = character | ||
114 | |||
115 | # When _generating_ HTML, we want to recognize special | ||
116 | # character sequences that _could_ be converted to named | ||
117 | # entities. | ||
118 | unicode_to_name[character] = name | ||
119 | |||
120 | # We also need to build a regular expression that lets us | ||
121 | # _find_ those characters in output strings so we can | ||
122 | # replace them. | ||
123 | # | ||
124 | # This is tricky, for two reasons. | ||
125 | |||
126 | if (len(character) == 1 and ord(character) < 128 | ||
127 | and character not in '<>&'): | ||
128 | # First, it would be annoying to turn single ASCII | ||
129 | # characters like | into named entities like | ||
130 | # |. The exceptions are <>&, which we _must_ | ||
131 | # turn into named entities to produce valid HTML. | ||
132 | continue | ||
133 | |||
134 | if len(character) > 1 and all(ord(x) < 128 for x in character): | ||
135 | # We also do not want to turn _combinations_ of ASCII | ||
136 | # characters like 'fj' into named entities like 'fj', | ||
138 | # though that's more debatable. | ||
138 | continue | ||
139 | |||
140 | # Second, some named entities have a Unicode value that's | ||
141 | # a subset of the Unicode value for some _other_ named | ||
142 | # entity. As an example, \u2267' is ≧, | ||
143 | # but '\u2267\u0338' is ≧̸. Our regular | ||
144 | # expression needs to match the first two characters of | ||
145 | # "\u2267\u0338foo", but only the first character of | ||
146 | # "\u2267foo". | ||
147 | # | ||
148 | # In this step, we build two sets of characters that | ||
149 | # _eventually_ need to go into the regular expression. But | ||
150 | # we won't know exactly what the regular expression needs | ||
151 | # to look like until we've gone through the entire list of | ||
152 | # named entities. | ||
153 | if len(character) == 1: | ||
154 | short_entities.add(character) | ||
155 | else: | ||
156 | long_entities_by_first_character[character[0]].add(character) | ||
157 | |||
158 | # Now that we've been through the entire list of entities, we | ||
159 | # can create a regular expression that matches any of them. | ||
160 | particles = set() | ||
161 | for short in short_entities: | ||
162 | long_versions = long_entities_by_first_character[short] | ||
163 | if not long_versions: | ||
164 | particles.add(short) | ||
165 | else: | ||
166 | ignore = "".join([x[1] for x in long_versions]) | ||
167 | # This finds, e.g. \u2267 but only if it is _not_ | ||
168 | # followed by \u0338. | ||
169 | particles.add("%s(?![%s])" % (short, ignore)) | ||
170 | |||
171 | for long_entities in list(long_entities_by_first_character.values()): | ||
172 | for long_entity in long_entities: | ||
173 | particles.add(long_entity) | ||
174 | |||
175 | re_definition = "(%s)" % "|".join(particles) | ||
176 | |||
177 | # If an entity shows up in both html5 and codepoint2name, it's | ||
178 | # likely that HTML5 gives it several different names, such as | ||
179 | # 'rsquo' and 'rsquor'. When converting Unicode characters to | ||
180 | # named entities, the codepoint2name name should take | ||
181 | # precedence where possible, since that's the more easily | ||
182 | # recognizable one. | ||
52 | for codepoint, name in list(codepoint2name.items()): | 183 | for codepoint, name in list(codepoint2name.items()): |
53 | character = chr(codepoint) | 184 | character = chr(codepoint) |
54 | if codepoint != 34: | 185 | unicode_to_name[character] = name |
55 | # There's no point in turning the quotation mark into | 186 | |
56 | # ", unless it happens within an attribute value, which | 187 | return unicode_to_name, name_to_unicode, re.compile(re_definition) |
57 | # is handled elsewhere. | ||
58 | characters_for_re.append(character) | ||
59 | lookup[character] = name | ||
60 | # But we do want to turn " into the quotation mark. | ||
61 | reverse_lookup[name] = character | ||
62 | re_definition = "[%s]" % "".join(characters_for_re) | ||
63 | return lookup, reverse_lookup, re.compile(re_definition) | ||
64 | (CHARACTER_TO_HTML_ENTITY, HTML_ENTITY_TO_CHARACTER, | 188 | (CHARACTER_TO_HTML_ENTITY, HTML_ENTITY_TO_CHARACTER, |
65 | CHARACTER_TO_HTML_ENTITY_RE) = _populate_class_variables() | 189 | CHARACTER_TO_HTML_ENTITY_RE) = _populate_class_variables() |
66 | 190 | ||
@@ -72,21 +196,23 @@ class EntitySubstitution(object): | |||
72 | ">": "gt", | 196 | ">": "gt", |
73 | } | 197 | } |
74 | 198 | ||
75 | BARE_AMPERSAND_OR_BRACKET = re.compile(r"([<>]|" | 199 | BARE_AMPERSAND_OR_BRACKET = re.compile("([<>]|" |
76 | r"&(?!#\d+;|#x[0-9a-fA-F]+;|\w+;)" | 200 | "&(?!#\\d+;|#x[0-9a-fA-F]+;|\\w+;)" |
77 | r")") | 201 | ")") |
78 | 202 | ||
79 | AMPERSAND_OR_BRACKET = re.compile(r"([<>&])") | 203 | AMPERSAND_OR_BRACKET = re.compile("([<>&])") |
80 | 204 | ||
81 | @classmethod | 205 | @classmethod |
82 | def _substitute_html_entity(cls, matchobj): | 206 | def _substitute_html_entity(cls, matchobj): |
207 | """Used with a regular expression to substitute the | ||
208 | appropriate HTML entity for a special character string.""" | ||
83 | entity = cls.CHARACTER_TO_HTML_ENTITY.get(matchobj.group(0)) | 209 | entity = cls.CHARACTER_TO_HTML_ENTITY.get(matchobj.group(0)) |
84 | return "&%s;" % entity | 210 | return "&%s;" % entity |
85 | 211 | ||
86 | @classmethod | 212 | @classmethod |
87 | def _substitute_xml_entity(cls, matchobj): | 213 | def _substitute_xml_entity(cls, matchobj): |
88 | """Used with a regular expression to substitute the | 214 | """Used with a regular expression to substitute the |
89 | appropriate XML entity for an XML special character.""" | 215 | appropriate XML entity for a special character string.""" |
90 | entity = cls.CHARACTER_TO_XML_ENTITY[matchobj.group(0)] | 216 | entity = cls.CHARACTER_TO_XML_ENTITY[matchobj.group(0)] |
91 | return "&%s;" % entity | 217 | return "&%s;" % entity |
92 | 218 | ||
@@ -181,6 +307,8 @@ class EntitySubstitution(object): | |||
181 | containing a LATIN SMALL LETTER E WITH ACUTE, but replacing that | 307 | containing a LATIN SMALL LETTER E WITH ACUTE, but replacing that
182 | character with "é" will make it more readable to some | 308 | character with "é" will make it more readable to some |
183 | people. | 309 | people. |
310 | |||
311 | :param s: A Unicode string. | ||
184 | """ | 312 | """ |
185 | return cls.CHARACTER_TO_HTML_ENTITY_RE.sub( | 313 | return cls.CHARACTER_TO_HTML_ENTITY_RE.sub( |
186 | cls._substitute_html_entity, s) | 314 | cls._substitute_html_entity, s) |
@@ -192,23 +320,65 @@ class EncodingDetector: | |||
192 | Order of precedence: | 320 | Order of precedence: |
193 | 321 | ||
194 | 1. Encodings you specifically tell EncodingDetector to try first | 322 | 1. Encodings you specifically tell EncodingDetector to try first |
195 | (the override_encodings argument to the constructor). | 323 | (the known_definite_encodings argument to the constructor). |
324 | |||
325 | 2. An encoding determined by sniffing the document's byte-order mark. | ||
326 | |||
327 | 3. Encodings you specifically tell EncodingDetector to try if | ||
328 | byte-order mark sniffing fails (the user_encodings argument to the | ||
329 | constructor). | ||
196 | 330 | ||
197 | 2. An encoding declared within the bytestring itself, either in an | 331 | 4. An encoding declared within the bytestring itself, either in an |
198 | XML declaration (if the bytestring is to be interpreted as an XML | 332 | XML declaration (if the bytestring is to be interpreted as an XML |
199 | document), or in a <meta> tag (if the bytestring is to be | 333 | document), or in a <meta> tag (if the bytestring is to be |
200 | interpreted as an HTML document.) | 334 | interpreted as an HTML document.) |
201 | 335 | ||
202 | 3. An encoding detected through textual analysis by chardet, | 336 | 5. An encoding detected through textual analysis by chardet, |
203 | cchardet, or a similar external library. | 337 | cchardet, or a similar external library. |
204 | 338 | ||
205 | 4. UTF-8. | 339 | 4. UTF-8. |
206 | 340 | ||
207 | 5. Windows-1252. | 341 | 5. Windows-1252. |
342 | |||
208 | """ | 343 | """ |
209 | def __init__(self, markup, override_encodings=None, is_html=False, | 344 | def __init__(self, markup, known_definite_encodings=None, |
210 | exclude_encodings=None): | 345 | is_html=False, exclude_encodings=None, |
211 | self.override_encodings = override_encodings or [] | 346 | user_encodings=None, override_encodings=None): |
347 | """Constructor. | ||
348 | |||
349 | :param markup: Some markup in an unknown encoding. | ||
350 | |||
351 | :param known_definite_encodings: When determining the encoding | ||
352 | of `markup`, these encodings will be tried first, in | ||
353 | order. In HTML terms, this corresponds to the "known | ||
354 | definite encoding" step defined here: | ||
355 | https://html.spec.whatwg.org/multipage/parsing.html#parsing-with-a-known-character-encoding | ||
356 | |||
357 | :param user_encodings: These encodings will be tried after the | ||
358 | `known_definite_encodings` have been tried and failed, and | ||
359 | after an attempt to sniff the encoding by looking at a | ||
360 | byte order mark has failed. In HTML terms, this | ||
361 | corresponds to the step "user has explicitly instructed | ||
362 | the user agent to override the document's character | ||
363 | encoding", defined here: | ||
364 | https://html.spec.whatwg.org/multipage/parsing.html#determining-the-character-encoding | ||
365 | |||
366 | :param override_encodings: A deprecated alias for | ||
367 | known_definite_encodings. Any encodings here will be tried | ||
368 | immediately after the encodings in | ||
369 | known_definite_encodings. | ||
370 | |||
371 | :param is_html: If True, this markup is considered to be | ||
372 | HTML. Otherwise it's assumed to be XML. | ||
373 | |||
374 | :param exclude_encodings: These encodings will not be tried, | ||
375 | even if they otherwise would be. | ||
376 | |||
377 | """ | ||
378 | self.known_definite_encodings = list(known_definite_encodings or []) | ||
379 | if override_encodings: | ||
380 | self.known_definite_encodings += override_encodings | ||
381 | self.user_encodings = user_encodings or [] | ||
212 | exclude_encodings = exclude_encodings or [] | 382 | exclude_encodings = exclude_encodings or [] |
213 | self.exclude_encodings = set([x.lower() for x in exclude_encodings]) | 383 | self.exclude_encodings = set([x.lower() for x in exclude_encodings]) |
214 | self.chardet_encoding = None | 384 | self.chardet_encoding = None |
@@ -219,6 +389,12 @@ class EncodingDetector: | |||
219 | self.markup, self.sniffed_encoding = self.strip_byte_order_mark(markup) | 389 | self.markup, self.sniffed_encoding = self.strip_byte_order_mark(markup) |
220 | 390 | ||
221 | def _usable(self, encoding, tried): | 391 | def _usable(self, encoding, tried): |
392 | """Should we even bother to try this encoding? | ||
393 | |||
394 | :param encoding: Name of an encoding. | ||
395 | :param tried: Encodings that have already been tried. This will be modified | ||
396 | as a side effect. | ||
397 | """ | ||
222 | if encoding is not None: | 398 | if encoding is not None: |
223 | encoding = encoding.lower() | 399 | encoding = encoding.lower() |
224 | if encoding in self.exclude_encodings: | 400 | if encoding in self.exclude_encodings: |
@@ -230,9 +406,14 @@ class EncodingDetector: | |||
230 | 406 | ||
231 | @property | 407 | @property |
232 | def encodings(self): | 408 | def encodings(self): |
233 | """Yield a number of encodings that might work for this markup.""" | 409 | """Yield a number of encodings that might work for this markup. |
410 | |||
411 | :yield: A sequence of strings. | ||
412 | """ | ||
234 | tried = set() | 413 | tried = set() |
235 | for e in self.override_encodings: | 414 | |
415 | # First, try the known definite encodings | ||
416 | for e in self.known_definite_encodings: | ||
236 | if self._usable(e, tried): | 417 | if self._usable(e, tried): |
237 | yield e | 418 | yield e |
238 | 419 | ||
@@ -241,6 +422,12 @@ class EncodingDetector: | |||
241 | if self._usable(self.sniffed_encoding, tried): | 422 | if self._usable(self.sniffed_encoding, tried): |
242 | yield self.sniffed_encoding | 423 | yield self.sniffed_encoding |
243 | 424 | ||
425 | # Sniffing the byte-order mark did nothing; try the user | ||
426 | # encodings. | ||
427 | for e in self.user_encodings: | ||
428 | if self._usable(e, tried): | ||
429 | yield e | ||
430 | |||
244 | # Look within the document for an XML or HTML encoding | 431 | # Look within the document for an XML or HTML encoding |
245 | # declaration. | 432 | # declaration. |
246 | if self.declared_encoding is None: | 433 | if self.declared_encoding is None: |
@@ -263,7 +450,11 @@ class EncodingDetector: | |||
263 | 450 | ||
264 | @classmethod | 451 | @classmethod |
265 | def strip_byte_order_mark(cls, data): | 452 | def strip_byte_order_mark(cls, data): |
266 | """If a byte-order mark is present, strip it and return the encoding it implies.""" | 453 | """If a byte-order mark is present, strip it and return the encoding it implies. |
454 | |||
455 | :param data: Some markup. | ||
456 | :return: A 2-tuple (modified data, implied encoding) | ||
457 | """ | ||
267 | encoding = None | 458 | encoding = None |
268 | if isinstance(data, str): | 459 | if isinstance(data, str): |
269 | # Unicode data cannot have a byte-order mark. | 460 | # Unicode data cannot have a byte-order mark. |
@@ -295,21 +486,36 @@ class EncodingDetector: | |||
295 | 486 | ||
296 | An HTML encoding is declared in a <meta> tag, hopefully near the | 487 | An HTML encoding is declared in a <meta> tag, hopefully near the |
297 | beginning of the document. | 488 | beginning of the document. |
489 | |||
490 | :param markup: Some markup. | ||
491 | :param is_html: If True, this markup is considered to be HTML. Otherwise | ||
492 | it's assumed to be XML. | ||
493 | :param search_entire_document: Since an encoding is supposed to declared near the beginning | ||
494 | of the document, most of the time it's only necessary to search a few kilobytes of data. | ||
495 | Set this to True to force this method to search the entire document. | ||
298 | """ | 496 | """ |
299 | if search_entire_document: | 497 | if search_entire_document: |
300 | xml_endpos = html_endpos = len(markup) | 498 | xml_endpos = html_endpos = len(markup) |
301 | else: | 499 | else: |
302 | xml_endpos = 1024 | 500 | xml_endpos = 1024 |
303 | html_endpos = max(2048, int(len(markup) * 0.05)) | 501 | html_endpos = max(2048, int(len(markup) * 0.05)) |
304 | 502 | ||
503 | if isinstance(markup, bytes): | ||
504 | res = encoding_res[bytes] | ||
505 | else: | ||
506 | res = encoding_res[str] | ||
507 | |||
508 | xml_re = res['xml'] | ||
509 | html_re = res['html'] | ||
305 | declared_encoding = None | 510 | declared_encoding = None |
306 | declared_encoding_match = xml_encoding_re.search(markup, endpos=xml_endpos) | 511 | declared_encoding_match = xml_re.search(markup, endpos=xml_endpos) |
307 | if not declared_encoding_match and is_html: | 512 | if not declared_encoding_match and is_html: |
308 | declared_encoding_match = html_meta_re.search(markup, endpos=html_endpos) | 513 | declared_encoding_match = html_re.search(markup, endpos=html_endpos) |
309 | if declared_encoding_match is not None: | 514 | if declared_encoding_match is not None: |
310 | declared_encoding = declared_encoding_match.groups()[0].decode( | 515 | declared_encoding = declared_encoding_match.groups()[0] |
311 | 'ascii', 'replace') | ||
312 | if declared_encoding: | 516 | if declared_encoding: |
517 | if isinstance(declared_encoding, bytes): | ||
518 | declared_encoding = declared_encoding.decode('ascii', 'replace') | ||
313 | return declared_encoding.lower() | 519 | return declared_encoding.lower() |
314 | return None | 520 | return None |
315 | 521 | ||
@@ -332,15 +538,53 @@ class UnicodeDammit: | |||
332 | "iso-8859-2", | 538 | "iso-8859-2", |
333 | ] | 539 | ] |
334 | 540 | ||
335 | def __init__(self, markup, override_encodings=[], | 541 | def __init__(self, markup, known_definite_encodings=[], |
336 | smart_quotes_to=None, is_html=False, exclude_encodings=[]): | 542 | smart_quotes_to=None, is_html=False, exclude_encodings=[], |
543 | user_encodings=None, override_encodings=None | ||
544 | ): | ||
545 | """Constructor. | ||
546 | |||
547 | :param markup: A bytestring representing markup in an unknown encoding. | ||
548 | |||
549 | :param known_definite_encodings: When determining the encoding | ||
550 | of `markup`, these encodings will be tried first, in | ||
551 | order. In HTML terms, this corresponds to the "known | ||
552 | definite encoding" step defined here: | ||
553 | https://html.spec.whatwg.org/multipage/parsing.html#parsing-with-a-known-character-encoding | ||
554 | |||
555 | :param user_encodings: These encodings will be tried after the | ||
556 | `known_definite_encodings` have been tried and failed, and | ||
557 | after an attempt to sniff the encoding by looking at a | ||
558 | byte order mark has failed. In HTML terms, this | ||
559 | corresponds to the step "user has explicitly instructed | ||
560 | the user agent to override the document's character | ||
561 | encoding", defined here: | ||
562 | https://html.spec.whatwg.org/multipage/parsing.html#determining-the-character-encoding | ||
563 | |||
564 | :param override_encodings: A deprecated alias for | ||
565 | known_definite_encodings. Any encodings here will be tried | ||
566 | immediately after the encodings in | ||
567 | known_definite_encodings. | ||
568 | |||
569 | :param smart_quotes_to: By default, Microsoft smart quotes will, like all other characters, be converted | ||
570 | to Unicode characters. Setting this to 'ascii' will convert them to ASCII quotes instead. | ||
571 | Setting it to 'xml' will convert them to XML entity references, and setting it to 'html' | ||
572 | will convert them to HTML entity references. | ||
573 | :param is_html: If True, this markup is considered to be HTML. Otherwise | ||
574 | it's assumed to be XML. | ||
575 | :param exclude_encodings: These encodings will not be considered, even | ||
576 | if the sniffing code thinks they might make sense. | ||
577 | |||
578 | """ | ||
337 | self.smart_quotes_to = smart_quotes_to | 579 | self.smart_quotes_to = smart_quotes_to |
338 | self.tried_encodings = [] | 580 | self.tried_encodings = [] |
339 | self.contains_replacement_characters = False | 581 | self.contains_replacement_characters = False |
340 | self.is_html = is_html | 582 | self.is_html = is_html |
341 | 583 | self.log = logging.getLogger(__name__) | |
342 | self.detector = EncodingDetector( | 584 | self.detector = EncodingDetector( |
343 | markup, override_encodings, is_html, exclude_encodings) | 585 | markup, known_definite_encodings, is_html, exclude_encodings, |
586 | user_encodings, override_encodings | ||
587 | ) | ||
344 | 588 | ||
345 | # Short-circuit if the data is in Unicode to begin with. | 589 | # Short-circuit if the data is in Unicode to begin with. |
346 | if isinstance(markup, str) or markup == '': | 590 | if isinstance(markup, str) or markup == '': |
@@ -368,9 +612,10 @@ class UnicodeDammit: | |||
368 | if encoding != "ascii": | 612 | if encoding != "ascii": |
369 | u = self._convert_from(encoding, "replace") | 613 | u = self._convert_from(encoding, "replace") |
370 | if u is not None: | 614 | if u is not None: |
371 | logging.warning( | 615 | self.log.warning( |
372 | "Some characters could not be decoded, and were " | 616 | "Some characters could not be decoded, and were " |
373 | "replaced with REPLACEMENT CHARACTER.") | 617 | "replaced with REPLACEMENT CHARACTER." |
618 | ) | ||
374 | self.contains_replacement_characters = True | 619 | self.contains_replacement_characters = True |
375 | break | 620 | break |
376 | 621 | ||
@@ -399,6 +644,10 @@ class UnicodeDammit: | |||
399 | return sub | 644 | return sub |
400 | 645 | ||
401 | def _convert_from(self, proposed, errors="strict"): | 646 | def _convert_from(self, proposed, errors="strict"): |
647 | """Attempt to convert the markup to the proposed encoding. | ||
648 | |||
649 | :param proposed: The name of a character encoding. | ||
650 | """ | ||
402 | proposed = self.find_codec(proposed) | 651 | proposed = self.find_codec(proposed) |
403 | if not proposed or (proposed, errors) in self.tried_encodings: | 652 | if not proposed or (proposed, errors) in self.tried_encodings: |
404 | return None | 653 | return None |
@@ -413,30 +662,40 @@ class UnicodeDammit: | |||
413 | markup = smart_quotes_compiled.sub(self._sub_ms_char, markup) | 662 | markup = smart_quotes_compiled.sub(self._sub_ms_char, markup) |
414 | 663 | ||
415 | try: | 664 | try: |
416 | #print "Trying to convert document to %s (errors=%s)" % ( | 665 | #print("Trying to convert document to %s (errors=%s)" % ( |
417 | # proposed, errors) | 666 | # proposed, errors)) |
418 | u = self._to_unicode(markup, proposed, errors) | 667 | u = self._to_unicode(markup, proposed, errors) |
419 | self.markup = u | 668 | self.markup = u |
420 | self.original_encoding = proposed | 669 | self.original_encoding = proposed |
421 | except Exception as e: | 670 | except Exception as e: |
422 | #print "That didn't work!" | 671 | #print("That didn't work!") |
423 | #print e | 672 | #print(e) |
424 | return None | 673 | return None |
425 | #print "Correct encoding: %s" % proposed | 674 | #print("Correct encoding: %s" % proposed) |
426 | return self.markup | 675 | return self.markup |
427 | 676 | ||
428 | def _to_unicode(self, data, encoding, errors="strict"): | 677 | def _to_unicode(self, data, encoding, errors="strict"): |
429 | '''Given a string and its encoding, decodes the string into Unicode. | 678 | """Given a string and its encoding, decodes the string into Unicode. |
430 | %encoding is a string recognized by encodings.aliases''' | 679 | |
680 | :param encoding: The name of an encoding. | ||
681 | """ | ||
431 | return str(data, encoding, errors) | 682 | return str(data, encoding, errors) |
432 | 683 | ||
433 | @property | 684 | @property |
434 | def declared_html_encoding(self): | 685 | def declared_html_encoding(self): |
686 | """If the markup is an HTML document, returns the encoding declared _within_ | ||
687 | the document. | ||
688 | """ | ||
435 | if not self.is_html: | 689 | if not self.is_html: |
436 | return None | 690 | return None |
437 | return self.detector.declared_encoding | 691 | return self.detector.declared_encoding |
438 | 692 | ||
439 | def find_codec(self, charset): | 693 | def find_codec(self, charset): |
694 | """Convert the name of a character set to a codec name. | ||
695 | |||
696 | :param charset: The name of a character set. | ||
697 | :return: The name of a codec. | ||
698 | """ | ||
440 | value = (self._codec(self.CHARSET_ALIASES.get(charset, charset)) | 699 | value = (self._codec(self.CHARSET_ALIASES.get(charset, charset)) |
441 | or (charset and self._codec(charset.replace("-", ""))) | 700 | or (charset and self._codec(charset.replace("-", ""))) |
442 | or (charset and self._codec(charset.replace("-", "_"))) | 701 | or (charset and self._codec(charset.replace("-", "_"))) |
@@ -726,7 +985,7 @@ class UnicodeDammit: | |||
726 | 0xde : b'\xc3\x9e', # Þ | 985 | 0xde : b'\xc3\x9e', # Þ |
727 | 0xdf : b'\xc3\x9f', # ß | 986 | 0xdf : b'\xc3\x9f', # ß |
728 | 0xe0 : b'\xc3\xa0', # Ã | 987 | 0xe0 : b'\xc3\xa0', # Ã |
729 | 0xe1 : b'\xa1', # á | 988 | 0xe1 : b'\xa1', # á |
730 | 0xe2 : b'\xc3\xa2', # â | 989 | 0xe2 : b'\xc3\xa2', # â |
731 | 0xe3 : b'\xc3\xa3', # ã | 990 | 0xe3 : b'\xc3\xa3', # ã |
732 | 0xe4 : b'\xc3\xa4', # ä | 991 | 0xe4 : b'\xc3\xa4', # ä |
@@ -775,12 +1034,16 @@ class UnicodeDammit: | |||
775 | Currently the only situation supported is Windows-1252 (or its | 1034 | Currently the only situation supported is Windows-1252 (or its |
776 | subset ISO-8859-1), embedded in UTF-8. | 1035 | subset ISO-8859-1), embedded in UTF-8. |
777 | 1036 | ||
778 | The input must be a bytestring. If you've already converted | 1037 | :param in_bytes: A bytestring that you suspect contains |
779 | the document to Unicode, you're too late. | 1038 | characters from multiple encodings. Note that this _must_ |
780 | 1039 | be a bytestring. If you've already converted the document | |
781 | The output is a bytestring in which `embedded_encoding` | 1040 | to Unicode, you're too late. |
782 | characters have been converted to their `main_encoding` | 1041 | :param main_encoding: The primary encoding of `in_bytes`. |
783 | equivalents. | 1042 | :param embedded_encoding: The encoding that was used to embed characters |
1043 | in the main document. | ||
1044 | :return: A bytestring in which `embedded_encoding` | ||
1045 | characters have been converted to their `main_encoding` | ||
1046 | equivalents. | ||
784 | """ | 1047 | """ |
785 | if embedded_encoding.replace('_', '-').lower() not in ( | 1048 | if embedded_encoding.replace('_', '-').lower() not in ( |
786 | 'windows-1252', 'windows_1252'): | 1049 | 'windows-1252', 'windows_1252'): |
diff --git a/bitbake/lib/bs4/diagnose.py b/bitbake/lib/bs4/diagnose.py index 083395fb46..e079772e69 100644 --- a/bitbake/lib/bs4/diagnose.py +++ b/bitbake/lib/bs4/diagnose.py | |||
@@ -1,9 +1,10 @@ | |||
1 | """Diagnostic functions, mainly for use when doing tech support.""" | 1 | """Diagnostic functions, mainly for use when doing tech support.""" |
2 | 2 | ||
3 | # Use of this source code is governed by the MIT license. | ||
3 | __license__ = "MIT" | 4 | __license__ = "MIT" |
4 | 5 | ||
5 | import cProfile | 6 | import cProfile |
6 | from io import StringIO | 7 | from io import BytesIO |
7 | from html.parser import HTMLParser | 8 | from html.parser import HTMLParser |
8 | import bs4 | 9 | import bs4 |
9 | from bs4 import BeautifulSoup, __version__ | 10 | from bs4 import BeautifulSoup, __version__ |
@@ -19,9 +20,13 @@ import sys | |||
19 | import cProfile | 20 | import cProfile |
20 | 21 | ||
21 | def diagnose(data): | 22 | def diagnose(data): |
22 | """Diagnostic suite for isolating common problems.""" | 23 | """Diagnostic suite for isolating common problems. |
23 | print("Diagnostic running on Beautiful Soup %s" % __version__) | 24 | |
24 | print("Python version %s" % sys.version) | 25 | :param data: A string containing markup that needs to be explained. |
26 | :return: None; diagnostics are printed to standard output. | ||
27 | """ | ||
28 | print(("Diagnostic running on Beautiful Soup %s" % __version__)) | ||
29 | print(("Python version %s" % sys.version)) | ||
25 | 30 | ||
26 | basic_parsers = ["html.parser", "html5lib", "lxml"] | 31 | basic_parsers = ["html.parser", "html5lib", "lxml"] |
27 | for name in basic_parsers: | 32 | for name in basic_parsers: |
@@ -35,61 +40,70 @@ def diagnose(data): | |||
35 | name)) | 40 | name)) |
36 | 41 | ||
37 | if 'lxml' in basic_parsers: | 42 | if 'lxml' in basic_parsers: |
38 | basic_parsers.append(["lxml", "xml"]) | 43 | basic_parsers.append("lxml-xml") |
39 | try: | 44 | try: |
40 | from lxml import etree | 45 | from lxml import etree |
41 | print("Found lxml version %s" % ".".join(map(str,etree.LXML_VERSION))) | 46 | print(("Found lxml version %s" % ".".join(map(str,etree.LXML_VERSION)))) |
42 | except ImportError as e: | 47 | except ImportError as e: |
43 | print ( | 48 | print( |
44 | "lxml is not installed or couldn't be imported.") | 49 | "lxml is not installed or couldn't be imported.") |
45 | 50 | ||
46 | 51 | ||
47 | if 'html5lib' in basic_parsers: | 52 | if 'html5lib' in basic_parsers: |
48 | try: | 53 | try: |
49 | import html5lib | 54 | import html5lib |
50 | print("Found html5lib version %s" % html5lib.__version__) | 55 | print(("Found html5lib version %s" % html5lib.__version__)) |
51 | except ImportError as e: | 56 | except ImportError as e: |
52 | print ( | 57 | print( |
53 | "html5lib is not installed or couldn't be imported.") | 58 | "html5lib is not installed or couldn't be imported.") |
54 | 59 | ||
55 | if hasattr(data, 'read'): | 60 | if hasattr(data, 'read'): |
56 | data = data.read() | 61 | data = data.read() |
57 | elif os.path.exists(data): | ||
58 | print('"%s" looks like a filename. Reading data from the file.' % data) | ||
59 | data = open(data).read() | ||
60 | elif data.startswith("http:") or data.startswith("https:"): | ||
61 | print('"%s" looks like a URL. Beautiful Soup is not an HTTP client.' % data) | ||
62 | print("You need to use some other library to get the document behind the URL, and feed that document to Beautiful Soup.") | ||
63 | return | ||
64 | print() | ||
65 | 62 | ||
66 | for parser in basic_parsers: | 63 | for parser in basic_parsers: |
67 | print("Trying to parse your markup with %s" % parser) | 64 | print(("Trying to parse your markup with %s" % parser)) |
68 | success = False | 65 | success = False |
69 | try: | 66 | try: |
70 | soup = BeautifulSoup(data, parser) | 67 | soup = BeautifulSoup(data, features=parser) |
71 | success = True | 68 | success = True |
72 | except Exception as e: | 69 | except Exception as e: |
73 | print("%s could not parse the markup." % parser) | 70 | print(("%s could not parse the markup." % parser)) |
74 | traceback.print_exc() | 71 | traceback.print_exc() |
75 | if success: | 72 | if success: |
76 | print("Here's what %s did with the markup:" % parser) | 73 | print(("Here's what %s did with the markup:" % parser)) |
77 | print(soup.prettify()) | 74 | print((soup.prettify())) |
78 | 75 | ||
79 | print("-" * 80) | 76 | print(("-" * 80)) |
80 | 77 | ||
81 | def lxml_trace(data, html=True, **kwargs): | 78 | def lxml_trace(data, html=True, **kwargs): |
82 | """Print out the lxml events that occur during parsing. | 79 | """Print out the lxml events that occur during parsing. |
83 | 80 | ||
84 | This lets you see how lxml parses a document when no Beautiful | 81 | This lets you see how lxml parses a document when no Beautiful |
85 | Soup code is running. | 82 | Soup code is running. You can use this to determine whether |
83 | an lxml-specific problem is in Beautiful Soup's lxml tree builders | ||
84 | or in lxml itself. | ||
85 | |||
86 | :param data: Some markup. | ||
87 | :param html: If True, markup will be parsed with lxml's HTML parser. | ||
88 | if False, lxml's XML parser will be used. | ||
86 | """ | 89 | """ |
87 | from lxml import etree | 90 | from lxml import etree |
88 | for event, element in etree.iterparse(StringIO(data), html=html, **kwargs): | 91 | recover = kwargs.pop('recover', True) |
92 | if isinstance(data, str): | ||
93 | data = data.encode("utf8") | ||
94 | reader = BytesIO(data) | ||
95 | for event, element in etree.iterparse( | ||
96 | reader, html=html, recover=recover, **kwargs | ||
97 | ): | ||
89 | print(("%s, %4s, %s" % (event, element.tag, element.text))) | 98 | print(("%s, %4s, %s" % (event, element.tag, element.text))) |
90 | 99 | ||
91 | class AnnouncingParser(HTMLParser): | 100 | class AnnouncingParser(HTMLParser): |
92 | """Announces HTMLParser parse events, without doing anything else.""" | 101 | """Subclass of HTMLParser that announces parse events, without doing |
102 | anything else. | ||
103 | |||
104 | You can use this to get a picture of how html.parser sees a given | ||
105 | document. The easiest way to do this is to call `htmlparser_trace`. | ||
106 | """ | ||
93 | 107 | ||
94 | def _p(self, s): | 108 | def _p(self, s): |
95 | print(s) | 109 | print(s) |
@@ -126,6 +140,8 @@ def htmlparser_trace(data): | |||
126 | 140 | ||
127 | This lets you see how HTMLParser parses a document when no | 141 | This lets you see how HTMLParser parses a document when no |
128 | Beautiful Soup code is running. | 142 | Beautiful Soup code is running. |
143 | |||
144 | :param data: Some markup. | ||
129 | """ | 145 | """ |
130 | parser = AnnouncingParser() | 146 | parser = AnnouncingParser() |
131 | parser.feed(data) | 147 | parser.feed(data) |
@@ -168,9 +184,9 @@ def rdoc(num_elements=1000): | |||
168 | 184 | ||
169 | def benchmark_parsers(num_elements=100000): | 185 | def benchmark_parsers(num_elements=100000): |
170 | """Very basic head-to-head performance benchmark.""" | 186 | """Very basic head-to-head performance benchmark.""" |
171 | print("Comparative parser benchmark on Beautiful Soup %s" % __version__) | 187 | print(("Comparative parser benchmark on Beautiful Soup %s" % __version__)) |
172 | data = rdoc(num_elements) | 188 | data = rdoc(num_elements) |
173 | print("Generated a large invalid HTML document (%d bytes)." % len(data)) | 189 | print(("Generated a large invalid HTML document (%d bytes)." % len(data))) |
174 | 190 | ||
175 | for parser in ["lxml", ["lxml", "html"], "html5lib", "html.parser"]: | 191 | for parser in ["lxml", ["lxml", "html"], "html5lib", "html.parser"]: |
176 | success = False | 192 | success = False |
@@ -180,26 +196,26 @@ def benchmark_parsers(num_elements=100000): | |||
180 | b = time.time() | 196 | b = time.time() |
181 | success = True | 197 | success = True |
182 | except Exception as e: | 198 | except Exception as e: |
183 | print("%s could not parse the markup." % parser) | 199 | print(("%s could not parse the markup." % parser)) |
184 | traceback.print_exc() | 200 | traceback.print_exc() |
185 | if success: | 201 | if success: |
186 | print("BS4+%s parsed the markup in %.2fs." % (parser, b-a)) | 202 | print(("BS4+%s parsed the markup in %.2fs." % (parser, b-a))) |
187 | 203 | ||
188 | from lxml import etree | 204 | from lxml import etree |
189 | a = time.time() | 205 | a = time.time() |
190 | etree.HTML(data) | 206 | etree.HTML(data) |
191 | b = time.time() | 207 | b = time.time() |
192 | print("Raw lxml parsed the markup in %.2fs." % (b-a)) | 208 | print(("Raw lxml parsed the markup in %.2fs." % (b-a))) |
193 | 209 | ||
194 | import html5lib | 210 | import html5lib |
195 | parser = html5lib.HTMLParser() | 211 | parser = html5lib.HTMLParser() |
196 | a = time.time() | 212 | a = time.time() |
197 | parser.parse(data) | 213 | parser.parse(data) |
198 | b = time.time() | 214 | b = time.time() |
199 | print("Raw html5lib parsed the markup in %.2fs." % (b-a)) | 215 | print(("Raw html5lib parsed the markup in %.2fs." % (b-a))) |
200 | 216 | ||
201 | def profile(num_elements=100000, parser="lxml"): | 217 | def profile(num_elements=100000, parser="lxml"): |
202 | 218 | """Use Python's profiler on a randomly generated document.""" | |
203 | filehandle = tempfile.NamedTemporaryFile() | 219 | filehandle = tempfile.NamedTemporaryFile() |
204 | filename = filehandle.name | 220 | filename = filehandle.name |
205 | 221 | ||
@@ -212,5 +228,6 @@ def profile(num_elements=100000, parser="lxml"): | |||
212 | stats.sort_stats("cumulative") | 228 | stats.sort_stats("cumulative") |
213 | stats.print_stats('_html5lib|bs4', 50) | 229 | stats.print_stats('_html5lib|bs4', 50) |
214 | 230 | ||
231 | # If this file is run as a script, standard input is diagnosed. | ||
215 | if __name__ == '__main__': | 232 | if __name__ == '__main__': |
216 | diagnose(sys.stdin.read()) | 233 | diagnose(sys.stdin.read()) |
diff --git a/bitbake/lib/bs4/element.py b/bitbake/lib/bs4/element.py index 68be42d138..0aefe734b2 100644 --- a/bitbake/lib/bs4/element.py +++ b/bitbake/lib/bs4/element.py | |||
@@ -1,14 +1,27 @@ | |||
1 | # Use of this source code is governed by the MIT license. | ||
1 | __license__ = "MIT" | 2 | __license__ = "MIT" |
2 | 3 | ||
3 | import collections.abc | 4 | try: |
5 | from collections.abc import Callable # Python 3.6 | ||
6 | except ImportError as e: | ||
7 | from collections import Callable | ||
4 | import re | 8 | import re |
5 | import sys | 9 | import sys |
6 | import warnings | 10 | import warnings |
7 | from bs4.dammit import EntitySubstitution | 11 | |
12 | from bs4.css import CSS | ||
13 | from bs4.formatter import ( | ||
14 | Formatter, | ||
15 | HTMLFormatter, | ||
16 | XMLFormatter, | ||
17 | ) | ||
8 | 18 | ||
9 | DEFAULT_OUTPUT_ENCODING = "utf-8" | 19 | DEFAULT_OUTPUT_ENCODING = "utf-8" |
10 | PY3K = (sys.version_info[0] > 2) | ||
11 | 20 | ||
21 | nonwhitespace_re = re.compile(r"\S+") | ||
22 | |||
23 | # NOTE: This isn't used as of 4.7.0. I'm leaving it for a little bit on | ||
24 | # the off chance someone imported it for their own use. | ||
12 | whitespace_re = re.compile(r"\s+") | 25 | whitespace_re = re.compile(r"\s+") |
13 | 26 | ||
14 | def _alias(attr): | 27 | def _alias(attr): |
@@ -23,12 +36,49 @@ def _alias(attr): | |||
23 | return alias | 36 | return alias |
24 | 37 | ||
25 | 38 | ||
39 | # These encodings are recognized by Python (so PageElement.encode | ||
40 | # could theoretically support them) but XML and HTML don't recognize | ||
41 | # them (so they should not show up in an XML or HTML document as that | ||
42 | # document's encoding). | ||
43 | # | ||
44 | # If an XML document is encoded in one of these encodings, no encoding | ||
45 | # will be mentioned in the XML declaration. If an HTML document is | ||
46 | # encoded in one of these encodings, and the HTML document has a | ||
47 | # <meta> tag that mentions an encoding, the encoding will be given as | ||
48 | # the empty string. | ||
49 | # | ||
50 | # Source: | ||
51 | # https://docs.python.org/3/library/codecs.html#python-specific-encodings | ||
52 | PYTHON_SPECIFIC_ENCODINGS = set([ | ||
53 | "idna", | ||
54 | "mbcs", | ||
55 | "oem", | ||
56 | "palmos", | ||
57 | "punycode", | ||
58 | "raw_unicode_escape", | ||
59 | "undefined", | ||
60 | "unicode_escape", | ||
61 | "raw-unicode-escape", | ||
62 | "unicode-escape", | ||
63 | "string-escape", | ||
64 | "string_escape", | ||
65 | ]) | ||
66 | |||
67 | |||
26 | class NamespacedAttribute(str): | 68 | class NamespacedAttribute(str): |
69 | """A namespaced string (e.g. 'xml:lang') that remembers the namespace | ||
70 | ('xml') and the name ('lang') that were used to create it. | ||
71 | """ | ||
27 | 72 | ||
28 | def __new__(cls, prefix, name, namespace=None): | 73 | def __new__(cls, prefix, name=None, namespace=None): |
29 | if name is None: | 74 | if not name: |
75 | # This is the default namespace. Its name "has no value" | ||
76 | # per https://www.w3.org/TR/xml-names/#defaulting | ||
77 | name = None | ||
78 | |||
79 | if not name: | ||
30 | obj = str.__new__(cls, prefix) | 80 | obj = str.__new__(cls, prefix) |
31 | elif prefix is None: | 81 | elif not prefix: |
32 | # Not really namespaced. | 82 | # Not really namespaced. |
33 | obj = str.__new__(cls, name) | 83 | obj = str.__new__(cls, name) |
34 | else: | 84 | else: |
@@ -54,6 +104,11 @@ class CharsetMetaAttributeValue(AttributeValueWithCharsetSubstitution): | |||
54 | return obj | 104 | return obj |
55 | 105 | ||
56 | def encode(self, encoding): | 106 | def encode(self, encoding): |
107 | """When an HTML document is being encoded to a given encoding, the | ||
108 | value of a meta tag's 'charset' is the name of the encoding. | ||
109 | """ | ||
110 | if encoding in PYTHON_SPECIFIC_ENCODINGS: | ||
111 | return '' | ||
57 | return encoding | 112 | return encoding |
58 | 113 | ||
59 | 114 | ||
@@ -79,118 +134,44 @@ class ContentMetaAttributeValue(AttributeValueWithCharsetSubstitution): | |||
79 | return obj | 134 | return obj |
80 | 135 | ||
81 | def encode(self, encoding): | 136 | def encode(self, encoding): |
137 | if encoding in PYTHON_SPECIFIC_ENCODINGS: | ||
138 | return '' | ||
82 | def rewrite(match): | 139 | def rewrite(match): |
83 | return match.group(1) + encoding | 140 | return match.group(1) + encoding |
84 | return self.CHARSET_RE.sub(rewrite, self.original_value) | 141 | return self.CHARSET_RE.sub(rewrite, self.original_value) |
85 | 142 | ||
86 | class HTMLAwareEntitySubstitution(EntitySubstitution): | ||
87 | |||
88 | """Entity substitution rules that are aware of some HTML quirks. | ||
89 | 143 | ||
90 | Specifically, the contents of <script> and <style> tags should not | 144 | class PageElement(object): |
91 | undergo entity substitution. | 145 | """Contains the navigational information for some part of the page: |
146 | that is, its current location in the parse tree. | ||
92 | 147 | ||
93 | Incoming NavigableString objects are checked to see if they're the | 148 | NavigableString, Tag, etc. are all subclasses of PageElement. |
94 | direct children of a <script> or <style> tag. | ||
95 | """ | 149 | """ |
96 | 150 | ||
97 | cdata_containing_tags = set(["script", "style"]) | 151 | # In general, we can't tell just by looking at an element whether |
152 | # it's contained in an XML document or an HTML document. But for | ||
153 | # Tags (q.v.) we can store this information at parse time. | ||
154 | known_xml = None | ||
98 | 155 | ||
99 | preformatted_tags = set(["pre"]) | 156 | def setup(self, parent=None, previous_element=None, next_element=None, |
100 | 157 | previous_sibling=None, next_sibling=None): | |
101 | @classmethod | 158 | """Sets up the initial relations between this element and |
102 | def _substitute_if_appropriate(cls, ns, f): | 159 | other elements. |
103 | if (isinstance(ns, NavigableString) | ||
104 | and ns.parent is not None | ||
105 | and ns.parent.name in cls.cdata_containing_tags): | ||
106 | # Do nothing. | ||
107 | return ns | ||
108 | # Substitute. | ||
109 | return f(ns) | ||
110 | 160 | ||
111 | @classmethod | 161 | :param parent: The parent of this element. |
112 | def substitute_html(cls, ns): | ||
113 | return cls._substitute_if_appropriate( | ||
114 | ns, EntitySubstitution.substitute_html) | ||
115 | 162 | ||
116 | @classmethod | 163 | :param previous_element: The element parsed immediately before |
117 | def substitute_xml(cls, ns): | 164 | this one. |
118 | return cls._substitute_if_appropriate( | ||
119 | ns, EntitySubstitution.substitute_xml) | ||
120 | 165 | ||
121 | class PageElement(object): | 166 | :param next_element: The element parsed immediately before |
122 | """Contains the navigational information for some part of the page | 167 | this one. |
123 | (either a tag or a piece of text)""" | ||
124 | |||
125 | # There are five possible values for the "formatter" argument passed in | ||
126 | # to methods like encode() and prettify(): | ||
127 | # | ||
128 | # "html" - All Unicode characters with corresponding HTML entities | ||
129 | # are converted to those entities on output. | ||
130 | # "minimal" - Bare ampersands and angle brackets are converted to | ||
131 | # XML entities: & < > | ||
132 | # None - The null formatter. Unicode characters are never | ||
133 | # converted to entities. This is not recommended, but it's | ||
134 | # faster than "minimal". | ||
135 | # A function - This function will be called on every string that | ||
136 | # needs to undergo entity substitution. | ||
137 | # | ||
138 | |||
139 | # In an HTML document, the default "html" and "minimal" functions | ||
140 | # will leave the contents of <script> and <style> tags alone. For | ||
141 | # an XML document, all tags will be given the same treatment. | ||
142 | |||
143 | HTML_FORMATTERS = { | ||
144 | "html" : HTMLAwareEntitySubstitution.substitute_html, | ||
145 | "minimal" : HTMLAwareEntitySubstitution.substitute_xml, | ||
146 | None : None | ||
147 | } | ||
148 | |||
149 | XML_FORMATTERS = { | ||
150 | "html" : EntitySubstitution.substitute_html, | ||
151 | "minimal" : EntitySubstitution.substitute_xml, | ||
152 | None : None | ||
153 | } | ||
154 | |||
155 | def format_string(self, s, formatter='minimal'): | ||
156 | """Format the given string using the given formatter.""" | ||
157 | if not isinstance(formatter, collections.abc.Callable): | ||
158 | formatter = self._formatter_for_name(formatter) | ||
159 | if formatter is None: | ||
160 | output = s | ||
161 | else: | ||
162 | output = formatter(s) | ||
163 | return output | ||
164 | 168 | ||
165 | @property | 169 | :param previous_sibling: The most recently encountered element |
166 | def _is_xml(self): | 170 | on the same level of the parse tree as this one. |
167 | """Is this element part of an XML tree or an HTML tree? | ||
168 | 171 | ||
169 | This is used when mapping a formatter name ("minimal") to an | 172 | :param previous_sibling: The next element to be encountered |
170 | appropriate function (one that performs entity-substitution on | 173 | on the same level of the parse tree as this one. |
171 | the contents of <script> and <style> tags, or not). It's | ||
172 | inefficient, but it should be called very rarely. | ||
173 | """ | 174 | """ |
174 | if self.parent is None: | ||
175 | # This is the top-level object. It should have .is_xml set | ||
176 | # from tree creation. If not, take a guess--BS is usually | ||
177 | # used on HTML markup. | ||
178 | return getattr(self, 'is_xml', False) | ||
179 | return self.parent._is_xml | ||
180 | |||
181 | def _formatter_for_name(self, name): | ||
182 | "Look up a formatter function based on its name and the tree." | ||
183 | if self._is_xml: | ||
184 | return self.XML_FORMATTERS.get( | ||
185 | name, EntitySubstitution.substitute_xml) | ||
186 | else: | ||
187 | return self.HTML_FORMATTERS.get( | ||
188 | name, HTMLAwareEntitySubstitution.substitute_xml) | ||
189 | |||
190 | def setup(self, parent=None, previous_element=None, next_element=None, | ||
191 | previous_sibling=None, next_sibling=None): | ||
192 | """Sets up the initial relations between this element and | ||
193 | other elements.""" | ||
194 | self.parent = parent | 175 | self.parent = parent |
195 | 176 | ||
196 | self.previous_element = previous_element | 177 | self.previous_element = previous_element |
@@ -198,48 +179,156 @@ class PageElement(object): | |||
198 | self.previous_element.next_element = self | 179 | self.previous_element.next_element = self |
199 | 180 | ||
200 | self.next_element = next_element | 181 | self.next_element = next_element |
201 | if self.next_element: | 182 | if self.next_element is not None: |
202 | self.next_element.previous_element = self | 183 | self.next_element.previous_element = self |
203 | 184 | ||
204 | self.next_sibling = next_sibling | 185 | self.next_sibling = next_sibling |
205 | if self.next_sibling: | 186 | if self.next_sibling is not None: |
206 | self.next_sibling.previous_sibling = self | 187 | self.next_sibling.previous_sibling = self |
207 | 188 | ||
208 | if (not previous_sibling | 189 | if (previous_sibling is None |
209 | and self.parent is not None and self.parent.contents): | 190 | and self.parent is not None and self.parent.contents): |
210 | previous_sibling = self.parent.contents[-1] | 191 | previous_sibling = self.parent.contents[-1] |
211 | 192 | ||
212 | self.previous_sibling = previous_sibling | 193 | self.previous_sibling = previous_sibling |
213 | if previous_sibling: | 194 | if previous_sibling is not None: |
214 | self.previous_sibling.next_sibling = self | 195 | self.previous_sibling.next_sibling = self |
215 | 196 | ||
197 | def format_string(self, s, formatter): | ||
198 | """Format the given string using the given formatter. | ||
199 | |||
200 | :param s: A string. | ||
201 | :param formatter: A Formatter object, or a string naming one of the standard formatters. | ||
202 | """ | ||
203 | if formatter is None: | ||
204 | return s | ||
205 | if not isinstance(formatter, Formatter): | ||
206 | formatter = self.formatter_for_name(formatter) | ||
207 | output = formatter.substitute(s) | ||
208 | return output | ||
209 | |||
210 | def formatter_for_name(self, formatter): | ||
211 | """Look up or create a Formatter for the given identifier, | ||
212 | if necessary. | ||
213 | |||
214 | :param formatter: Can be a Formatter object (used as-is), a | ||
215 | function (used as the entity substitution hook for an | ||
216 | XMLFormatter or HTMLFormatter), or a string (used to look | ||
217 | up an XMLFormatter or HTMLFormatter in the appropriate | ||
218 | registry. | ||
219 | """ | ||
220 | if isinstance(formatter, Formatter): | ||
221 | return formatter | ||
222 | if self._is_xml: | ||
223 | c = XMLFormatter | ||
224 | else: | ||
225 | c = HTMLFormatter | ||
226 | if isinstance(formatter, Callable): | ||
227 | return c(entity_substitution=formatter) | ||
228 | return c.REGISTRY[formatter] | ||
229 | |||
230 | @property | ||
231 | def _is_xml(self): | ||
232 | """Is this element part of an XML tree or an HTML tree? | ||
233 | |||
234 | This is used in formatter_for_name, when deciding whether an | ||
235 | XMLFormatter or HTMLFormatter is more appropriate. It can be | ||
236 | inefficient, but it should be called very rarely. | ||
237 | """ | ||
238 | if self.known_xml is not None: | ||
239 | # Most of the time we will have determined this when the | ||
240 | # document is parsed. | ||
241 | return self.known_xml | ||
242 | |||
243 | # Otherwise, it's likely that this element was created by | ||
244 | # direct invocation of the constructor from within the user's | ||
245 | # Python code. | ||
246 | if self.parent is None: | ||
247 | # This is the top-level object. It should have .known_xml set | ||
248 | # from tree creation. If not, take a guess--BS is usually | ||
249 | # used on HTML markup. | ||
250 | return getattr(self, 'is_xml', False) | ||
251 | return self.parent._is_xml | ||
252 | |||
216 | nextSibling = _alias("next_sibling") # BS3 | 253 | nextSibling = _alias("next_sibling") # BS3 |
217 | previousSibling = _alias("previous_sibling") # BS3 | 254 | previousSibling = _alias("previous_sibling") # BS3 |
218 | 255 | ||
219 | def replace_with(self, replace_with): | 256 | default = object() |
220 | if not self.parent: | 257 | def _all_strings(self, strip=False, types=default): |
258 | """Yield all strings of certain classes, possibly stripping them. | ||
259 | |||
260 | This is implemented differently in Tag and NavigableString. | ||
261 | """ | ||
262 | raise NotImplementedError() | ||
263 | |||
264 | @property | ||
265 | def stripped_strings(self): | ||
266 | """Yield all strings in this PageElement, stripping them first. | ||
267 | |||
268 | :yield: A sequence of stripped strings. | ||
269 | """ | ||
270 | for string in self._all_strings(True): | ||
271 | yield string | ||
272 | |||
273 | def get_text(self, separator="", strip=False, | ||
274 | types=default): | ||
275 | """Get all child strings of this PageElement, concatenated using the | ||
276 | given separator. | ||
277 | |||
278 | :param separator: Strings will be concatenated using this separator. | ||
279 | |||
280 | :param strip: If True, strings will be stripped before being | ||
281 | concatenated. | ||
282 | |||
283 | :param types: A tuple of NavigableString subclasses. Any | ||
284 | strings of a subclass not found in this list will be | ||
285 | ignored. Although there are exceptions, the default | ||
286 | behavior in most cases is to consider only NavigableString | ||
287 | and CData objects. That means no comments, processing | ||
288 | instructions, etc. | ||
289 | |||
290 | :return: A string. | ||
291 | """ | ||
292 | return separator.join([s for s in self._all_strings( | ||
293 | strip, types=types)]) | ||
294 | getText = get_text | ||
295 | text = property(get_text) | ||
296 | |||
297 | def replace_with(self, *args): | ||
298 | """Replace this PageElement with one or more PageElements, keeping the | ||
299 | rest of the tree the same. | ||
300 | |||
301 | :param args: One or more PageElements. | ||
302 | :return: `self`, no longer part of the tree. | ||
303 | """ | ||
304 | if self.parent is None: | ||
221 | raise ValueError( | 305 | raise ValueError( |
222 | "Cannot replace one element with another when the" | 306 | "Cannot replace one element with another when the " |
223 | "element to be replaced is not part of a tree.") | 307 | "element to be replaced is not part of a tree.") |
224 | if replace_with is self: | 308 | if len(args) == 1 and args[0] is self: |
225 | return | 309 | return |
226 | if replace_with is self.parent: | 310 | if any(x is self.parent for x in args): |
227 | raise ValueError("Cannot replace a Tag with its parent.") | 311 | raise ValueError("Cannot replace a Tag with its parent.") |
228 | old_parent = self.parent | 312 | old_parent = self.parent |
229 | my_index = self.parent.index(self) | 313 | my_index = self.parent.index(self) |
230 | self.extract() | 314 | self.extract(_self_index=my_index) |
231 | old_parent.insert(my_index, replace_with) | 315 | for idx, replace_with in enumerate(args, start=my_index): |
316 | old_parent.insert(idx, replace_with) | ||
232 | return self | 317 | return self |
233 | replaceWith = replace_with # BS3 | 318 | replaceWith = replace_with # BS3 |
234 | 319 | ||
235 | def unwrap(self): | 320 | def unwrap(self): |
321 | """Replace this PageElement with its contents. | ||
322 | |||
323 | :return: `self`, no longer part of the tree. | ||
324 | """ | ||
236 | my_parent = self.parent | 325 | my_parent = self.parent |
237 | if not self.parent: | 326 | if self.parent is None: |
238 | raise ValueError( | 327 | raise ValueError( |
239 | "Cannot replace an element with its contents when that" | 328 | "Cannot replace an element with its contents when that" |
240 | "element is not part of a tree.") | 329 | "element is not part of a tree.") |
241 | my_index = self.parent.index(self) | 330 | my_index = self.parent.index(self) |
242 | self.extract() | 331 | self.extract(_self_index=my_index) |
243 | for child in reversed(self.contents[:]): | 332 | for child in reversed(self.contents[:]): |
244 | my_parent.insert(my_index, child) | 333 | my_parent.insert(my_index, child) |
245 | return self | 334 | return self |
@@ -247,14 +336,29 @@ class PageElement(object): | |||
247 | replaceWithChildren = unwrap # BS3 | 336 | replaceWithChildren = unwrap # BS3 |
248 | 337 | ||
249 | def wrap(self, wrap_inside): | 338 | def wrap(self, wrap_inside): |
339 | """Wrap this PageElement inside another one. | ||
340 | |||
341 | :param wrap_inside: A PageElement. | ||
342 | :return: `wrap_inside`, occupying the position in the tree that used | ||
343 | to be occupied by `self`, and with `self` inside it. | ||
344 | """ | ||
250 | me = self.replace_with(wrap_inside) | 345 | me = self.replace_with(wrap_inside) |
251 | wrap_inside.append(me) | 346 | wrap_inside.append(me) |
252 | return wrap_inside | 347 | return wrap_inside |
253 | 348 | ||
254 | def extract(self): | 349 | def extract(self, _self_index=None): |
255 | """Destructively rips this element out of the tree.""" | 350 | """Destructively rips this element out of the tree. |
351 | |||
352 | :param _self_index: The location of this element in its parent's | ||
353 | .contents, if known. Passing this in allows for a performance | ||
354 | optimization. | ||
355 | |||
356 | :return: `self`, no longer part of the tree. | ||
357 | """ | ||
256 | if self.parent is not None: | 358 | if self.parent is not None: |
257 | del self.parent.contents[self.parent.index(self)] | 359 | if _self_index is None: |
360 | _self_index = self.parent.index(self) | ||
361 | del self.parent.contents[_self_index] | ||
258 | 362 | ||
259 | #Find the two elements that would be next to each other if | 363 | #Find the two elements that would be next to each other if |
260 | #this element (and any children) hadn't been parsed. Connect | 364 | #this element (and any children) hadn't been parsed. Connect |
@@ -281,8 +385,13 @@ class PageElement(object): | |||
281 | return self | 385 | return self |
282 | 386 | ||
283 | def _last_descendant(self, is_initialized=True, accept_self=True): | 387 | def _last_descendant(self, is_initialized=True, accept_self=True): |
284 | "Finds the last element beneath this object to be parsed." | 388 | """Finds the last element beneath this object to be parsed. |
285 | if is_initialized and self.next_sibling: | 389 | |
390 | :param is_initialized: Has `setup` been called on this PageElement | ||
391 | yet? | ||
392 | :param accept_self: Is `self` an acceptable answer to the question? | ||
393 | """ | ||
394 | if is_initialized and self.next_sibling is not None: | ||
286 | last_child = self.next_sibling.previous_element | 395 | last_child = self.next_sibling.previous_element |
287 | else: | 396 | else: |
288 | last_child = self | 397 | last_child = self |
@@ -295,6 +404,14 @@ class PageElement(object): | |||
295 | _lastRecursiveChild = _last_descendant | 404 | _lastRecursiveChild = _last_descendant |
296 | 405 | ||
297 | def insert(self, position, new_child): | 406 | def insert(self, position, new_child): |
407 | """Insert a new PageElement in the list of this PageElement's children. | ||
408 | |||
409 | This works the same way as `list.insert`. | ||
410 | |||
411 | :param position: The numeric position that should be occupied | ||
412 | in `self.children` by the new PageElement. | ||
413 | :param new_child: A PageElement. | ||
414 | """ | ||
298 | if new_child is None: | 415 | if new_child is None: |
299 | raise ValueError("Cannot insert None into a tag.") | 416 | raise ValueError("Cannot insert None into a tag.") |
300 | if new_child is self: | 417 | if new_child is self: |
@@ -303,6 +420,14 @@ class PageElement(object): | |||
303 | and not isinstance(new_child, NavigableString)): | 420 | and not isinstance(new_child, NavigableString)): |
304 | new_child = NavigableString(new_child) | 421 | new_child = NavigableString(new_child) |
305 | 422 | ||
423 | from bs4 import BeautifulSoup | ||
424 | if isinstance(new_child, BeautifulSoup): | ||
425 | # We don't want to end up with a situation where one BeautifulSoup | ||
426 | # object contains another. Insert the children one at a time. | ||
427 | for subchild in list(new_child.contents): | ||
428 | self.insert(position, subchild) | ||
429 | position += 1 | ||
430 | return | ||
306 | position = min(position, len(self.contents)) | 431 | position = min(position, len(self.contents)) |
307 | if hasattr(new_child, 'parent') and new_child.parent is not None: | 432 | if hasattr(new_child, 'parent') and new_child.parent is not None: |
308 | # We're 'inserting' an element that's already one | 433 | # We're 'inserting' an element that's already one |
@@ -361,160 +486,326 @@ class PageElement(object): | |||
361 | self.contents.insert(position, new_child) | 486 | self.contents.insert(position, new_child) |
362 | 487 | ||
363 | def append(self, tag): | 488 | def append(self, tag): |
364 | """Appends the given tag to the contents of this tag.""" | 489 | """Appends the given PageElement to the contents of this one. |
490 | |||
491 | :param tag: A PageElement. | ||
492 | """ | ||
365 | self.insert(len(self.contents), tag) | 493 | self.insert(len(self.contents), tag) |
366 | 494 | ||
367 | def insert_before(self, predecessor): | 495 | def extend(self, tags): |
368 | """Makes the given element the immediate predecessor of this one. | 496 | """Appends the given PageElements to this one's contents. |
369 | 497 | ||
370 | The two elements will have the same parent, and the given element | 498 | :param tags: A list of PageElements. If a single Tag is |
499 | provided instead, this PageElement's contents will be extended | ||
500 | with that Tag's contents. | ||
501 | """ | ||
502 | if isinstance(tags, Tag): | ||
503 | tags = tags.contents | ||
504 | if isinstance(tags, list): | ||
505 | # Moving items around the tree may change their position in | ||
506 | # the original list. Make a list that won't change. | ||
507 | tags = list(tags) | ||
508 | for tag in tags: | ||
509 | self.append(tag) | ||
510 | |||
511 | def insert_before(self, *args): | ||
512 | """Makes the given element(s) the immediate predecessor of this one. | ||
513 | |||
514 | All the elements will have the same parent, and the given elements | ||
371 | will be immediately before this one. | 515 | will be immediately before this one. |
516 | |||
517 | :param args: One or more PageElements. | ||
372 | """ | 518 | """ |
373 | if self is predecessor: | ||
374 | raise ValueError("Can't insert an element before itself.") | ||
375 | parent = self.parent | 519 | parent = self.parent |
376 | if parent is None: | 520 | if parent is None: |
377 | raise ValueError( | 521 | raise ValueError( |
378 | "Element has no parent, so 'before' has no meaning.") | 522 | "Element has no parent, so 'before' has no meaning.") |
379 | # Extract first so that the index won't be screwed up if they | 523 | if any(x is self for x in args): |
380 | # are siblings. | 524 | raise ValueError("Can't insert an element before itself.") |
381 | if isinstance(predecessor, PageElement): | 525 | for predecessor in args: |
382 | predecessor.extract() | 526 | # Extract first so that the index won't be screwed up if they |
383 | index = parent.index(self) | 527 | # are siblings. |
384 | parent.insert(index, predecessor) | 528 | if isinstance(predecessor, PageElement): |
385 | 529 | predecessor.extract() | |
386 | def insert_after(self, successor): | 530 | index = parent.index(self) |
387 | """Makes the given element the immediate successor of this one. | 531 | parent.insert(index, predecessor) |
388 | 532 | ||
389 | The two elements will have the same parent, and the given element | 533 | def insert_after(self, *args): |
534 | """Makes the given element(s) the immediate successor of this one. | ||
535 | |||
536 | The elements will have the same parent, and the given elements | ||
390 | will be immediately after this one. | 537 | will be immediately after this one. |
538 | |||
539 | :param args: One or more PageElements. | ||
391 | """ | 540 | """ |
392 | if self is successor: | 541 | # Do all error checking before modifying the tree. |
393 | raise ValueError("Can't insert an element after itself.") | ||
394 | parent = self.parent | 542 | parent = self.parent |
395 | if parent is None: | 543 | if parent is None: |
396 | raise ValueError( | 544 | raise ValueError( |
397 | "Element has no parent, so 'after' has no meaning.") | 545 | "Element has no parent, so 'after' has no meaning.") |
398 | # Extract first so that the index won't be screwed up if they | 546 | if any(x is self for x in args): |
399 | # are siblings. | 547 | raise ValueError("Can't insert an element after itself.") |
400 | if isinstance(successor, PageElement): | 548 | |
401 | successor.extract() | 549 | offset = 0 |
402 | index = parent.index(self) | 550 | for successor in args: |
403 | parent.insert(index+1, successor) | 551 | # Extract first so that the index won't be screwed up if they |
404 | 552 | # are siblings. | |
405 | def find_next(self, name=None, attrs={}, text=None, **kwargs): | 553 | if isinstance(successor, PageElement): |
406 | """Returns the first item that matches the given criteria and | 554 | successor.extract() |
407 | appears after this Tag in the document.""" | 555 | index = parent.index(self) |
408 | return self._find_one(self.find_all_next, name, attrs, text, **kwargs) | 556 | parent.insert(index+1+offset, successor) |
557 | offset += 1 | ||
558 | |||
559 | def find_next(self, name=None, attrs={}, string=None, **kwargs): | ||
560 | """Find the first PageElement that matches the given criteria and | ||
561 | appears later in the document than this PageElement. | ||
562 | |||
563 | All find_* methods take a common set of arguments. See the online | ||
564 | documentation for detailed explanations. | ||
565 | |||
566 | :param name: A filter on tag name. | ||
567 | :param attrs: A dictionary of filters on attribute values. | ||
568 | :param string: A filter for a NavigableString with specific text. | ||
569 | :kwargs: A dictionary of filters on attribute values. | ||
570 | :return: A PageElement. | ||
571 | :rtype: bs4.element.Tag | bs4.element.NavigableString | ||
572 | """ | ||
573 | return self._find_one(self.find_all_next, name, attrs, string, **kwargs) | ||
409 | findNext = find_next # BS3 | 574 | findNext = find_next # BS3 |
410 | 575 | ||
411 | def find_all_next(self, name=None, attrs={}, text=None, limit=None, | 576 | def find_all_next(self, name=None, attrs={}, string=None, limit=None, |
412 | **kwargs): | 577 | **kwargs): |
413 | """Returns all items that match the given criteria and appear | 578 | """Find all PageElements that match the given criteria and appear |
414 | after this Tag in the document.""" | 579 | later in the document than this PageElement. |
415 | return self._find_all(name, attrs, text, limit, self.next_elements, | 580 | |
416 | **kwargs) | 581 | All find_* methods take a common set of arguments. See the online |
582 | documentation for detailed explanations. | ||
583 | |||
584 | :param name: A filter on tag name. | ||
585 | :param attrs: A dictionary of filters on attribute values. | ||
586 | :param string: A filter for a NavigableString with specific text. | ||
587 | :param limit: Stop looking after finding this many results. | ||
588 | :kwargs: A dictionary of filters on attribute values. | ||
589 | :return: A ResultSet containing PageElements. | ||
590 | """ | ||
591 | _stacklevel = kwargs.pop('_stacklevel', 2) | ||
592 | return self._find_all(name, attrs, string, limit, self.next_elements, | ||
593 | _stacklevel=_stacklevel+1, **kwargs) | ||
417 | findAllNext = find_all_next # BS3 | 594 | findAllNext = find_all_next # BS3 |
418 | 595 | ||
419 | def find_next_sibling(self, name=None, attrs={}, text=None, **kwargs): | 596 | def find_next_sibling(self, name=None, attrs={}, string=None, **kwargs): |
420 | """Returns the closest sibling to this Tag that matches the | 597 | """Find the closest sibling to this PageElement that matches the |
421 | given criteria and appears after this Tag in the document.""" | 598 | given criteria and appears later in the document. |
422 | return self._find_one(self.find_next_siblings, name, attrs, text, | 599 | |
600 | All find_* methods take a common set of arguments. See the | ||
601 | online documentation for detailed explanations. | ||
602 | |||
603 | :param name: A filter on tag name. | ||
604 | :param attrs: A dictionary of filters on attribute values. | ||
605 | :param string: A filter for a NavigableString with specific text. | ||
606 | :kwargs: A dictionary of filters on attribute values. | ||
607 | :return: A PageElement. | ||
608 | :rtype: bs4.element.Tag | bs4.element.NavigableString | ||
609 | """ | ||
610 | return self._find_one(self.find_next_siblings, name, attrs, string, | ||
423 | **kwargs) | 611 | **kwargs) |
424 | findNextSibling = find_next_sibling # BS3 | 612 | findNextSibling = find_next_sibling # BS3 |
425 | 613 | ||
426 | def find_next_siblings(self, name=None, attrs={}, text=None, limit=None, | 614 | def find_next_siblings(self, name=None, attrs={}, string=None, limit=None, |
427 | **kwargs): | 615 | **kwargs): |
428 | """Returns the siblings of this Tag that match the given | 616 | """Find all siblings of this PageElement that match the given criteria |
429 | criteria and appear after this Tag in the document.""" | 617 | and appear later in the document. |
430 | return self._find_all(name, attrs, text, limit, | 618 | |
431 | self.next_siblings, **kwargs) | 619 | All find_* methods take a common set of arguments. See the online |
620 | documentation for detailed explanations. | ||
621 | |||
622 | :param name: A filter on tag name. | ||
623 | :param attrs: A dictionary of filters on attribute values. | ||
624 | :param string: A filter for a NavigableString with specific text. | ||
625 | :param limit: Stop looking after finding this many results. | ||
626 | :kwargs: A dictionary of filters on attribute values. | ||
627 | :return: A ResultSet of PageElements. | ||
628 | :rtype: bs4.element.ResultSet | ||
629 | """ | ||
630 | _stacklevel = kwargs.pop('_stacklevel', 2) | ||
631 | return self._find_all( | ||
632 | name, attrs, string, limit, | ||
633 | self.next_siblings, _stacklevel=_stacklevel+1, **kwargs | ||
634 | ) | ||
432 | findNextSiblings = find_next_siblings # BS3 | 635 | findNextSiblings = find_next_siblings # BS3 |
433 | fetchNextSiblings = find_next_siblings # BS2 | 636 | fetchNextSiblings = find_next_siblings # BS2 |
434 | 637 | ||
435 | def find_previous(self, name=None, attrs={}, text=None, **kwargs): | 638 | def find_previous(self, name=None, attrs={}, string=None, **kwargs): |
436 | """Returns the first item that matches the given criteria and | 639 | """Look backwards in the document from this PageElement and find the |
437 | appears before this Tag in the document.""" | 640 | first PageElement that matches the given criteria. |
641 | |||
642 | All find_* methods take a common set of arguments. See the online | ||
643 | documentation for detailed explanations. | ||
644 | |||
645 | :param name: A filter on tag name. | ||
646 | :param attrs: A dictionary of filters on attribute values. | ||
647 | :param string: A filter for a NavigableString with specific text. | ||
648 | :kwargs: A dictionary of filters on attribute values. | ||
649 | :return: A PageElement. | ||
650 | :rtype: bs4.element.Tag | bs4.element.NavigableString | ||
651 | """ | ||
438 | return self._find_one( | 652 | return self._find_one( |
439 | self.find_all_previous, name, attrs, text, **kwargs) | 653 | self.find_all_previous, name, attrs, string, **kwargs) |
440 | findPrevious = find_previous # BS3 | 654 | findPrevious = find_previous # BS3 |
441 | 655 | ||
442 | def find_all_previous(self, name=None, attrs={}, text=None, limit=None, | 656 | def find_all_previous(self, name=None, attrs={}, string=None, limit=None, |
443 | **kwargs): | 657 | **kwargs): |
444 | """Returns all items that match the given criteria and appear | 658 | """Look backwards in the document from this PageElement and find all |
445 | before this Tag in the document.""" | 659 | PageElements that match the given criteria. |
446 | return self._find_all(name, attrs, text, limit, self.previous_elements, | 660 | |
447 | **kwargs) | 661 | All find_* methods take a common set of arguments. See the online |
662 | documentation for detailed explanations. | ||
663 | |||
664 | :param name: A filter on tag name. | ||
665 | :param attrs: A dictionary of filters on attribute values. | ||
666 | :param string: A filter for a NavigableString with specific text. | ||
667 | :param limit: Stop looking after finding this many results. | ||
668 | :kwargs: A dictionary of filters on attribute values. | ||
669 | :return: A ResultSet of PageElements. | ||
670 | :rtype: bs4.element.ResultSet | ||
671 | """ | ||
672 | _stacklevel = kwargs.pop('_stacklevel', 2) | ||
673 | return self._find_all( | ||
674 | name, attrs, string, limit, self.previous_elements, | ||
675 | _stacklevel=_stacklevel+1, **kwargs | ||
676 | ) | ||
448 | findAllPrevious = find_all_previous # BS3 | 677 | findAllPrevious = find_all_previous # BS3 |
449 | fetchPrevious = find_all_previous # BS2 | 678 | fetchPrevious = find_all_previous # BS2 |
450 | 679 | ||
451 | def find_previous_sibling(self, name=None, attrs={}, text=None, **kwargs): | 680 | def find_previous_sibling(self, name=None, attrs={}, string=None, **kwargs): |
452 | """Returns the closest sibling to this Tag that matches the | 681 | """Returns the closest sibling to this PageElement that matches the |
453 | given criteria and appears before this Tag in the document.""" | 682 | given criteria and appears earlier in the document. |
454 | return self._find_one(self.find_previous_siblings, name, attrs, text, | 683 | |
684 | All find_* methods take a common set of arguments. See the online | ||
685 | documentation for detailed explanations. | ||
686 | |||
687 | :param name: A filter on tag name. | ||
688 | :param attrs: A dictionary of filters on attribute values. | ||
689 | :param string: A filter for a NavigableString with specific text. | ||
690 | :kwargs: A dictionary of filters on attribute values. | ||
691 | :return: A PageElement. | ||
692 | :rtype: bs4.element.Tag | bs4.element.NavigableString | ||
693 | """ | ||
694 | return self._find_one(self.find_previous_siblings, name, attrs, string, | ||
455 | **kwargs) | 695 | **kwargs) |
456 | findPreviousSibling = find_previous_sibling # BS3 | 696 | findPreviousSibling = find_previous_sibling # BS3 |
457 | 697 | ||
458 | def find_previous_siblings(self, name=None, attrs={}, text=None, | 698 | def find_previous_siblings(self, name=None, attrs={}, string=None, |
459 | limit=None, **kwargs): | 699 | limit=None, **kwargs): |
460 | """Returns the siblings of this Tag that match the given | 700 | """Returns all siblings to this PageElement that match the |
461 | criteria and appear before this Tag in the document.""" | 701 | given criteria and appear earlier in the document. |
462 | return self._find_all(name, attrs, text, limit, | 702 | |
463 | self.previous_siblings, **kwargs) | 703 | All find_* methods take a common set of arguments. See the online |
704 | documentation for detailed explanations. | ||
705 | |||
706 | :param name: A filter on tag name. | ||
707 | :param attrs: A dictionary of filters on attribute values. | ||
708 | :param string: A filter for a NavigableString with specific text. | ||
709 | :param limit: Stop looking after finding this many results. | ||
710 | :kwargs: A dictionary of filters on attribute values. | ||
711 | :return: A ResultSet of PageElements. | ||
712 | :rtype: bs4.element.ResultSet | ||
713 | """ | ||
714 | _stacklevel = kwargs.pop('_stacklevel', 2) | ||
715 | return self._find_all( | ||
716 | name, attrs, string, limit, | ||
717 | self.previous_siblings, _stacklevel=_stacklevel+1, **kwargs | ||
718 | ) | ||
464 | findPreviousSiblings = find_previous_siblings # BS3 | 719 | findPreviousSiblings = find_previous_siblings # BS3 |
465 | fetchPreviousSiblings = find_previous_siblings # BS2 | 720 | fetchPreviousSiblings = find_previous_siblings # BS2 |
466 | 721 | ||
467 | def find_parent(self, name=None, attrs={}, **kwargs): | 722 | def find_parent(self, name=None, attrs={}, **kwargs): |
468 | """Returns the closest parent of this Tag that matches the given | 723 | """Find the closest parent of this PageElement that matches the given |
469 | criteria.""" | 724 | criteria. |
725 | |||
726 | All find_* methods take a common set of arguments. See the online | ||
727 | documentation for detailed explanations. | ||
728 | |||
729 | :param name: A filter on tag name. | ||
730 | :param attrs: A dictionary of filters on attribute values. | ||
731 | :kwargs: A dictionary of filters on attribute values. | ||
732 | |||
733 | :return: A PageElement. | ||
734 | :rtype: bs4.element.Tag | bs4.element.NavigableString | ||
735 | """ | ||
470 | # NOTE: We can't use _find_one because findParents takes a different | 736 | # NOTE: We can't use _find_one because findParents takes a different |
471 | # set of arguments. | 737 | # set of arguments. |
472 | r = None | 738 | r = None |
473 | l = self.find_parents(name, attrs, 1, **kwargs) | 739 | l = self.find_parents(name, attrs, 1, _stacklevel=3, **kwargs) |
474 | if l: | 740 | if l: |
475 | r = l[0] | 741 | r = l[0] |
476 | return r | 742 | return r |
477 | findParent = find_parent # BS3 | 743 | findParent = find_parent # BS3 |
478 | 744 | ||
479 | def find_parents(self, name=None, attrs={}, limit=None, **kwargs): | 745 | def find_parents(self, name=None, attrs={}, limit=None, **kwargs): |
480 | """Returns the parents of this Tag that match the given | 746 | """Find all parents of this PageElement that match the given criteria. |
481 | criteria.""" | 747 | |
748 | All find_* methods take a common set of arguments. See the online | ||
749 | documentation for detailed explanations. | ||
482 | 750 | ||
751 | :param name: A filter on tag name. | ||
752 | :param attrs: A dictionary of filters on attribute values. | ||
753 | :param limit: Stop looking after finding this many results. | ||
754 | :kwargs: A dictionary of filters on attribute values. | ||
755 | |||
756 | :return: A PageElement. | ||
757 | :rtype: bs4.element.Tag | bs4.element.NavigableString | ||
758 | """ | ||
759 | _stacklevel = kwargs.pop('_stacklevel', 2) | ||
483 | return self._find_all(name, attrs, None, limit, self.parents, | 760 | return self._find_all(name, attrs, None, limit, self.parents, |
484 | **kwargs) | 761 | _stacklevel=_stacklevel+1, **kwargs) |
485 | findParents = find_parents # BS3 | 762 | findParents = find_parents # BS3 |
486 | fetchParents = find_parents # BS2 | 763 | fetchParents = find_parents # BS2 |
487 | 764 | ||
488 | @property | 765 | @property |
489 | def next(self): | 766 | def next(self): |
767 | """The PageElement, if any, that was parsed just after this one. | ||
768 | |||
769 | :return: A PageElement. | ||
770 | :rtype: bs4.element.Tag | bs4.element.NavigableString | ||
771 | """ | ||
490 | return self.next_element | 772 | return self.next_element |
491 | 773 | ||
492 | @property | 774 | @property |
493 | def previous(self): | 775 | def previous(self): |
776 | """The PageElement, if any, that was parsed just before this one. | ||
777 | |||
778 | :return: A PageElement. | ||
779 | :rtype: bs4.element.Tag | bs4.element.NavigableString | ||
780 | """ | ||
494 | return self.previous_element | 781 | return self.previous_element |
495 | 782 | ||
496 | #These methods do the real heavy lifting. | 783 | #These methods do the real heavy lifting. |
497 | 784 | ||
498 | def _find_one(self, method, name, attrs, text, **kwargs): | 785 | def _find_one(self, method, name, attrs, string, **kwargs): |
499 | r = None | 786 | r = None |
500 | l = method(name, attrs, text, 1, **kwargs) | 787 | l = method(name, attrs, string, 1, _stacklevel=4, **kwargs) |
501 | if l: | 788 | if l: |
502 | r = l[0] | 789 | r = l[0] |
503 | return r | 790 | return r |
504 | 791 | ||
505 | def _find_all(self, name, attrs, text, limit, generator, **kwargs): | 792 | def _find_all(self, name, attrs, string, limit, generator, **kwargs): |
506 | "Iterates over a generator looking for things that match." | 793 | "Iterates over a generator looking for things that match." |
794 | _stacklevel = kwargs.pop('_stacklevel', 3) | ||
507 | 795 | ||
508 | if text is None and 'string' in kwargs: | 796 | if string is None and 'text' in kwargs: |
509 | text = kwargs['string'] | 797 | string = kwargs.pop('text') |
510 | del kwargs['string'] | 798 | warnings.warn( |
799 | "The 'text' argument to find()-type methods is deprecated. Use 'string' instead.", | ||
800 | DeprecationWarning, stacklevel=_stacklevel | ||
801 | ) | ||
511 | 802 | ||
512 | if isinstance(name, SoupStrainer): | 803 | if isinstance(name, SoupStrainer): |
513 | strainer = name | 804 | strainer = name |
514 | else: | 805 | else: |
515 | strainer = SoupStrainer(name, attrs, text, **kwargs) | 806 | strainer = SoupStrainer(name, attrs, string, **kwargs) |
516 | 807 | ||
517 | if text is None and not limit and not attrs and not kwargs: | 808 | if string is None and not limit and not attrs and not kwargs: |
518 | if name is True or name is None: | 809 | if name is True or name is None: |
519 | # Optimization to find all tags. | 810 | # Optimization to find all tags. |
520 | result = (element for element in generator | 811 | result = (element for element in generator |
@@ -522,9 +813,23 @@ class PageElement(object): | |||
522 | return ResultSet(strainer, result) | 813 | return ResultSet(strainer, result) |
523 | elif isinstance(name, str): | 814 | elif isinstance(name, str): |
524 | # Optimization to find all tags with a given name. | 815 | # Optimization to find all tags with a given name. |
816 | if name.count(':') == 1: | ||
817 | # This is a name with a prefix. If this is a namespace-aware document, | ||
818 | # we need to match the local name against tag.name. If not, | ||
819 | # we need to match the fully-qualified name against tag.name. | ||
820 | prefix, local_name = name.split(':', 1) | ||
821 | else: | ||
822 | prefix = None | ||
823 | local_name = name | ||
525 | result = (element for element in generator | 824 | result = (element for element in generator |
526 | if isinstance(element, Tag) | 825 | if isinstance(element, Tag) |
527 | and element.name == name) | 826 | and ( |
827 | element.name == name | ||
828 | ) or ( | ||
829 | element.name == local_name | ||
830 | and (prefix is None or element.prefix == prefix) | ||
831 | ) | ||
832 | ) | ||
528 | return ResultSet(strainer, result) | 833 | return ResultSet(strainer, result) |
529 | results = ResultSet(strainer) | 834 | results = ResultSet(strainer) |
530 | while True: | 835 | while True: |
@@ -544,6 +849,10 @@ class PageElement(object): | |||
544 | #NavigableStrings and Tags. | 849 | #NavigableStrings and Tags. |
545 | @property | 850 | @property |
546 | def next_elements(self): | 851 | def next_elements(self): |
852 | """All PageElements that were parsed after this one. | ||
853 | |||
854 | :yield: A sequence of PageElements. | ||
855 | """ | ||
547 | i = self.next_element | 856 | i = self.next_element |
548 | while i is not None: | 857 | while i is not None: |
549 | yield i | 858 | yield i |
@@ -551,6 +860,11 @@ class PageElement(object): | |||
551 | 860 | ||
552 | @property | 861 | @property |
553 | def next_siblings(self): | 862 | def next_siblings(self): |
863 | """All PageElements that are siblings of this one but were parsed | ||
864 | later. | ||
865 | |||
866 | :yield: A sequence of PageElements. | ||
867 | """ | ||
554 | i = self.next_sibling | 868 | i = self.next_sibling |
555 | while i is not None: | 869 | while i is not None: |
556 | yield i | 870 | yield i |
@@ -558,6 +872,10 @@ class PageElement(object): | |||
558 | 872 | ||
559 | @property | 873 | @property |
560 | def previous_elements(self): | 874 | def previous_elements(self): |
875 | """All PageElements that were parsed before this one. | ||
876 | |||
877 | :yield: A sequence of PageElements. | ||
878 | """ | ||
561 | i = self.previous_element | 879 | i = self.previous_element |
562 | while i is not None: | 880 | while i is not None: |
563 | yield i | 881 | yield i |
@@ -565,6 +883,11 @@ class PageElement(object): | |||
565 | 883 | ||
566 | @property | 884 | @property |
567 | def previous_siblings(self): | 885 | def previous_siblings(self): |
886 | """All PageElements that are siblings of this one but were parsed | ||
887 | earlier. | ||
888 | |||
889 | :yield: A sequence of PageElements. | ||
890 | """ | ||
568 | i = self.previous_sibling | 891 | i = self.previous_sibling |
569 | while i is not None: | 892 | while i is not None: |
570 | yield i | 893 | yield i |
@@ -572,87 +895,23 @@ class PageElement(object): | |||
572 | 895 | ||
573 | @property | 896 | @property |
574 | def parents(self): | 897 | def parents(self): |
898 | """All PageElements that are parents of this PageElement. | ||
899 | |||
900 | :yield: A sequence of PageElements. | ||
901 | """ | ||
575 | i = self.parent | 902 | i = self.parent |
576 | while i is not None: | 903 | while i is not None: |
577 | yield i | 904 | yield i |
578 | i = i.parent | 905 | i = i.parent |
579 | 906 | ||
580 | # Methods for supporting CSS selectors. | 907 | @property |
581 | 908 | def decomposed(self): | |
582 | tag_name_re = re.compile(r'^[a-zA-Z0-9][-.a-zA-Z0-9:_]*$') | 909 | """Check whether a PageElement has been decomposed. |
583 | |||
584 | # /^([a-zA-Z0-9][-.a-zA-Z0-9:_]*)\[(\w+)([=~\|\^\$\*]?)=?"?([^\]"]*)"?\]$/ | ||
585 | # \---------------------------/ \---/\-------------/ \-------/ | ||
586 | # | | | | | ||
587 | # | | | The value | ||
588 | # | | ~,|,^,$,* or = | ||
589 | # | Attribute | ||
590 | # Tag | ||
591 | attribselect_re = re.compile( | ||
592 | r'^(?P<tag>[a-zA-Z0-9][-.a-zA-Z0-9:_]*)?\[(?P<attribute>[\w-]+)(?P<operator>[=~\|\^\$\*]?)' + | ||
593 | r'=?"?(?P<value>[^\]"]*)"?\]$' | ||
594 | ) | ||
595 | |||
596 | def _attr_value_as_string(self, value, default=None): | ||
597 | """Force an attribute value into a string representation. | ||
598 | 910 | ||
599 | A multi-valued attribute will be converted into a | 911 | :rtype: bool |
600 | space-separated stirng. | ||
601 | """ | 912 | """ |
602 | value = self.get(value, default) | 913 | return getattr(self, '_decomposed', False) or False |
603 | if isinstance(value, list) or isinstance(value, tuple): | 914 | |
604 | value =" ".join(value) | ||
605 | return value | ||
606 | |||
607 | def _tag_name_matches_and(self, function, tag_name): | ||
608 | if not tag_name: | ||
609 | return function | ||
610 | else: | ||
611 | def _match(tag): | ||
612 | return tag.name == tag_name and function(tag) | ||
613 | return _match | ||
614 | |||
615 | def _attribute_checker(self, operator, attribute, value=''): | ||
616 | """Create a function that performs a CSS selector operation. | ||
617 | |||
618 | Takes an operator, attribute and optional value. Returns a | ||
619 | function that will return True for elements that match that | ||
620 | combination. | ||
621 | """ | ||
622 | if operator == '=': | ||
623 | # string representation of `attribute` is equal to `value` | ||
624 | return lambda el: el._attr_value_as_string(attribute) == value | ||
625 | elif operator == '~': | ||
626 | # space-separated list representation of `attribute` | ||
627 | # contains `value` | ||
628 | def _includes_value(element): | ||
629 | attribute_value = element.get(attribute, []) | ||
630 | if not isinstance(attribute_value, list): | ||
631 | attribute_value = attribute_value.split() | ||
632 | return value in attribute_value | ||
633 | return _includes_value | ||
634 | elif operator == '^': | ||
635 | # string representation of `attribute` starts with `value` | ||
636 | return lambda el: el._attr_value_as_string( | ||
637 | attribute, '').startswith(value) | ||
638 | elif operator == '$': | ||
639 | # string represenation of `attribute` ends with `value` | ||
640 | return lambda el: el._attr_value_as_string( | ||
641 | attribute, '').endswith(value) | ||
642 | elif operator == '*': | ||
643 | # string representation of `attribute` contains `value` | ||
644 | return lambda el: value in el._attr_value_as_string(attribute, '') | ||
645 | elif operator == '|': | ||
646 | # string representation of `attribute` is either exactly | ||
647 | # `value` or starts with `value` and then a dash. | ||
648 | def _is_or_starts_with_dash(element): | ||
649 | attribute_value = element._attr_value_as_string(attribute, '') | ||
650 | return (attribute_value == value or attribute_value.startswith( | ||
651 | value + '-')) | ||
652 | return _is_or_starts_with_dash | ||
653 | else: | ||
654 | return lambda el: el.has_attr(attribute) | ||
655 | |||
656 | # Old non-property versions of the generators, for backwards | 915 | # Old non-property versions of the generators, for backwards |
657 | # compatibility with BS3. | 916 | # compatibility with BS3. |
658 | def nextGenerator(self): | 917 | def nextGenerator(self): |
@@ -672,6 +931,11 @@ class PageElement(object): | |||
672 | 931 | ||
673 | 932 | ||
674 | class NavigableString(str, PageElement): | 933 | class NavigableString(str, PageElement): |
934 | """A Python Unicode string that is part of a parse tree. | ||
935 | |||
936 | When Beautiful Soup parses the markup <b>penguin</b>, it will | ||
937 | create a NavigableString for the string "penguin". | ||
938 | """ | ||
675 | 939 | ||
676 | PREFIX = '' | 940 | PREFIX = '' |
677 | SUFFIX = '' | 941 | SUFFIX = '' |
@@ -691,12 +955,22 @@ class NavigableString(str, PageElement): | |||
691 | u.setup() | 955 | u.setup() |
692 | return u | 956 | return u |
693 | 957 | ||
694 | def __copy__(self): | 958 | def __deepcopy__(self, memo, recursive=False): |
695 | """A copy of a NavigableString has the same contents and class | 959 | """A copy of a NavigableString has the same contents and class |
696 | as the original, but it is not connected to the parse tree. | 960 | as the original, but it is not connected to the parse tree. |
961 | |||
962 | :param recursive: This parameter is ignored; it's only defined | ||
963 | so that NavigableString.__deepcopy__ implements the same | ||
964 | signature as Tag.__deepcopy__. | ||
697 | """ | 965 | """ |
698 | return type(self)(self) | 966 | return type(self)(self) |
699 | 967 | ||
968 | def __copy__(self): | ||
969 | """A copy of a NavigableString can only be a deep copy, because | ||
970 | only one PageElement can occupy a given place in a parse tree. | ||
971 | """ | ||
972 | return self.__deepcopy__({}) | ||
973 | |||
700 | def __getnewargs__(self): | 974 | def __getnewargs__(self): |
701 | return (str(self),) | 975 | return (str(self),) |
702 | 976 | ||
@@ -712,55 +986,146 @@ class NavigableString(str, PageElement): | |||
712 | self.__class__.__name__, attr)) | 986 | self.__class__.__name__, attr)) |
713 | 987 | ||
714 | def output_ready(self, formatter="minimal"): | 988 | def output_ready(self, formatter="minimal"): |
989 | """Run the string through the provided formatter. | ||
990 | |||
991 | :param formatter: A Formatter object, or a string naming one of the standard formatters. | ||
992 | """ | ||
715 | output = self.format_string(self, formatter) | 993 | output = self.format_string(self, formatter) |
716 | return self.PREFIX + output + self.SUFFIX | 994 | return self.PREFIX + output + self.SUFFIX |
717 | 995 | ||
718 | @property | 996 | @property |
719 | def name(self): | 997 | def name(self): |
998 | """Since a NavigableString is not a Tag, it has no .name. | ||
999 | |||
1000 | This property is implemented so that code like this doesn't crash | ||
1001 | when run on a mixture of Tag and NavigableString objects: | ||
1002 | [x.name for x in tag.children] | ||
1003 | """ | ||
720 | return None | 1004 | return None |
721 | 1005 | ||
722 | @name.setter | 1006 | @name.setter |
723 | def name(self, name): | 1007 | def name(self, name): |
1008 | """Prevent NavigableString.name from ever being set.""" | ||
724 | raise AttributeError("A NavigableString cannot be given a name.") | 1009 | raise AttributeError("A NavigableString cannot be given a name.") |
725 | 1010 | ||
1011 | def _all_strings(self, strip=False, types=PageElement.default): | ||
1012 | """Yield all strings of certain classes, possibly stripping them. | ||
1013 | |||
1014 | This makes it easy for NavigableString to implement methods | ||
1015 | like get_text() as conveniences, creating a consistent | ||
1016 | text-extraction API across all PageElements. | ||
1017 | |||
1018 | :param strip: If True, all strings will be stripped before being | ||
1019 | yielded. | ||
1020 | |||
1021 | :param types: A tuple of NavigableString subclasses. If this | ||
1022 | NavigableString isn't one of those subclasses, the | ||
1023 | sequence will be empty. By default, the subclasses | ||
1024 | considered are NavigableString and CData objects. That | ||
1025 | means no comments, processing instructions, etc. | ||
1026 | |||
1027 | :yield: A sequence that either contains this string, or is empty. | ||
1028 | |||
1029 | """ | ||
1030 | if types is self.default: | ||
1031 | # This is kept in Tag because it's full of subclasses of | ||
1032 | # this class, which aren't defined until later in the file. | ||
1033 | types = Tag.DEFAULT_INTERESTING_STRING_TYPES | ||
1034 | |||
1035 | # Do nothing if the caller is looking for specific types of | ||
1036 | # string, and we're of a different type. | ||
1037 | # | ||
1038 | # We check specific types instead of using isinstance(self, | ||
1039 | # types) because all of these classes subclass | ||
1040 | # NavigableString. Anyone who's using this feature probably | ||
1041 | # wants generic NavigableStrings but not other stuff. | ||
1042 | my_type = type(self) | ||
1043 | if types is not None: | ||
1044 | if isinstance(types, type): | ||
1045 | # Looking for a single type. | ||
1046 | if my_type is not types: | ||
1047 | return | ||
1048 | elif my_type not in types: | ||
1049 | # Looking for one of a list of types. | ||
1050 | return | ||
1051 | |||
1052 | value = self | ||
1053 | if strip: | ||
1054 | value = value.strip() | ||
1055 | if len(value) > 0: | ||
1056 | yield value | ||
1057 | strings = property(_all_strings) | ||
1058 | |||
726 | class PreformattedString(NavigableString): | 1059 | class PreformattedString(NavigableString): |
727 | """A NavigableString not subject to the normal formatting rules. | 1060 | """A NavigableString not subject to the normal formatting rules. |
728 | 1061 | ||
729 | The string will be passed into the formatter (to trigger side effects), | 1062 | This is an abstract class used for special kinds of strings such |
730 | but the return value will be ignored. | 1063 | as comments (the Comment class) and CDATA blocks (the CData |
1064 | class). | ||
731 | """ | 1065 | """ |
732 | 1066 | ||
733 | def output_ready(self, formatter="minimal"): | 1067 | PREFIX = '' |
734 | """CData strings are passed into the formatter. | 1068 | SUFFIX = '' |
735 | But the return value is ignored.""" | 1069 | |
736 | self.format_string(self, formatter) | 1070 | def output_ready(self, formatter=None): |
1071 | """Make this string ready for output by adding any subclass-specific | ||
1072 | prefix or suffix. | ||
1073 | |||
1074 | :param formatter: A Formatter object, or a string naming one | ||
1075 | of the standard formatters. The string will be passed into the | ||
1076 | Formatter, but only to trigger any side effects: the return | ||
1077 | value is ignored. | ||
1078 | |||
1079 | :return: The string, with any subclass-specific prefix and | ||
1080 | suffix added on. | ||
1081 | """ | ||
1082 | if formatter is not None: | ||
1083 | ignore = self.format_string(self, formatter) | ||
737 | return self.PREFIX + self + self.SUFFIX | 1084 | return self.PREFIX + self + self.SUFFIX |
738 | 1085 | ||
739 | class CData(PreformattedString): | 1086 | class CData(PreformattedString): |
740 | 1087 | """A CDATA block.""" | |
741 | PREFIX = '<![CDATA[' | 1088 | PREFIX = '<![CDATA[' |
742 | SUFFIX = ']]>' | 1089 | SUFFIX = ']]>' |
743 | 1090 | ||
744 | class ProcessingInstruction(PreformattedString): | 1091 | class ProcessingInstruction(PreformattedString): |
1092 | """A SGML processing instruction.""" | ||
745 | 1093 | ||
746 | PREFIX = '<?' | 1094 | PREFIX = '<?' |
747 | SUFFIX = '>' | 1095 | SUFFIX = '>' |
748 | 1096 | ||
749 | class Comment(PreformattedString): | 1097 | class XMLProcessingInstruction(ProcessingInstruction): |
1098 | """An XML processing instruction.""" | ||
1099 | PREFIX = '<?' | ||
1100 | SUFFIX = '?>' | ||
750 | 1101 | ||
1102 | class Comment(PreformattedString): | ||
1103 | """An HTML or XML comment.""" | ||
751 | PREFIX = '<!--' | 1104 | PREFIX = '<!--' |
752 | SUFFIX = '-->' | 1105 | SUFFIX = '-->' |
753 | 1106 | ||
754 | 1107 | ||
755 | class Declaration(PreformattedString): | 1108 | class Declaration(PreformattedString): |
1109 | """An XML declaration.""" | ||
756 | PREFIX = '<?' | 1110 | PREFIX = '<?' |
757 | SUFFIX = '?>' | 1111 | SUFFIX = '?>' |
758 | 1112 | ||
759 | 1113 | ||
760 | class Doctype(PreformattedString): | 1114 | class Doctype(PreformattedString): |
761 | 1115 | """A document type declaration.""" | |
762 | @classmethod | 1116 | @classmethod |
763 | def for_name_and_ids(cls, name, pub_id, system_id): | 1117 | def for_name_and_ids(cls, name, pub_id, system_id): |
1118 | """Generate an appropriate document type declaration for a given | ||
1119 | public ID and system ID. | ||
1120 | |||
1121 | :param name: The name of the document's root element, e.g. 'html'. | ||
1122 | :param pub_id: The Formal Public Identifier for this document type, | ||
1123 | e.g. '-//W3C//DTD XHTML 1.1//EN' | ||
1124 | :param system_id: The system identifier for this document type, | ||
1125 | e.g. 'http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd' | ||
1126 | |||
1127 | :return: A Doctype. | ||
1128 | """ | ||
764 | value = name or '' | 1129 | value = name or '' |
765 | if pub_id is not None: | 1130 | if pub_id is not None: |
766 | value += ' PUBLIC "%s"' % pub_id | 1131 | value += ' PUBLIC "%s"' % pub_id |
@@ -775,14 +1140,105 @@ class Doctype(PreformattedString): | |||
775 | SUFFIX = '>\n' | 1140 | SUFFIX = '>\n' |
776 | 1141 | ||
777 | 1142 | ||
1143 | class Stylesheet(NavigableString): | ||
1144 | """A NavigableString representing an stylesheet (probably | ||
1145 | CSS). | ||
1146 | |||
1147 | Used to distinguish embedded stylesheets from textual content. | ||
1148 | """ | ||
1149 | pass | ||
1150 | |||
1151 | |||
1152 | class Script(NavigableString): | ||
1153 | """A NavigableString representing an executable script (probably | ||
1154 | Javascript). | ||
1155 | |||
1156 | Used to distinguish executable code from textual content. | ||
1157 | """ | ||
1158 | pass | ||
1159 | |||
1160 | |||
1161 | class TemplateString(NavigableString): | ||
1162 | """A NavigableString representing a string found inside an HTML | ||
1163 | template embedded in a larger document. | ||
1164 | |||
1165 | Used to distinguish such strings from the main body of the document. | ||
1166 | """ | ||
1167 | pass | ||
1168 | |||
1169 | |||
1170 | class RubyTextString(NavigableString): | ||
1171 | """A NavigableString representing the contents of the <rt> HTML | ||
1172 | element. | ||
1173 | |||
1174 | https://dev.w3.org/html5/spec-LC/text-level-semantics.html#the-rt-element | ||
1175 | |||
1176 | Can be used to distinguish such strings from the strings they're | ||
1177 | annotating. | ||
1178 | """ | ||
1179 | pass | ||
1180 | |||
1181 | |||
1182 | class RubyParenthesisString(NavigableString): | ||
1183 | """A NavigableString representing the contents of the <rp> HTML | ||
1184 | element. | ||
1185 | |||
1186 | https://dev.w3.org/html5/spec-LC/text-level-semantics.html#the-rp-element | ||
1187 | """ | ||
1188 | pass | ||
1189 | |||
1190 | |||
778 | class Tag(PageElement): | 1191 | class Tag(PageElement): |
1192 | """Represents an HTML or XML tag that is part of a parse tree, along | ||
1193 | with its attributes and contents. | ||
779 | 1194 | ||
780 | """Represents a found HTML tag with its attributes and contents.""" | 1195 | When Beautiful Soup parses the markup <b>penguin</b>, it will |
1196 | create a Tag object representing the <b> tag. | ||
1197 | """ | ||
781 | 1198 | ||
782 | def __init__(self, parser=None, builder=None, name=None, namespace=None, | 1199 | def __init__(self, parser=None, builder=None, name=None, namespace=None, |
783 | prefix=None, attrs=None, parent=None, previous=None): | 1200 | prefix=None, attrs=None, parent=None, previous=None, |
784 | "Basic constructor." | 1201 | is_xml=None, sourceline=None, sourcepos=None, |
785 | 1202 | can_be_empty_element=None, cdata_list_attributes=None, | |
1203 | preserve_whitespace_tags=None, | ||
1204 | interesting_string_types=None, | ||
1205 | namespaces=None | ||
1206 | ): | ||
1207 | """Basic constructor. | ||
1208 | |||
1209 | :param parser: A BeautifulSoup object. | ||
1210 | :param builder: A TreeBuilder. | ||
1211 | :param name: The name of the tag. | ||
1212 | :param namespace: The URI of this Tag's XML namespace, if any. | ||
1213 | :param prefix: The prefix for this Tag's XML namespace, if any. | ||
1214 | :param attrs: A dictionary of this Tag's attribute values. | ||
1215 | :param parent: The PageElement to use as this Tag's parent. | ||
1216 | :param previous: The PageElement that was parsed immediately before | ||
1217 | this tag. | ||
1218 | :param is_xml: If True, this is an XML tag. Otherwise, this is an | ||
1219 | HTML tag. | ||
1220 | :param sourceline: The line number where this tag was found in its | ||
1221 | source document. | ||
1222 | :param sourcepos: The character position within `sourceline` where this | ||
1223 | tag was found. | ||
1224 | :param can_be_empty_element: If True, this tag should be | ||
1225 | represented as <tag/>. If False, this tag should be represented | ||
1226 | as <tag></tag>. | ||
1227 | :param cdata_list_attributes: A list of attributes whose values should | ||
1228 | be treated as CDATA if they ever show up on this tag. | ||
1229 | :param preserve_whitespace_tags: A list of tag names whose contents | ||
1230 | should have their whitespace preserved. | ||
1231 | :param interesting_string_types: This is a NavigableString | ||
1232 | subclass or a tuple of them. When iterating over this | ||
1233 | Tag's strings in methods like Tag.strings or Tag.get_text, | ||
1234 | these are the types of strings that are interesting enough | ||
1235 | to be considered. The default is to consider | ||
1236 | NavigableString and CData the only interesting string | ||
1237 | subtypes. | ||
1238 | :param namespaces: A dictionary mapping currently active | ||
1239 | namespace prefixes to URIs. This can be used later to | ||
1240 | construct CSS selectors. | ||
1241 | """ | ||
786 | if parser is None: | 1242 | if parser is None: |
787 | self.parser_class = None | 1243 | self.parser_class = None |
788 | else: | 1244 | else: |
@@ -793,7 +1249,12 @@ class Tag(PageElement): | |||
793 | raise ValueError("No value provided for new tag's name.") | 1249 | raise ValueError("No value provided for new tag's name.") |
794 | self.name = name | 1250 | self.name = name |
795 | self.namespace = namespace | 1251 | self.namespace = namespace |
1252 | self._namespaces = namespaces or {} | ||
796 | self.prefix = prefix | 1253 | self.prefix = prefix |
1254 | if ((not builder or builder.store_line_numbers) | ||
1255 | and (sourceline is not None or sourcepos is not None)): | ||
1256 | self.sourceline = sourceline | ||
1257 | self.sourcepos = sourcepos | ||
797 | if attrs is None: | 1258 | if attrs is None: |
798 | attrs = {} | 1259 | attrs = {} |
799 | elif attrs: | 1260 | elif attrs: |
@@ -804,32 +1265,109 @@ class Tag(PageElement): | |||
804 | attrs = dict(attrs) | 1265 | attrs = dict(attrs) |
805 | else: | 1266 | else: |
806 | attrs = dict(attrs) | 1267 | attrs = dict(attrs) |
1268 | |||
1269 | # If possible, determine ahead of time whether this tag is an | ||
1270 | # XML tag. | ||
1271 | if builder: | ||
1272 | self.known_xml = builder.is_xml | ||
1273 | else: | ||
1274 | self.known_xml = is_xml | ||
807 | self.attrs = attrs | 1275 | self.attrs = attrs |
808 | self.contents = [] | 1276 | self.contents = [] |
809 | self.setup(parent, previous) | 1277 | self.setup(parent, previous) |
810 | self.hidden = False | 1278 | self.hidden = False |
811 | 1279 | ||
812 | # Set up any substitutions, such as the charset in a META tag. | 1280 | if builder is None: |
813 | if builder is not None: | 1281 | # In the absence of a TreeBuilder, use whatever values were |
1282 | # passed in here. They're probably None, unless this is a copy of some | ||
1283 | # other tag. | ||
1284 | self.can_be_empty_element = can_be_empty_element | ||
1285 | self.cdata_list_attributes = cdata_list_attributes | ||
1286 | self.preserve_whitespace_tags = preserve_whitespace_tags | ||
1287 | self.interesting_string_types = interesting_string_types | ||
1288 | else: | ||
1289 | # Set up any substitutions for this tag, such as the charset in a META tag. | ||
814 | builder.set_up_substitutions(self) | 1290 | builder.set_up_substitutions(self) |
1291 | |||
1292 | # Ask the TreeBuilder whether this tag might be an empty-element tag. | ||
815 | self.can_be_empty_element = builder.can_be_empty_element(name) | 1293 | self.can_be_empty_element = builder.can_be_empty_element(name) |
816 | else: | 1294 | |
817 | self.can_be_empty_element = False | 1295 | # Keep track of the list of attributes of this tag that |
1296 | # might need to be treated as a list. | ||
1297 | # | ||
1298 | # For performance reasons, we store the whole data structure | ||
1299 | # rather than asking the question of every tag. Asking would | ||
1300 | # require building a new data structure every time, and | ||
1301 | # (unlike can_be_empty_element), we almost never need | ||
1302 | # to check this. | ||
1303 | self.cdata_list_attributes = builder.cdata_list_attributes | ||
1304 | |||
1305 | # Keep track of the names that might cause this tag to be treated as a | ||
1306 | # whitespace-preserved tag. | ||
1307 | self.preserve_whitespace_tags = builder.preserve_whitespace_tags | ||
1308 | |||
1309 | if self.name in builder.string_containers: | ||
1310 | # This sort of tag uses a special string container | ||
1311 | # subclass for most of its strings. When we ask the | ||
1312 | self.interesting_string_types = builder.string_containers[self.name] | ||
1313 | else: | ||
1314 | self.interesting_string_types = self.DEFAULT_INTERESTING_STRING_TYPES | ||
818 | 1315 | ||
819 | parserClass = _alias("parser_class") # BS3 | 1316 | parserClass = _alias("parser_class") # BS3 |
820 | 1317 | ||
821 | def __copy__(self): | 1318 | def __deepcopy__(self, memo, recursive=True): |
822 | """A copy of a Tag is a new Tag, unconnected to the parse tree. | 1319 | """A deepcopy of a Tag is a new Tag, unconnected to the parse tree. |
823 | Its contents are a copy of the old Tag's contents. | 1320 | Its contents are a copy of the old Tag's contents. |
824 | """ | 1321 | """ |
825 | clone = type(self)(None, self.builder, self.name, self.namespace, | 1322 | clone = self._clone() |
826 | self.nsprefix, self.attrs) | 1323 | |
1324 | if recursive: | ||
1325 | # Clone this tag's descendants recursively, but without | ||
1326 | # making any recursive function calls. | ||
1327 | tag_stack = [clone] | ||
1328 | for event, element in self._event_stream(self.descendants): | ||
1329 | if event is Tag.END_ELEMENT_EVENT: | ||
1330 | # Stop appending incoming Tags to the Tag that was | ||
1331 | # just closed. | ||
1332 | tag_stack.pop() | ||
1333 | else: | ||
1334 | descendant_clone = element.__deepcopy__( | ||
1335 | memo, recursive=False | ||
1336 | ) | ||
1337 | # Add to its parent's .contents | ||
1338 | tag_stack[-1].append(descendant_clone) | ||
1339 | |||
1340 | if event is Tag.START_ELEMENT_EVENT: | ||
1341 | # Add the Tag itself to the stack so that its | ||
1342 | # children will be .appended to it. | ||
1343 | tag_stack.append(descendant_clone) | ||
1344 | return clone | ||
1345 | |||
1346 | def __copy__(self): | ||
1347 | """A copy of a Tag must always be a deep copy, because a Tag's | ||
1348 | children can only have one parent at a time. | ||
1349 | """ | ||
1350 | return self.__deepcopy__({}) | ||
1351 | |||
1352 | def _clone(self): | ||
1353 | """Create a new Tag just like this one, but with no | ||
1354 | contents and unattached to any parse tree. | ||
1355 | |||
1356 | This is the first step in the deepcopy process. | ||
1357 | """ | ||
1358 | clone = type(self)( | ||
1359 | None, None, self.name, self.namespace, | ||
1360 | self.prefix, self.attrs, is_xml=self._is_xml, | ||
1361 | sourceline=self.sourceline, sourcepos=self.sourcepos, | ||
1362 | can_be_empty_element=self.can_be_empty_element, | ||
1363 | cdata_list_attributes=self.cdata_list_attributes, | ||
1364 | preserve_whitespace_tags=self.preserve_whitespace_tags, | ||
1365 | interesting_string_types=self.interesting_string_types | ||
1366 | ) | ||
827 | for attr in ('can_be_empty_element', 'hidden'): | 1367 | for attr in ('can_be_empty_element', 'hidden'): |
828 | setattr(clone, attr, getattr(self, attr)) | 1368 | setattr(clone, attr, getattr(self, attr)) |
829 | for child in self.contents: | ||
830 | clone.append(child.__copy__()) | ||
831 | return clone | 1369 | return clone |
832 | 1370 | ||
833 | @property | 1371 | @property |
834 | def is_empty_element(self): | 1372 | def is_empty_element(self): |
835 | """Is this tag an empty-element tag? (aka a self-closing tag) | 1373 | """Is this tag an empty-element tag? (aka a self-closing tag) |
@@ -850,13 +1388,17 @@ class Tag(PageElement): | |||
850 | 1388 | ||
851 | @property | 1389 | @property |
852 | def string(self): | 1390 | def string(self): |
853 | """Convenience property to get the single string within this tag. | 1391 | """Convenience property to get the single string within this |
1392 | PageElement. | ||
854 | 1393 | ||
855 | :Return: If this tag has a single string child, return value | 1394 | TODO It might make sense to have NavigableString.string return |
856 | is that string. If this tag has no children, or more than one | 1395 | itself. |
857 | child, return value is None. If this tag has one child tag, | 1396 | |
1397 | :return: If this element has a single string child, return | ||
1398 | value is that string. If this element has one child tag, | ||
858 | return value is the 'string' attribute of the child tag, | 1399 | return value is the 'string' attribute of the child tag, |
859 | recursively. | 1400 | recursively. If this element is itself a string, has no |
1401 | children, or has more than one child, return value is None. | ||
860 | """ | 1402 | """ |
861 | if len(self.contents) != 1: | 1403 | if len(self.contents) != 1: |
862 | return None | 1404 | return None |
@@ -867,57 +1409,75 @@ class Tag(PageElement): | |||
867 | 1409 | ||
868 | @string.setter | 1410 | @string.setter |
869 | def string(self, string): | 1411 | def string(self, string): |
1412 | """Replace this PageElement's contents with `string`.""" | ||
870 | self.clear() | 1413 | self.clear() |
871 | self.append(string.__class__(string)) | 1414 | self.append(string.__class__(string)) |
872 | 1415 | ||
873 | def _all_strings(self, strip=False, types=(NavigableString, CData)): | 1416 | DEFAULT_INTERESTING_STRING_TYPES = (NavigableString, CData) |
1417 | def _all_strings(self, strip=False, types=PageElement.default): | ||
874 | """Yield all strings of certain classes, possibly stripping them. | 1418 | """Yield all strings of certain classes, possibly stripping them. |
875 | 1419 | ||
876 | By default, yields only NavigableString and CData objects. So | 1420 | :param strip: If True, all strings will be stripped before being |
877 | no comments, processing instructions, etc. | 1421 | yielded. |
1422 | |||
1423 | :param types: A tuple of NavigableString subclasses. Any strings of | ||
1424 | a subclass not found in this list will be ignored. By | ||
1425 | default, the subclasses considered are the ones found in | ||
1426 | self.interesting_string_types. If that's not specified, | ||
1427 | only NavigableString and CData objects will be | ||
1428 | considered. That means no comments, processing | ||
1429 | instructions, etc. | ||
1430 | |||
1431 | :yield: A sequence of strings. | ||
1432 | |||
878 | """ | 1433 | """ |
1434 | if types is self.default: | ||
1435 | types = self.interesting_string_types | ||
1436 | |||
879 | for descendant in self.descendants: | 1437 | for descendant in self.descendants: |
880 | if ( | 1438 | if (types is None and not isinstance(descendant, NavigableString)): |
881 | (types is None and not isinstance(descendant, NavigableString)) | 1439 | continue |
882 | or | 1440 | descendant_type = type(descendant) |
883 | (types is not None and type(descendant) not in types)): | 1441 | if isinstance(types, type): |
1442 | if descendant_type is not types: | ||
1443 | # We're not interested in strings of this type. | ||
1444 | continue | ||
1445 | elif types is not None and descendant_type not in types: | ||
1446 | # We're not interested in strings of this type. | ||
884 | continue | 1447 | continue |
885 | if strip: | 1448 | if strip: |
886 | descendant = descendant.strip() | 1449 | descendant = descendant.strip() |
887 | if len(descendant) == 0: | 1450 | if len(descendant) == 0: |
888 | continue | 1451 | continue |
889 | yield descendant | 1452 | yield descendant |
890 | |||
891 | strings = property(_all_strings) | 1453 | strings = property(_all_strings) |
892 | 1454 | ||
893 | @property | 1455 | def decompose(self): |
894 | def stripped_strings(self): | 1456 | """Recursively destroys this PageElement and its children. |
895 | for string in self._all_strings(True): | ||
896 | yield string | ||
897 | 1457 | ||
898 | def get_text(self, separator="", strip=False, | 1458 | This element will be removed from the tree and wiped out; so |
899 | types=(NavigableString, CData)): | 1459 | will everything beneath it. |
900 | """ | ||
901 | Get all child strings, concatenated using the given separator. | ||
902 | """ | ||
903 | return separator.join([s for s in self._all_strings( | ||
904 | strip, types=types)]) | ||
905 | getText = get_text | ||
906 | text = property(get_text) | ||
907 | 1460 | ||
908 | def decompose(self): | 1461 | The behavior of a decomposed PageElement is undefined and you |
909 | """Recursively destroys the contents of this tree.""" | 1462 | should never use one for anything, but if you need to _check_ |
1463 | whether an element has been decomposed, you can use the | ||
1464 | `decomposed` property. | ||
1465 | """ | ||
910 | self.extract() | 1466 | self.extract() |
911 | i = self | 1467 | i = self |
912 | while i is not None: | 1468 | while i is not None: |
913 | next = i.next_element | 1469 | n = i.next_element |
914 | i.__dict__.clear() | 1470 | i.__dict__.clear() |
915 | i.contents = [] | 1471 | i.contents = [] |
916 | i = next | 1472 | i._decomposed = True |
1473 | i = n | ||
917 | 1474 | ||
918 | def clear(self, decompose=False): | 1475 | def clear(self, decompose=False): |
919 | """ | 1476 | """Wipe out all children of this PageElement by calling extract() |
920 | Extract all children. If decompose is True, decompose instead. | 1477 | on them. |
1478 | |||
1479 | :param decompose: If this is True, decompose() (a more | ||
1480 | destructive method) will be called instead of extract(). | ||
921 | """ | 1481 | """ |
922 | if decompose: | 1482 | if decompose: |
923 | for element in self.contents[:]: | 1483 | for element in self.contents[:]: |
@@ -929,10 +1489,51 @@ class Tag(PageElement): | |||
929 | for element in self.contents[:]: | 1489 | for element in self.contents[:]: |
930 | element.extract() | 1490 | element.extract() |
931 | 1491 | ||
932 | def index(self, element): | 1492 | def smooth(self): |
1493 | """Smooth out this element's children by consolidating consecutive | ||
1494 | strings. | ||
1495 | |||
1496 | This makes pretty-printed output look more natural following a | ||
1497 | lot of operations that modified the tree. | ||
933 | """ | 1498 | """ |
934 | Find the index of a child by identity, not value. Avoids issues with | 1499 | # Mark the first position of every pair of children that need |
935 | tag.contents.index(element) getting the index of equal elements. | 1500 | # to be consolidated. Do this rather than making a copy of |
1501 | # self.contents, since in most cases very few strings will be | ||
1502 | # affected. | ||
1503 | marked = [] | ||
1504 | for i, a in enumerate(self.contents): | ||
1505 | if isinstance(a, Tag): | ||
1506 | # Recursively smooth children. | ||
1507 | a.smooth() | ||
1508 | if i == len(self.contents)-1: | ||
1509 | # This is the last item in .contents, and it's not a | ||
1510 | # tag. There's no chance it needs any work. | ||
1511 | continue | ||
1512 | b = self.contents[i+1] | ||
1513 | if (isinstance(a, NavigableString) | ||
1514 | and isinstance(b, NavigableString) | ||
1515 | and not isinstance(a, PreformattedString) | ||
1516 | and not isinstance(b, PreformattedString) | ||
1517 | ): | ||
1518 | marked.append(i) | ||
1519 | |||
1520 | # Go over the marked positions in reverse order, so that | ||
1521 | # removing items from .contents won't affect the remaining | ||
1522 | # positions. | ||
1523 | for i in reversed(marked): | ||
1524 | a = self.contents[i] | ||
1525 | b = self.contents[i+1] | ||
1526 | b.extract() | ||
1527 | n = NavigableString(a+b) | ||
1528 | a.replace_with(n) | ||
1529 | |||
1530 | def index(self, element): | ||
1531 | """Find the index of a child by identity, not value. | ||
1532 | |||
1533 | Avoids issues with tag.contents.index(element) getting the | ||
1534 | index of equal elements. | ||
1535 | |||
1536 | :param element: Look for this PageElement in `self.contents`. | ||
936 | """ | 1537 | """ |
937 | for i, child in enumerate(self.contents): | 1538 | for i, child in enumerate(self.contents): |
938 | if child is element: | 1539 | if child is element: |
@@ -945,23 +1546,38 @@ class Tag(PageElement): | |||
945 | attribute.""" | 1546 | attribute.""" |
946 | return self.attrs.get(key, default) | 1547 | return self.attrs.get(key, default) |
947 | 1548 | ||
1549 | def get_attribute_list(self, key, default=None): | ||
1550 | """The same as get(), but always returns a list. | ||
1551 | |||
1552 | :param key: The attribute to look for. | ||
1553 | :param default: Use this value if the attribute is not present | ||
1554 | on this PageElement. | ||
1555 | :return: A list of values, probably containing only a single | ||
1556 | value. | ||
1557 | """ | ||
1558 | value = self.get(key, default) | ||
1559 | if not isinstance(value, list): | ||
1560 | value = [value] | ||
1561 | return value | ||
1562 | |||
948 | def has_attr(self, key): | 1563 | def has_attr(self, key): |
1564 | """Does this PageElement have an attribute with the given name?""" | ||
949 | return key in self.attrs | 1565 | return key in self.attrs |
950 | 1566 | ||
951 | def __hash__(self): | 1567 | def __hash__(self): |
952 | return str(self).__hash__() | 1568 | return str(self).__hash__() |
953 | 1569 | ||
954 | def __getitem__(self, key): | 1570 | def __getitem__(self, key): |
955 | """tag[key] returns the value of the 'key' attribute for the tag, | 1571 | """tag[key] returns the value of the 'key' attribute for the Tag, |
956 | and throws an exception if it's not there.""" | 1572 | and throws an exception if it's not there.""" |
957 | return self.attrs[key] | 1573 | return self.attrs[key] |
958 | 1574 | ||
959 | def __iter__(self): | 1575 | def __iter__(self): |
960 | "Iterating over a tag iterates over its contents." | 1576 | "Iterating over a Tag iterates over its contents." |
961 | return iter(self.contents) | 1577 | return iter(self.contents) |
962 | 1578 | ||
963 | def __len__(self): | 1579 | def __len__(self): |
964 | "The length of a tag is the length of its list of contents." | 1580 | "The length of a Tag is the length of its list of contents." |
965 | return len(self.contents) | 1581 | return len(self.contents) |
966 | 1582 | ||
967 | def __contains__(self, x): | 1583 | def __contains__(self, x): |
@@ -981,29 +1597,33 @@ class Tag(PageElement): | |||
981 | self.attrs.pop(key, None) | 1597 | self.attrs.pop(key, None) |
982 | 1598 | ||
983 | def __call__(self, *args, **kwargs): | 1599 | def __call__(self, *args, **kwargs): |
984 | """Calling a tag like a function is the same as calling its | 1600 | """Calling a Tag like a function is the same as calling its |
985 | find_all() method. Eg. tag('a') returns a list of all the A tags | 1601 | find_all() method. Eg. tag('a') returns a list of all the A tags |
986 | found within this tag.""" | 1602 | found within this tag.""" |
987 | return self.find_all(*args, **kwargs) | 1603 | return self.find_all(*args, **kwargs) |
988 | 1604 | ||
989 | def __getattr__(self, tag): | 1605 | def __getattr__(self, tag): |
990 | #print "Getattr %s.%s" % (self.__class__, tag) | 1606 | """Calling tag.subtag is the same as calling tag.find(name="subtag")""" |
1607 | #print("Getattr %s.%s" % (self.__class__, tag)) | ||
991 | if len(tag) > 3 and tag.endswith('Tag'): | 1608 | if len(tag) > 3 and tag.endswith('Tag'): |
992 | # BS3: soup.aTag -> "soup.find("a") | 1609 | # BS3: soup.aTag -> "soup.find("a") |
993 | tag_name = tag[:-3] | 1610 | tag_name = tag[:-3] |
994 | warnings.warn( | 1611 | warnings.warn( |
995 | '.%sTag is deprecated, use .find("%s") instead.' % ( | 1612 | '.%(name)sTag is deprecated, use .find("%(name)s") instead. If you really were looking for a tag called %(name)sTag, use .find("%(name)sTag")' % dict( |
996 | tag_name, tag_name)) | 1613 | name=tag_name |
1614 | ), | ||
1615 | DeprecationWarning, stacklevel=2 | ||
1616 | ) | ||
997 | return self.find(tag_name) | 1617 | return self.find(tag_name) |
998 | # We special case contents to avoid recursion. | 1618 | # We special case contents to avoid recursion. |
999 | elif not tag.startswith("__") and not tag=="contents": | 1619 | elif not tag.startswith("__") and not tag == "contents": |
1000 | return self.find(tag) | 1620 | return self.find(tag) |
1001 | raise AttributeError( | 1621 | raise AttributeError( |
1002 | "'%s' object has no attribute '%s'" % (self.__class__, tag)) | 1622 | "'%s' object has no attribute '%s'" % (self.__class__, tag)) |
1003 | 1623 | ||
1004 | def __eq__(self, other): | 1624 | def __eq__(self, other): |
1005 | """Returns true iff this tag has the same name, the same attributes, | 1625 | """Returns true iff this Tag has the same name, the same attributes, |
1006 | and the same contents (recursively) as the given tag.""" | 1626 | and the same contents (recursively) as `other`.""" |
1007 | if self is other: | 1627 | if self is other: |
1008 | return True | 1628 | return True |
1009 | if (not hasattr(other, 'name') or | 1629 | if (not hasattr(other, 'name') or |
@@ -1019,69 +1639,235 @@ class Tag(PageElement): | |||
1019 | return True | 1639 | return True |
1020 | 1640 | ||
1021 | def __ne__(self, other): | 1641 | def __ne__(self, other): |
1022 | """Returns true iff this tag is not identical to the other tag, | 1642 | """Returns true iff this Tag is not identical to `other`, |
1023 | as defined in __eq__.""" | 1643 | as defined in __eq__.""" |
1024 | return not self == other | 1644 | return not self == other |
1025 | 1645 | ||
1026 | def __repr__(self, encoding="unicode-escape"): | 1646 | def __repr__(self, encoding="unicode-escape"): |
1027 | """Renders this tag as a string.""" | 1647 | """Renders this PageElement as a string. |
1028 | if PY3K: | ||
1029 | # "The return value must be a string object", i.e. Unicode | ||
1030 | return self.decode() | ||
1031 | else: | ||
1032 | # "The return value must be a string object", i.e. a bytestring. | ||
1033 | # By convention, the return value of __repr__ should also be | ||
1034 | # an ASCII string. | ||
1035 | return self.encode(encoding) | ||
1036 | 1648 | ||
1037 | def __unicode__(self): | 1649 | :param encoding: The encoding to use (Python 2 only). |
1650 | TODO: This is now ignored and a warning should be issued | ||
1651 | if a value is provided. | ||
1652 | :return: A (Unicode) string. | ||
1653 | """ | ||
1654 | # "The return value must be a string object", i.e. Unicode | ||
1038 | return self.decode() | 1655 | return self.decode() |
1039 | 1656 | ||
1040 | def __str__(self): | 1657 | def __unicode__(self): |
1041 | if PY3K: | 1658 | """Renders this PageElement as a Unicode string.""" |
1042 | return self.decode() | 1659 | return self.decode() |
1043 | else: | ||
1044 | return self.encode() | ||
1045 | 1660 | ||
1046 | if PY3K: | 1661 | __str__ = __repr__ = __unicode__ |
1047 | __str__ = __repr__ = __unicode__ | ||
1048 | 1662 | ||
1049 | def encode(self, encoding=DEFAULT_OUTPUT_ENCODING, | 1663 | def encode(self, encoding=DEFAULT_OUTPUT_ENCODING, |
1050 | indent_level=None, formatter="minimal", | 1664 | indent_level=None, formatter="minimal", |
1051 | errors="xmlcharrefreplace"): | 1665 | errors="xmlcharrefreplace"): |
1666 | """Render a bytestring representation of this PageElement and its | ||
1667 | contents. | ||
1668 | |||
1669 | :param encoding: The destination encoding. | ||
1670 | :param indent_level: Each line of the rendering will be | ||
1671 | indented this many levels. (The formatter decides what a | ||
1672 | 'level' means in terms of spaces or other characters | ||
1673 | output.) Used internally in recursive calls while | ||
1674 | pretty-printing. | ||
1675 | :param formatter: A Formatter object, or a string naming one of | ||
1676 | the standard formatters. | ||
1677 | :param errors: An error handling strategy such as | ||
1678 | 'xmlcharrefreplace'. This value is passed along into | ||
1679 | encode() and its value should be one of the constants | ||
1680 | defined by Python. | ||
1681 | :return: A bytestring. | ||
1682 | |||
1683 | """ | ||
1052 | # Turn the data structure into Unicode, then encode the | 1684 | # Turn the data structure into Unicode, then encode the |
1053 | # Unicode. | 1685 | # Unicode. |
1054 | u = self.decode(indent_level, encoding, formatter) | 1686 | u = self.decode(indent_level, encoding, formatter) |
1055 | return u.encode(encoding, errors) | 1687 | return u.encode(encoding, errors) |
1056 | 1688 | ||
1057 | def _should_pretty_print(self, indent_level): | ||
1058 | """Should this tag be pretty-printed?""" | ||
1059 | return ( | ||
1060 | indent_level is not None and | ||
1061 | (self.name not in HTMLAwareEntitySubstitution.preformatted_tags | ||
1062 | or self._is_xml)) | ||
1063 | |||
1064 | def decode(self, indent_level=None, | 1689 | def decode(self, indent_level=None, |
1065 | eventual_encoding=DEFAULT_OUTPUT_ENCODING, | 1690 | eventual_encoding=DEFAULT_OUTPUT_ENCODING, |
1066 | formatter="minimal"): | 1691 | formatter="minimal", |
1067 | """Returns a Unicode representation of this tag and its contents. | 1692 | iterator=None): |
1693 | pieces = [] | ||
1694 | # First off, turn a non-Formatter `formatter` into a Formatter | ||
1695 | # object. This will stop the lookup from happening over and | ||
1696 | # over again. | ||
1697 | if not isinstance(formatter, Formatter): | ||
1698 | formatter = self.formatter_for_name(formatter) | ||
1699 | |||
1700 | if indent_level is True: | ||
1701 | indent_level = 0 | ||
1702 | |||
1703 | # The currently active tag that put us into string literal | ||
1704 | # mode. Until this element is closed, children will be treated | ||
1705 | # as string literals and not pretty-printed. String literal | ||
1706 | # mode is turned on immediately after this tag begins, and | ||
1707 | # turned off immediately before it's closed. This means there | ||
1708 | # will be whitespace before and after the tag itself. | ||
1709 | string_literal_tag = None | ||
1710 | |||
1711 | for event, element in self._event_stream(iterator): | ||
1712 | if event in (Tag.START_ELEMENT_EVENT, Tag.EMPTY_ELEMENT_EVENT): | ||
1713 | piece = element._format_tag( | ||
1714 | eventual_encoding, formatter, opening=True | ||
1715 | ) | ||
1716 | elif event is Tag.END_ELEMENT_EVENT: | ||
1717 | piece = element._format_tag( | ||
1718 | eventual_encoding, formatter, opening=False | ||
1719 | ) | ||
1720 | if indent_level is not None: | ||
1721 | indent_level -= 1 | ||
1722 | else: | ||
1723 | piece = element.output_ready(formatter) | ||
1724 | |||
1725 | # Now we need to apply the 'prettiness' -- extra | ||
1726 | # whitespace before and/or after this tag. This can get | ||
1727 | # complicated because certain tags, like <pre> and | ||
1728 | # <script>, can't be prettified, since adding whitespace would | ||
1729 | # change the meaning of the content. | ||
1730 | |||
1731 | # The default behavior is to add whitespace before and | ||
1732 | # after an element when string literal mode is off, and to | ||
1733 | # leave things as they are when string literal mode is on. | ||
1734 | if string_literal_tag: | ||
1735 | indent_before = indent_after = False | ||
1736 | else: | ||
1737 | indent_before = indent_after = True | ||
1738 | |||
1739 | # The only time the behavior is more complex than that is | ||
1740 | # when we encounter an opening or closing tag that might | ||
1741 | # put us into or out of string literal mode. | ||
1742 | if (event is Tag.START_ELEMENT_EVENT | ||
1743 | and not string_literal_tag | ||
1744 | and not element._should_pretty_print()): | ||
1745 | # We are about to enter string literal mode. Add | ||
1746 | # whitespace before this tag, but not after. We | ||
1747 | # will stay in string literal mode until this tag | ||
1748 | # is closed. | ||
1749 | indent_before = True | ||
1750 | indent_after = False | ||
1751 | string_literal_tag = element | ||
1752 | elif (event is Tag.END_ELEMENT_EVENT | ||
1753 | and element is string_literal_tag): | ||
1754 | # We are about to exit string literal mode by closing | ||
1755 | # the tag that sent us into that mode. Add whitespace | ||
1756 | # after this tag, but not before. | ||
1757 | indent_before = False | ||
1758 | indent_after = True | ||
1759 | string_literal_tag = None | ||
1760 | |||
1761 | # Now we know whether to add whitespace before and/or | ||
1762 | # after this element. | ||
1763 | if indent_level is not None: | ||
1764 | if (indent_before or indent_after): | ||
1765 | if isinstance(element, NavigableString): | ||
1766 | piece = piece.strip() | ||
1767 | if piece: | ||
1768 | piece = self._indent_string( | ||
1769 | piece, indent_level, formatter, | ||
1770 | indent_before, indent_after | ||
1771 | ) | ||
1772 | if event == Tag.START_ELEMENT_EVENT: | ||
1773 | indent_level += 1 | ||
1774 | pieces.append(piece) | ||
1775 | return "".join(pieces) | ||
1776 | |||
1777 | # Names for the different events yielded by _event_stream | ||
1778 | START_ELEMENT_EVENT = object() | ||
1779 | END_ELEMENT_EVENT = object() | ||
1780 | EMPTY_ELEMENT_EVENT = object() | ||
1781 | STRING_ELEMENT_EVENT = object() | ||
1782 | |||
1783 | def _event_stream(self, iterator=None): | ||
1784 | """Yield a sequence of events that can be used to reconstruct the DOM | ||
1785 | for this element. | ||
1786 | |||
1787 | This lets us recreate the nested structure of this element | ||
1788 | (e.g. when formatting it as a string) without using recursive | ||
1789 | method calls. | ||
1790 | |||
1791 | This is similar in concept to the SAX API, but it's a simpler | ||
1792 | interface designed for internal use. The events are different | ||
1793 | from SAX and the arguments associated with the events are Tags | ||
1794 | and other Beautiful Soup objects. | ||
1795 | |||
1796 | :param iterator: An alternate iterator to use when traversing | ||
1797 | the tree. | ||
1798 | """ | ||
1799 | tag_stack = [] | ||
1068 | 1800 | ||
1069 | :param eventual_encoding: The tag is destined to be | 1801 | iterator = iterator or self.self_and_descendants |
1070 | encoded into this encoding. This method is _not_ | 1802 | |
1071 | responsible for performing that encoding. This information | 1803 | for c in iterator: |
1072 | is passed in so that it can be substituted in if the | 1804 | # If the parent of the element we're about to yield is not |
1073 | document contains a <META> tag that mentions the document's | 1805 | # the tag currently on the stack, it means that the tag on |
1074 | encoding. | 1806 | # the stack closed before this element appeared. |
1807 | while tag_stack and c.parent != tag_stack[-1]: | ||
1808 | now_closed_tag = tag_stack.pop() | ||
1809 | yield Tag.END_ELEMENT_EVENT, now_closed_tag | ||
1810 | |||
1811 | if isinstance(c, Tag): | ||
1812 | if c.is_empty_element: | ||
1813 | yield Tag.EMPTY_ELEMENT_EVENT, c | ||
1814 | else: | ||
1815 | yield Tag.START_ELEMENT_EVENT, c | ||
1816 | tag_stack.append(c) | ||
1817 | continue | ||
1818 | else: | ||
1819 | yield Tag.STRING_ELEMENT_EVENT, c | ||
1820 | |||
1821 | while tag_stack: | ||
1822 | now_closed_tag = tag_stack.pop() | ||
1823 | yield Tag.END_ELEMENT_EVENT, now_closed_tag | ||
1824 | |||
1825 | def _indent_string(self, s, indent_level, formatter, | ||
1826 | indent_before, indent_after): | ||
1827 | """Add indentation whitespace before and/or after a string. | ||
1828 | |||
1829 | :param s: The string to amend with whitespace. | ||
1830 | :param indent_level: The indentation level; affects how much | ||
1831 | whitespace goes before the string. | ||
1832 | :param indent_before: Whether or not to add whitespace | ||
1833 | before the string. | ||
1834 | :param indent_after: Whether or not to add whitespace | ||
1835 | (a newline) after the string. | ||
1075 | """ | 1836 | """ |
1837 | space_before = '' | ||
1838 | if indent_before and indent_level: | ||
1839 | space_before = (formatter.indent * indent_level) | ||
1076 | 1840 | ||
1077 | # First off, turn a string formatter into a function. This | 1841 | space_after = '' |
1078 | # will stop the lookup from happening over and over again. | 1842 | if indent_after: |
1079 | if not isinstance(formatter, collections.abc.Callable): | 1843 | space_after = "\n" |
1080 | formatter = self._formatter_for_name(formatter) | ||
1081 | 1844 | ||
1082 | attrs = [] | 1845 | return space_before + s + space_after |
1083 | if self.attrs: | 1846 | |
1084 | for key, val in sorted(self.attrs.items()): | 1847 | def _format_tag(self, eventual_encoding, formatter, opening): |
1848 | if self.hidden: | ||
1849 | # A hidden tag is invisible, although its contents | ||
1850 | # are visible. | ||
1851 | return '' | ||
1852 | |||
1853 | # A tag starts with the < character (see below). | ||
1854 | |||
1855 | # Then the / character, if this is a closing tag. | ||
1856 | closing_slash = '' | ||
1857 | if not opening: | ||
1858 | closing_slash = '/' | ||
1859 | |||
1860 | # Then an optional namespace prefix. | ||
1861 | prefix = '' | ||
1862 | if self.prefix: | ||
1863 | prefix = self.prefix + ":" | ||
1864 | |||
1865 | # Then a list of attribute values, if this is an opening tag. | ||
1866 | attribute_string = '' | ||
1867 | if opening: | ||
1868 | attributes = formatter.attributes(self) | ||
1869 | attrs = [] | ||
1870 | for key, val in attributes: | ||
1085 | if val is None: | 1871 | if val is None: |
1086 | decoded = key | 1872 | decoded = key |
1087 | else: | 1873 | else: |
@@ -1090,71 +1876,52 @@ class Tag(PageElement): | |||
1090 | elif not isinstance(val, str): | 1876 | elif not isinstance(val, str): |
1091 | val = str(val) | 1877 | val = str(val) |
1092 | elif ( | 1878 | elif ( |
1093 | isinstance(val, AttributeValueWithCharsetSubstitution) | 1879 | isinstance(val, AttributeValueWithCharsetSubstitution) |
1094 | and eventual_encoding is not None): | 1880 | and eventual_encoding is not None |
1881 | ): | ||
1095 | val = val.encode(eventual_encoding) | 1882 | val = val.encode(eventual_encoding) |
1096 | 1883 | ||
1097 | text = self.format_string(val, formatter) | 1884 | text = formatter.attribute_value(val) |
1098 | decoded = ( | 1885 | decoded = ( |
1099 | str(key) + '=' | 1886 | str(key) + '=' |
1100 | + EntitySubstitution.quoted_attribute_value(text)) | 1887 | + formatter.quoted_attribute_value(text)) |
1101 | attrs.append(decoded) | 1888 | attrs.append(decoded) |
1102 | close = '' | 1889 | if attrs: |
1103 | closeTag = '' | 1890 | attribute_string = ' ' + ' '.join(attrs) |
1104 | |||
1105 | prefix = '' | ||
1106 | if self.prefix: | ||
1107 | prefix = self.prefix + ":" | ||
1108 | 1891 | ||
1892 | # Then an optional closing slash (for a void element in an | ||
1893 | # XML document). | ||
1894 | void_element_closing_slash = '' | ||
1109 | if self.is_empty_element: | 1895 | if self.is_empty_element: |
1110 | close = '/' | 1896 | void_element_closing_slash = formatter.void_element_close_prefix or '' |
1111 | else: | ||
1112 | closeTag = '</%s%s>' % (prefix, self.name) | ||
1113 | |||
1114 | pretty_print = self._should_pretty_print(indent_level) | ||
1115 | space = '' | ||
1116 | indent_space = '' | ||
1117 | if indent_level is not None: | ||
1118 | indent_space = (' ' * (indent_level - 1)) | ||
1119 | if pretty_print: | ||
1120 | space = indent_space | ||
1121 | indent_contents = indent_level + 1 | ||
1122 | else: | ||
1123 | indent_contents = None | ||
1124 | contents = self.decode_contents( | ||
1125 | indent_contents, eventual_encoding, formatter) | ||
1126 | 1897 | ||
1127 | if self.hidden: | 1898 | # Put it all together. |
1128 | # This is the 'document root' object. | 1899 | return '<' + closing_slash + prefix + self.name + attribute_string + void_element_closing_slash + '>' |
1129 | s = contents | 1900 | |
1130 | else: | 1901 | def _should_pretty_print(self, indent_level=1): |
1131 | s = [] | 1902 | """Should this tag be pretty-printed? |
1132 | attribute_string = '' | 1903 | |
1133 | if attrs: | 1904 | Most of them should, but some (such as <pre> in HTML |
1134 | attribute_string = ' ' + ' '.join(attrs) | 1905 | documents) should not. |
1135 | if indent_level is not None: | 1906 | """ |
1136 | # Even if this particular tag is not pretty-printed, | 1907 | return ( |
1137 | # we should indent up to the start of the tag. | 1908 | indent_level is not None |
1138 | s.append(indent_space) | 1909 | and ( |
1139 | s.append('<%s%s%s%s>' % ( | 1910 | not self.preserve_whitespace_tags |
1140 | prefix, self.name, attribute_string, close)) | 1911 | or self.name not in self.preserve_whitespace_tags |
1141 | if pretty_print: | 1912 | ) |
1142 | s.append("\n") | 1913 | ) |
1143 | s.append(contents) | ||
1144 | if pretty_print and contents and contents[-1] != "\n": | ||
1145 | s.append("\n") | ||
1146 | if pretty_print and closeTag: | ||
1147 | s.append(space) | ||
1148 | s.append(closeTag) | ||
1149 | if indent_level is not None and closeTag and self.next_sibling: | ||
1150 | # Even if this particular tag is not pretty-printed, | ||
1151 | # we're now done with the tag, and we should add a | ||
1152 | # newline if appropriate. | ||
1153 | s.append("\n") | ||
1154 | s = ''.join(s) | ||
1155 | return s | ||
1156 | 1914 | ||
1157 | def prettify(self, encoding=None, formatter="minimal"): | 1915 | def prettify(self, encoding=None, formatter="minimal"): |
1916 | """Pretty-print this PageElement as a string. | ||
1917 | |||
1918 | :param encoding: The eventual encoding of the string. If this is None, | ||
1919 | a Unicode string will be returned. | ||
1920 | :param formatter: A Formatter object, or a string naming one of | ||
1921 | the standard formatters. | ||
1922 | :return: A Unicode string (if encoding==None) or a bytestring | ||
1923 | (otherwise). | ||
1924 | """ | ||
1158 | if encoding is None: | 1925 | if encoding is None: |
1159 | return self.decode(True, formatter=formatter) | 1926 | return self.decode(True, formatter=formatter) |
1160 | else: | 1927 | else: |
@@ -1166,62 +1933,50 @@ class Tag(PageElement): | |||
1166 | """Renders the contents of this tag as a Unicode string. | 1933 | """Renders the contents of this tag as a Unicode string. |
1167 | 1934 | ||
1168 | :param indent_level: Each line of the rendering will be | 1935 | :param indent_level: Each line of the rendering will be |
1169 | indented this many spaces. | 1936 | indented this many levels. (The formatter decides what a |
1937 | 'level' means in terms of spaces or other characters | ||
1938 | output.) Used internally in recursive calls while | ||
1939 | pretty-printing. | ||
1170 | 1940 | ||
1171 | :param eventual_encoding: The tag is destined to be | 1941 | :param eventual_encoding: The tag is destined to be |
1172 | encoded into this encoding. This method is _not_ | 1942 | encoded into this encoding. decode_contents() is _not_ |
1173 | responsible for performing that encoding. This information | 1943 | responsible for performing that encoding. This information |
1174 | is passed in so that it can be substituted in if the | 1944 | is passed in so that it can be substituted in if the |
1175 | document contains a <META> tag that mentions the document's | 1945 | document contains a <META> tag that mentions the document's |
1176 | encoding. | 1946 | encoding. |
1177 | 1947 | ||
1178 | :param formatter: The output formatter responsible for converting | 1948 | :param formatter: A Formatter object, or a string naming one of |
1179 | entities to Unicode characters. | 1949 | the standard Formatters. |
1180 | """ | 1950 | |
1181 | # First off, turn a string formatter into a function. This | 1951 | """ |
1182 | # will stop the lookup from happening over and over again. | 1952 | return self.decode(indent_level, eventual_encoding, formatter, |
1183 | if not isinstance(formatter, collections.abc.Callable): | 1953 | iterator=self.descendants) |
1184 | formatter = self._formatter_for_name(formatter) | ||
1185 | |||
1186 | pretty_print = (indent_level is not None) | ||
1187 | s = [] | ||
1188 | for c in self: | ||
1189 | text = None | ||
1190 | if isinstance(c, NavigableString): | ||
1191 | text = c.output_ready(formatter) | ||
1192 | elif isinstance(c, Tag): | ||
1193 | s.append(c.decode(indent_level, eventual_encoding, | ||
1194 | formatter)) | ||
1195 | if text and indent_level and not self.name == 'pre': | ||
1196 | text = text.strip() | ||
1197 | if text: | ||
1198 | if pretty_print and not self.name == 'pre': | ||
1199 | s.append(" " * (indent_level - 1)) | ||
1200 | s.append(text) | ||
1201 | if pretty_print and not self.name == 'pre': | ||
1202 | s.append("\n") | ||
1203 | return ''.join(s) | ||
1204 | 1954 | ||
1205 | def encode_contents( | 1955 | def encode_contents( |
1206 | self, indent_level=None, encoding=DEFAULT_OUTPUT_ENCODING, | 1956 | self, indent_level=None, encoding=DEFAULT_OUTPUT_ENCODING, |
1207 | formatter="minimal"): | 1957 | formatter="minimal"): |
1208 | """Renders the contents of this tag as a bytestring. | 1958 | """Renders the contents of this PageElement as a bytestring. |
1209 | 1959 | ||
1210 | :param indent_level: Each line of the rendering will be | 1960 | :param indent_level: Each line of the rendering will be |
1211 | indented this many spaces. | 1961 | indented this many levels. (The formatter decides what a |
1962 | 'level' means in terms of spaces or other characters | ||
1963 | output.) Used internally in recursive calls while | ||
1964 | pretty-printing. | ||
1212 | 1965 | ||
1213 | :param eventual_encoding: The bytestring will be in this encoding. | 1966 | :param eventual_encoding: The bytestring will be in this encoding. |
1214 | 1967 | ||
1215 | :param formatter: The output formatter responsible for converting | 1968 | :param formatter: A Formatter object, or a string naming one of |
1216 | entities to Unicode characters. | 1969 | the standard Formatters. |
1217 | """ | ||
1218 | 1970 | ||
1971 | :return: A bytestring. | ||
1972 | """ | ||
1219 | contents = self.decode_contents(indent_level, encoding, formatter) | 1973 | contents = self.decode_contents(indent_level, encoding, formatter) |
1220 | return contents.encode(encoding) | 1974 | return contents.encode(encoding) |
1221 | 1975 | ||
1222 | # Old method for BS3 compatibility | 1976 | # Old method for BS3 compatibility |
1223 | def renderContents(self, encoding=DEFAULT_OUTPUT_ENCODING, | 1977 | def renderContents(self, encoding=DEFAULT_OUTPUT_ENCODING, |
1224 | prettyPrint=False, indentLevel=0): | 1978 | prettyPrint=False, indentLevel=0): |
1979 | """Deprecated method for BS3 compatibility.""" | ||
1225 | if not prettyPrint: | 1980 | if not prettyPrint: |
1226 | indentLevel = None | 1981 | indentLevel = None |
1227 | return self.encode_contents( | 1982 | return self.encode_contents( |
@@ -1229,44 +1984,88 @@ class Tag(PageElement): | |||
1229 | 1984 | ||
1230 | #Soup methods | 1985 | #Soup methods |
1231 | 1986 | ||
1232 | def find(self, name=None, attrs={}, recursive=True, text=None, | 1987 | def find(self, name=None, attrs={}, recursive=True, string=None, |
1233 | **kwargs): | 1988 | **kwargs): |
1234 | """Return only the first child of this Tag matching the given | 1989 | """Look in the children of this PageElement and find the first |
1235 | criteria.""" | 1990 | PageElement that matches the given criteria. |
1991 | |||
1992 | All find_* methods take a common set of arguments. See the online | ||
1993 | documentation for detailed explanations. | ||
1994 | |||
1995 | :param name: A filter on tag name. | ||
1996 | :param attrs: A dictionary of filters on attribute values. | ||
1997 | :param recursive: If this is True, find() will perform a | ||
1998 | recursive search of this PageElement's children. Otherwise, | ||
1999 | only the direct children will be considered. | ||
2000 | :param limit: Stop looking after finding this many results. | ||
2001 | :kwargs: A dictionary of filters on attribute values. | ||
2002 | :return: A PageElement. | ||
2003 | :rtype: bs4.element.Tag | bs4.element.NavigableString | ||
2004 | """ | ||
1236 | r = None | 2005 | r = None |
1237 | l = self.find_all(name, attrs, recursive, text, 1, **kwargs) | 2006 | l = self.find_all(name, attrs, recursive, string, 1, _stacklevel=3, |
2007 | **kwargs) | ||
1238 | if l: | 2008 | if l: |
1239 | r = l[0] | 2009 | r = l[0] |
1240 | return r | 2010 | return r |
1241 | findChild = find | 2011 | findChild = find #BS2 |
1242 | 2012 | ||
1243 | def find_all(self, name=None, attrs={}, recursive=True, text=None, | 2013 | def find_all(self, name=None, attrs={}, recursive=True, string=None, |
1244 | limit=None, **kwargs): | 2014 | limit=None, **kwargs): |
1245 | """Extracts a list of Tag objects that match the given | 2015 | """Look in the children of this PageElement and find all |
1246 | criteria. You can specify the name of the Tag and any | 2016 | PageElements that match the given criteria. |
1247 | attributes you want the Tag to have. | 2017 | |
1248 | 2018 | All find_* methods take a common set of arguments. See the online | |
1249 | The value of a key-value pair in the 'attrs' map can be a | 2019 | documentation for detailed explanations. |
1250 | string, a list of strings, a regular expression object, or a | 2020 | |
1251 | callable that takes a string and returns whether or not the | 2021 | :param name: A filter on tag name. |
1252 | string matches for some custom definition of 'matches'. The | 2022 | :param attrs: A dictionary of filters on attribute values. |
1253 | same is true of the tag name.""" | 2023 | :param recursive: If this is True, find_all() will perform a |
1254 | 2024 | recursive search of this PageElement's children. Otherwise, | |
2025 | only the direct children will be considered. | ||
2026 | :param limit: Stop looking after finding this many results. | ||
2027 | :kwargs: A dictionary of filters on attribute values. | ||
2028 | :return: A ResultSet of PageElements. | ||
2029 | :rtype: bs4.element.ResultSet | ||
2030 | """ | ||
1255 | generator = self.descendants | 2031 | generator = self.descendants |
1256 | if not recursive: | 2032 | if not recursive: |
1257 | generator = self.children | 2033 | generator = self.children |
1258 | return self._find_all(name, attrs, text, limit, generator, **kwargs) | 2034 | _stacklevel = kwargs.pop('_stacklevel', 2) |
2035 | return self._find_all(name, attrs, string, limit, generator, | ||
2036 | _stacklevel=_stacklevel+1, **kwargs) | ||
1259 | findAll = find_all # BS3 | 2037 | findAll = find_all # BS3 |
1260 | findChildren = find_all # BS2 | 2038 | findChildren = find_all # BS2 |
1261 | 2039 | ||
1262 | #Generator methods | 2040 | #Generator methods |
1263 | @property | 2041 | @property |
1264 | def children(self): | 2042 | def children(self): |
2043 | """Iterate over all direct children of this PageElement. | ||
2044 | |||
2045 | :yield: A sequence of PageElements. | ||
2046 | """ | ||
1265 | # return iter() to make the purpose of the method clear | 2047 | # return iter() to make the purpose of the method clear |
1266 | return iter(self.contents) # XXX This seems to be untested. | 2048 | return iter(self.contents) # XXX This seems to be untested. |
1267 | 2049 | ||
1268 | @property | 2050 | @property |
2051 | def self_and_descendants(self): | ||
2052 | """Iterate over this PageElement and its children in a | ||
2053 | breadth-first sequence. | ||
2054 | |||
2055 | :yield: A sequence of PageElements. | ||
2056 | """ | ||
2057 | if not self.hidden: | ||
2058 | yield self | ||
2059 | for i in self.descendants: | ||
2060 | yield i | ||
2061 | |||
2062 | @property | ||
1269 | def descendants(self): | 2063 | def descendants(self): |
2064 | """Iterate over all children of this PageElement in a | ||
2065 | breadth-first sequence. | ||
2066 | |||
2067 | :yield: A sequence of PageElements. | ||
2068 | """ | ||
1270 | if not len(self.contents): | 2069 | if not len(self.contents): |
1271 | return | 2070 | return |
1272 | stopNode = self._last_descendant().next_element | 2071 | stopNode = self._last_descendant().next_element |
@@ -1276,262 +2075,102 @@ class Tag(PageElement): | |||
1276 | current = current.next_element | 2075 | current = current.next_element |
1277 | 2076 | ||
1278 | # CSS selector code | 2077 | # CSS selector code |
2078 | def select_one(self, selector, namespaces=None, **kwargs): | ||
2079 | """Perform a CSS selection operation on the current element. | ||
1279 | 2080 | ||
1280 | _selector_combinators = ['>', '+', '~'] | 2081 | :param selector: A CSS selector. |
1281 | _select_debug = False | ||
1282 | def select_one(self, selector): | ||
1283 | """Perform a CSS selection operation on the current element.""" | ||
1284 | value = self.select(selector, limit=1) | ||
1285 | if value: | ||
1286 | return value[0] | ||
1287 | return None | ||
1288 | 2082 | ||
1289 | def select(self, selector, _candidate_generator=None, limit=None): | 2083 | :param namespaces: A dictionary mapping namespace prefixes |
1290 | """Perform a CSS selection operation on the current element.""" | 2084 | used in the CSS selector to namespace URIs. By default, |
1291 | 2085 | Beautiful Soup will use the prefixes it encountered while | |
1292 | # Handle grouping selectors if ',' exists, ie: p,a | 2086 | parsing the document. |
1293 | if ',' in selector: | ||
1294 | context = [] | ||
1295 | for partial_selector in selector.split(','): | ||
1296 | partial_selector = partial_selector.strip() | ||
1297 | if partial_selector == '': | ||
1298 | raise ValueError('Invalid group selection syntax: %s' % selector) | ||
1299 | candidates = self.select(partial_selector, limit=limit) | ||
1300 | for candidate in candidates: | ||
1301 | if candidate not in context: | ||
1302 | context.append(candidate) | ||
1303 | |||
1304 | if limit and len(context) >= limit: | ||
1305 | break | ||
1306 | return context | ||
1307 | 2087 | ||
1308 | tokens = selector.split() | 2088 | :param kwargs: Keyword arguments to be passed into Soup Sieve's |
1309 | current_context = [self] | 2089 | soupsieve.select() method. |
1310 | 2090 | ||
1311 | if tokens[-1] in self._selector_combinators: | 2091 | :return: A Tag. |
1312 | raise ValueError( | 2092 | :rtype: bs4.element.Tag |
1313 | 'Final combinator "%s" is missing an argument.' % tokens[-1]) | 2093 | """ |
2094 | return self.css.select_one(selector, namespaces, **kwargs) | ||
1314 | 2095 | ||
1315 | if self._select_debug: | 2096 | def select(self, selector, namespaces=None, limit=None, **kwargs): |
1316 | print('Running CSS selector "%s"' % selector) | 2097 | """Perform a CSS selection operation on the current element. |
1317 | 2098 | ||
1318 | for index, token in enumerate(tokens): | 2099 | This uses the SoupSieve library. |
1319 | new_context = [] | ||
1320 | new_context_ids = set([]) | ||
1321 | 2100 | ||
1322 | if tokens[index-1] in self._selector_combinators: | 2101 | :param selector: A string containing a CSS selector. |
1323 | # This token was consumed by the previous combinator. Skip it. | ||
1324 | if self._select_debug: | ||
1325 | print(' Token was consumed by the previous combinator.') | ||
1326 | continue | ||
1327 | 2102 | ||
1328 | if self._select_debug: | 2103 | :param namespaces: A dictionary mapping namespace prefixes |
1329 | print(' Considering token "%s"' % token) | 2104 | used in the CSS selector to namespace URIs. By default, |
1330 | recursive_candidate_generator = None | 2105 | Beautiful Soup will use the prefixes it encountered while |
1331 | tag_name = None | 2106 | parsing the document. |
1332 | 2107 | ||
1333 | # Each operation corresponds to a checker function, a rule | 2108 | :param limit: After finding this number of results, stop looking. |
1334 | # for determining whether a candidate matches the | 2109 | |
1335 | # selector. Candidates are generated by the active | 2110 | :param kwargs: Keyword arguments to be passed into SoupSieve's |
1336 | # iterator. | 2111 | soupsieve.select() method. |
1337 | checker = None | 2112 | |
1338 | 2113 | :return: A ResultSet of Tags. | |
1339 | m = self.attribselect_re.match(token) | 2114 | :rtype: bs4.element.ResultSet |
1340 | if m is not None: | 2115 | """ |
1341 | # Attribute selector | 2116 | return self.css.select(selector, namespaces, limit, **kwargs) |
1342 | tag_name, attribute, operator, value = m.groups() | 2117 | |
1343 | checker = self._attribute_checker(operator, attribute, value) | 2118 | @property |
1344 | 2119 | def css(self): | |
1345 | elif '#' in token: | 2120 | """Return an interface to the CSS selector API.""" |
1346 | # ID selector | 2121 | return CSS(self) |
1347 | tag_name, tag_id = token.split('#', 1) | ||
1348 | def id_matches(tag): | ||
1349 | return tag.get('id', None) == tag_id | ||
1350 | checker = id_matches | ||
1351 | |||
1352 | elif '.' in token: | ||
1353 | # Class selector | ||
1354 | tag_name, klass = token.split('.', 1) | ||
1355 | classes = set(klass.split('.')) | ||
1356 | def classes_match(candidate): | ||
1357 | return classes.issubset(candidate.get('class', [])) | ||
1358 | checker = classes_match | ||
1359 | |||
1360 | elif ':' in token: | ||
1361 | # Pseudo-class | ||
1362 | tag_name, pseudo = token.split(':', 1) | ||
1363 | if tag_name == '': | ||
1364 | raise ValueError( | ||
1365 | "A pseudo-class must be prefixed with a tag name.") | ||
1366 | pseudo_attributes = re.match(r'([a-zA-Z\d-]+)\(([a-zA-Z\d]+)\)', pseudo) | ||
1367 | found = [] | ||
1368 | if pseudo_attributes is None: | ||
1369 | pseudo_type = pseudo | ||
1370 | pseudo_value = None | ||
1371 | else: | ||
1372 | pseudo_type, pseudo_value = pseudo_attributes.groups() | ||
1373 | if pseudo_type == 'nth-of-type': | ||
1374 | try: | ||
1375 | pseudo_value = int(pseudo_value) | ||
1376 | except: | ||
1377 | raise NotImplementedError( | ||
1378 | 'Only numeric values are currently supported for the nth-of-type pseudo-class.') | ||
1379 | if pseudo_value < 1: | ||
1380 | raise ValueError( | ||
1381 | 'nth-of-type pseudo-class value must be at least 1.') | ||
1382 | class Counter(object): | ||
1383 | def __init__(self, destination): | ||
1384 | self.count = 0 | ||
1385 | self.destination = destination | ||
1386 | |||
1387 | def nth_child_of_type(self, tag): | ||
1388 | self.count += 1 | ||
1389 | if self.count == self.destination: | ||
1390 | return True | ||
1391 | if self.count > self.destination: | ||
1392 | # Stop the generator that's sending us | ||
1393 | # these things. | ||
1394 | raise StopIteration() | ||
1395 | return False | ||
1396 | checker = Counter(pseudo_value).nth_child_of_type | ||
1397 | else: | ||
1398 | raise NotImplementedError( | ||
1399 | 'Only the following pseudo-classes are implemented: nth-of-type.') | ||
1400 | |||
1401 | elif token == '*': | ||
1402 | # Star selector -- matches everything | ||
1403 | pass | ||
1404 | elif token == '>': | ||
1405 | # Run the next token as a CSS selector against the | ||
1406 | # direct children of each tag in the current context. | ||
1407 | recursive_candidate_generator = lambda tag: tag.children | ||
1408 | elif token == '~': | ||
1409 | # Run the next token as a CSS selector against the | ||
1410 | # siblings of each tag in the current context. | ||
1411 | recursive_candidate_generator = lambda tag: tag.next_siblings | ||
1412 | elif token == '+': | ||
1413 | # For each tag in the current context, run the next | ||
1414 | # token as a CSS selector against the tag's next | ||
1415 | # sibling that's a tag. | ||
1416 | def next_tag_sibling(tag): | ||
1417 | yield tag.find_next_sibling(True) | ||
1418 | recursive_candidate_generator = next_tag_sibling | ||
1419 | |||
1420 | elif self.tag_name_re.match(token): | ||
1421 | # Just a tag name. | ||
1422 | tag_name = token | ||
1423 | else: | ||
1424 | raise ValueError( | ||
1425 | 'Unsupported or invalid CSS selector: "%s"' % token) | ||
1426 | if recursive_candidate_generator: | ||
1427 | # This happens when the selector looks like "> foo". | ||
1428 | # | ||
1429 | # The generator calls select() recursively on every | ||
1430 | # member of the current context, passing in a different | ||
1431 | # candidate generator and a different selector. | ||
1432 | # | ||
1433 | # In the case of "> foo", the candidate generator is | ||
1434 | # one that yields a tag's direct children (">"), and | ||
1435 | # the selector is "foo". | ||
1436 | next_token = tokens[index+1] | ||
1437 | def recursive_select(tag): | ||
1438 | if self._select_debug: | ||
1439 | print(' Calling select("%s") recursively on %s %s' % (next_token, tag.name, tag.attrs)) | ||
1440 | print('-' * 40) | ||
1441 | for i in tag.select(next_token, recursive_candidate_generator): | ||
1442 | if self._select_debug: | ||
1443 | print('(Recursive select picked up candidate %s %s)' % (i.name, i.attrs)) | ||
1444 | yield i | ||
1445 | if self._select_debug: | ||
1446 | print('-' * 40) | ||
1447 | _use_candidate_generator = recursive_select | ||
1448 | elif _candidate_generator is None: | ||
1449 | # By default, a tag's candidates are all of its | ||
1450 | # children. If tag_name is defined, only yield tags | ||
1451 | # with that name. | ||
1452 | if self._select_debug: | ||
1453 | if tag_name: | ||
1454 | check = "[any]" | ||
1455 | else: | ||
1456 | check = tag_name | ||
1457 | print(' Default candidate generator, tag name="%s"' % check) | ||
1458 | if self._select_debug: | ||
1459 | # This is redundant with later code, but it stops | ||
1460 | # a bunch of bogus tags from cluttering up the | ||
1461 | # debug log. | ||
1462 | def default_candidate_generator(tag): | ||
1463 | for child in tag.descendants: | ||
1464 | if not isinstance(child, Tag): | ||
1465 | continue | ||
1466 | if tag_name and not child.name == tag_name: | ||
1467 | continue | ||
1468 | yield child | ||
1469 | _use_candidate_generator = default_candidate_generator | ||
1470 | else: | ||
1471 | _use_candidate_generator = lambda tag: tag.descendants | ||
1472 | else: | ||
1473 | _use_candidate_generator = _candidate_generator | ||
1474 | |||
1475 | count = 0 | ||
1476 | for tag in current_context: | ||
1477 | if self._select_debug: | ||
1478 | print(" Running candidate generator on %s %s" % ( | ||
1479 | tag.name, repr(tag.attrs))) | ||
1480 | for candidate in _use_candidate_generator(tag): | ||
1481 | if not isinstance(candidate, Tag): | ||
1482 | continue | ||
1483 | if tag_name and candidate.name != tag_name: | ||
1484 | continue | ||
1485 | if checker is not None: | ||
1486 | try: | ||
1487 | result = checker(candidate) | ||
1488 | except StopIteration: | ||
1489 | # The checker has decided we should no longer | ||
1490 | # run the generator. | ||
1491 | break | ||
1492 | if checker is None or result: | ||
1493 | if self._select_debug: | ||
1494 | print(" SUCCESS %s %s" % (candidate.name, repr(candidate.attrs))) | ||
1495 | if id(candidate) not in new_context_ids: | ||
1496 | # If a tag matches a selector more than once, | ||
1497 | # don't include it in the context more than once. | ||
1498 | new_context.append(candidate) | ||
1499 | new_context_ids.add(id(candidate)) | ||
1500 | if limit and len(new_context) >= limit: | ||
1501 | break | ||
1502 | elif self._select_debug: | ||
1503 | print(" FAILURE %s %s" % (candidate.name, repr(candidate.attrs))) | ||
1504 | |||
1505 | |||
1506 | current_context = new_context | ||
1507 | |||
1508 | if self._select_debug: | ||
1509 | print("Final verdict:") | ||
1510 | for i in current_context: | ||
1511 | print(" %s %s" % (i.name, i.attrs)) | ||
1512 | return current_context | ||
1513 | 2122 | ||
1514 | # Old names for backwards compatibility | 2123 | # Old names for backwards compatibility |
1515 | def childGenerator(self): | 2124 | def childGenerator(self): |
2125 | """Deprecated generator.""" | ||
1516 | return self.children | 2126 | return self.children |
1517 | 2127 | ||
1518 | def recursiveChildGenerator(self): | 2128 | def recursiveChildGenerator(self): |
2129 | """Deprecated generator.""" | ||
1519 | return self.descendants | 2130 | return self.descendants |
1520 | 2131 | ||
1521 | def has_key(self, key): | 2132 | def has_key(self, key): |
1522 | """This was kind of misleading because has_key() (attributes) | 2133 | """Deprecated method. This was kind of misleading because has_key() |
1523 | was different from __in__ (contents). has_key() is gone in | 2134 | (attributes) was different from __in__ (contents). |
1524 | Python 3, anyway.""" | 2135 | |
1525 | warnings.warn('has_key is deprecated. Use has_attr("%s") instead.' % ( | 2136 | has_key() is gone in Python 3, anyway. |
1526 | key)) | 2137 | """ |
2138 | warnings.warn( | ||
2139 | 'has_key is deprecated. Use has_attr(key) instead.', | ||
2140 | DeprecationWarning, stacklevel=2 | ||
2141 | ) | ||
1527 | return self.has_attr(key) | 2142 | return self.has_attr(key) |
1528 | 2143 | ||
1529 | # Next, a couple classes to represent queries and their results. | 2144 | # Next, a couple classes to represent queries and their results. |
1530 | class SoupStrainer(object): | 2145 | class SoupStrainer(object): |
1531 | """Encapsulates a number of ways of matching a markup element (tag or | 2146 | """Encapsulates a number of ways of matching a markup element (tag or |
1532 | text).""" | 2147 | string). |
2148 | |||
2149 | This is primarily used to underpin the find_* methods, but you can | ||
2150 | create one yourself and pass it in as `parse_only` to the | ||
2151 | `BeautifulSoup` constructor, to parse a subset of a large | ||
2152 | document. | ||
2153 | """ | ||
2154 | |||
2155 | def __init__(self, name=None, attrs={}, string=None, **kwargs): | ||
2156 | """Constructor. | ||
2157 | |||
2158 | The SoupStrainer constructor takes the same arguments passed | ||
2159 | into the find_* methods. See the online documentation for | ||
2160 | detailed explanations. | ||
2161 | |||
2162 | :param name: A filter on tag name. | ||
2163 | :param attrs: A dictionary of filters on attribute values. | ||
2164 | :param string: A filter for a NavigableString with specific text. | ||
2165 | :kwargs: A dictionary of filters on attribute values. | ||
2166 | """ | ||
2167 | if string is None and 'text' in kwargs: | ||
2168 | string = kwargs.pop('text') | ||
2169 | warnings.warn( | ||
2170 | "The 'text' argument to the SoupStrainer constructor is deprecated. Use 'string' instead.", | ||
2171 | DeprecationWarning, stacklevel=2 | ||
2172 | ) | ||
1533 | 2173 | ||
1534 | def __init__(self, name=None, attrs={}, text=None, **kwargs): | ||
1535 | self.name = self._normalize_search_value(name) | 2174 | self.name = self._normalize_search_value(name) |
1536 | if not isinstance(attrs, dict): | 2175 | if not isinstance(attrs, dict): |
1537 | # Treat a non-dict value for attrs as a search for the 'class' | 2176 | # Treat a non-dict value for attrs as a search for the 'class' |
@@ -1556,12 +2195,15 @@ class SoupStrainer(object): | |||
1556 | normalized_attrs[key] = self._normalize_search_value(value) | 2195 | normalized_attrs[key] = self._normalize_search_value(value) |
1557 | 2196 | ||
1558 | self.attrs = normalized_attrs | 2197 | self.attrs = normalized_attrs |
1559 | self.text = self._normalize_search_value(text) | 2198 | self.string = self._normalize_search_value(string) |
2199 | |||
2200 | # DEPRECATED but just in case someone is checking this. | ||
2201 | self.text = self.string | ||
1560 | 2202 | ||
1561 | def _normalize_search_value(self, value): | 2203 | def _normalize_search_value(self, value): |
1562 | # Leave it alone if it's a Unicode string, a callable, a | 2204 | # Leave it alone if it's a Unicode string, a callable, a |
1563 | # regular expression, a boolean, or None. | 2205 | # regular expression, a boolean, or None. |
1564 | if (isinstance(value, str) or isinstance(value, collections.abc.Callable) or hasattr(value, 'match') | 2206 | if (isinstance(value, str) or isinstance(value, Callable) or hasattr(value, 'match') |
1565 | or isinstance(value, bool) or value is None): | 2207 | or isinstance(value, bool) or value is None): |
1566 | return value | 2208 | return value |
1567 | 2209 | ||
@@ -1589,19 +2231,40 @@ class SoupStrainer(object): | |||
1589 | return str(str(value)) | 2231 | return str(str(value)) |
1590 | 2232 | ||
1591 | def __str__(self): | 2233 | def __str__(self): |
1592 | if self.text: | 2234 | """A human-readable representation of this SoupStrainer.""" |
1593 | return self.text | 2235 | if self.string: |
2236 | return self.string | ||
1594 | else: | 2237 | else: |
1595 | return "%s|%s" % (self.name, self.attrs) | 2238 | return "%s|%s" % (self.name, self.attrs) |
1596 | 2239 | ||
1597 | def search_tag(self, markup_name=None, markup_attrs={}): | 2240 | def search_tag(self, markup_name=None, markup_attrs={}): |
2241 | """Check whether a Tag with the given name and attributes would | ||
2242 | match this SoupStrainer. | ||
2243 | |||
2244 | Used prospectively to decide whether to even bother creating a Tag | ||
2245 | object. | ||
2246 | |||
2247 | :param markup_name: A tag name as found in some markup. | ||
2248 | :param markup_attrs: A dictionary of attributes as found in some markup. | ||
2249 | |||
2250 | :return: True if the prospective tag would match this SoupStrainer; | ||
2251 | False otherwise. | ||
2252 | """ | ||
1598 | found = None | 2253 | found = None |
1599 | markup = None | 2254 | markup = None |
1600 | if isinstance(markup_name, Tag): | 2255 | if isinstance(markup_name, Tag): |
1601 | markup = markup_name | 2256 | markup = markup_name |
1602 | markup_attrs = markup | 2257 | markup_attrs = markup |
2258 | |||
2259 | if isinstance(self.name, str): | ||
2260 | # Optimization for a very common case where the user is | ||
2261 | # searching for a tag with one specific name, and we're | ||
2262 | # looking at a tag with a different name. | ||
2263 | if markup and not markup.prefix and self.name != markup.name: | ||
2264 | return False | ||
2265 | |||
1603 | call_function_with_tag_data = ( | 2266 | call_function_with_tag_data = ( |
1604 | isinstance(self.name, collections.abc.Callable) | 2267 | isinstance(self.name, Callable) |
1605 | and not isinstance(markup_name, Tag)) | 2268 | and not isinstance(markup_name, Tag)) |
1606 | 2269 | ||
1607 | if ((not self.name) | 2270 | if ((not self.name) |
@@ -1630,13 +2293,22 @@ class SoupStrainer(object): | |||
1630 | found = markup | 2293 | found = markup |
1631 | else: | 2294 | else: |
1632 | found = markup_name | 2295 | found = markup_name |
1633 | if found and self.text and not self._matches(found.string, self.text): | 2296 | if found and self.string and not self._matches(found.string, self.string): |
1634 | found = None | 2297 | found = None |
1635 | return found | 2298 | return found |
2299 | |||
2300 | # For BS3 compatibility. | ||
1636 | searchTag = search_tag | 2301 | searchTag = search_tag |
1637 | 2302 | ||
1638 | def search(self, markup): | 2303 | def search(self, markup): |
1639 | # print 'looking for %s in %s' % (self, markup) | 2304 | """Find all items in `markup` that match this SoupStrainer. |
2305 | |||
2306 | Used by the core _find_all() method, which is ultimately | ||
2307 | called by all find_* methods. | ||
2308 | |||
2309 | :param markup: A PageElement or a list of them. | ||
2310 | """ | ||
2311 | # print('looking for %s in %s' % (self, markup)) | ||
1640 | found = None | 2312 | found = None |
1641 | # If given a list of items, scan it for a text element that | 2313 | # If given a list of items, scan it for a text element that |
1642 | # matches. | 2314 | # matches. |
@@ -1649,49 +2321,44 @@ class SoupStrainer(object): | |||
1649 | # If it's a Tag, make sure its name or attributes match. | 2321 | # If it's a Tag, make sure its name or attributes match. |
1650 | # Don't bother with Tags if we're searching for text. | 2322 | # Don't bother with Tags if we're searching for text. |
1651 | elif isinstance(markup, Tag): | 2323 | elif isinstance(markup, Tag): |
1652 | if not self.text or self.name or self.attrs: | 2324 | if not self.string or self.name or self.attrs: |
1653 | found = self.search_tag(markup) | 2325 | found = self.search_tag(markup) |
1654 | # If it's text, make sure the text matches. | 2326 | # If it's text, make sure the text matches. |
1655 | elif isinstance(markup, NavigableString) or \ | 2327 | elif isinstance(markup, NavigableString) or \ |
1656 | isinstance(markup, str): | 2328 | isinstance(markup, str): |
1657 | if not self.name and not self.attrs and self._matches(markup, self.text): | 2329 | if not self.name and not self.attrs and self._matches(markup, self.string): |
1658 | found = markup | 2330 | found = markup |
1659 | else: | 2331 | else: |
1660 | raise Exception( | 2332 | raise Exception( |
1661 | "I don't know how to match against a %s" % markup.__class__) | 2333 | "I don't know how to match against a %s" % markup.__class__) |
1662 | return found | 2334 | return found |
1663 | 2335 | ||
1664 | def _matches(self, markup, match_against): | 2336 | def _matches(self, markup, match_against, already_tried=None): |
1665 | # print u"Matching %s against %s" % (markup, match_against) | 2337 | # print(u"Matching %s against %s" % (markup, match_against)) |
1666 | result = False | 2338 | result = False |
1667 | if isinstance(markup, list) or isinstance(markup, tuple): | 2339 | if isinstance(markup, list) or isinstance(markup, tuple): |
1668 | # This should only happen when searching a multi-valued attribute | 2340 | # This should only happen when searching a multi-valued attribute |
1669 | # like 'class'. | 2341 | # like 'class'. |
1670 | if (isinstance(match_against, str) | 2342 | for item in markup: |
1671 | and ' ' in match_against): | 2343 | if self._matches(item, match_against): |
1672 | # A bit of a special case. If they try to match "foo | 2344 | return True |
1673 | # bar" on a multivalue attribute's value, only accept | 2345 | # We didn't match any particular value of the multivalue |
1674 | # the literal value "foo bar" | 2346 | # attribute, but maybe we match the attribute value when |
1675 | # | 2347 | # considered as a string. |
1676 | # XXX This is going to be pretty slow because we keep | 2348 | if self._matches(' '.join(markup), match_against): |
1677 | # splitting match_against. But it shouldn't come up | 2349 | return True |
1678 | # too often. | 2350 | return False |
1679 | return (whitespace_re.split(match_against) == markup) | ||
1680 | else: | ||
1681 | for item in markup: | ||
1682 | if self._matches(item, match_against): | ||
1683 | return True | ||
1684 | return False | ||
1685 | 2351 | ||
1686 | if match_against is True: | 2352 | if match_against is True: |
1687 | # True matches any non-None value. | 2353 | # True matches any non-None value. |
1688 | return markup is not None | 2354 | return markup is not None |
1689 | 2355 | ||
1690 | if isinstance(match_against, collections.abc.Callable): | 2356 | if isinstance(match_against, Callable): |
1691 | return match_against(markup) | 2357 | return match_against(markup) |
1692 | 2358 | ||
1693 | # Custom callables take the tag as an argument, but all | 2359 | # Custom callables take the tag as an argument, but all |
1694 | # other ways of matching match the tag name as a string. | 2360 | # other ways of matching match the tag name as a string. |
2361 | original_markup = markup | ||
1695 | if isinstance(markup, Tag): | 2362 | if isinstance(markup, Tag): |
1696 | markup = markup.name | 2363 | markup = markup.name |
1697 | 2364 | ||
@@ -1702,23 +2369,67 @@ class SoupStrainer(object): | |||
1702 | # None matches None, False, an empty string, an empty list, and so on. | 2369 | # None matches None, False, an empty string, an empty list, and so on. |
1703 | return not match_against | 2370 | return not match_against |
1704 | 2371 | ||
1705 | if isinstance(match_against, str): | 2372 | if (hasattr(match_against, '__iter__') |
2373 | and not isinstance(match_against, str)): | ||
2374 | # We're asked to match against an iterable of items. | ||
2375 | # The markup must be match at least one item in the | ||
2376 | # iterable. We'll try each one in turn. | ||
2377 | # | ||
2378 | # To avoid infinite recursion we need to keep track of | ||
2379 | # items we've already seen. | ||
2380 | if not already_tried: | ||
2381 | already_tried = set() | ||
2382 | for item in match_against: | ||
2383 | if item.__hash__: | ||
2384 | key = item | ||
2385 | else: | ||
2386 | key = id(item) | ||
2387 | if key in already_tried: | ||
2388 | continue | ||
2389 | else: | ||
2390 | already_tried.add(key) | ||
2391 | if self._matches(original_markup, item, already_tried): | ||
2392 | return True | ||
2393 | else: | ||
2394 | return False | ||
2395 | |||
2396 | # Beyond this point we might need to run the test twice: once against | ||
2397 | # the tag's name and once against its prefixed name. | ||
2398 | match = False | ||
2399 | |||
2400 | if not match and isinstance(match_against, str): | ||
1706 | # Exact string match | 2401 | # Exact string match |
1707 | return markup == match_against | 2402 | match = markup == match_against |
1708 | 2403 | ||
1709 | if hasattr(match_against, 'match'): | 2404 | if not match and hasattr(match_against, 'search'): |
1710 | # Regexp match | 2405 | # Regexp match |
1711 | return match_against.search(markup) | 2406 | return match_against.search(markup) |
1712 | 2407 | ||
1713 | if hasattr(match_against, '__iter__'): | 2408 | if (not match |
1714 | # The markup must be an exact match against something | 2409 | and isinstance(original_markup, Tag) |
1715 | # in the iterable. | 2410 | and original_markup.prefix): |
1716 | return markup in match_against | 2411 | # Try the whole thing again with the prefixed tag name. |
2412 | return self._matches( | ||
2413 | original_markup.prefix + ':' + original_markup.name, match_against | ||
2414 | ) | ||
2415 | |||
2416 | return match | ||
1717 | 2417 | ||
1718 | 2418 | ||
1719 | class ResultSet(list): | 2419 | class ResultSet(list): |
1720 | """A ResultSet is just a list that keeps track of the SoupStrainer | 2420 | """A ResultSet is just a list that keeps track of the SoupStrainer |
1721 | that created it.""" | 2421 | that created it.""" |
1722 | def __init__(self, source, result=()): | 2422 | def __init__(self, source, result=()): |
2423 | """Constructor. | ||
2424 | |||
2425 | :param source: A SoupStrainer. | ||
2426 | :param result: A list of PageElements. | ||
2427 | """ | ||
1723 | super(ResultSet, self).__init__(result) | 2428 | super(ResultSet, self).__init__(result) |
1724 | self.source = source | 2429 | self.source = source |
2430 | |||
2431 | def __getattr__(self, key): | ||
2432 | """Raise a helpful exception to explain a common code fix.""" | ||
2433 | raise AttributeError( | ||
2434 | "ResultSet object has no attribute '%s'. You're probably treating a list of elements like a single element. Did you call find_all() when you meant to call find()?" % key | ||
2435 | ) | ||
diff --git a/bitbake/lib/bs4/formatter.py b/bitbake/lib/bs4/formatter.py new file mode 100644 index 0000000000..9fa1b57cb6 --- /dev/null +++ b/bitbake/lib/bs4/formatter.py | |||
@@ -0,0 +1,185 @@ | |||
1 | from bs4.dammit import EntitySubstitution | ||
2 | |||
class Formatter(EntitySubstitution):
    """Describes a strategy to use when outputting a parse tree to a string.

    Some parts of this strategy come from the distinction between
    HTML4, HTML5, and XML. Others are configurable by the user.

    Formatters are passed in as the `formatter` argument to methods
    like `PageElement.encode`. Most people won't need to think about
    formatters, and most people who need to think about them can pass
    in one of these predefined strings as `formatter` rather than
    making a new Formatter object:

    For HTML documents:
     * 'html' - HTML entity substitution for generic HTML documents. (default)
     * 'html5' - HTML entity substitution for HTML5 documents, as
       well as some optimizations in the way tags are rendered.
     * 'minimal' - Only make the substitutions necessary to guarantee
       valid HTML.
     * None - Do not perform any substitution. This will be faster
       but may result in invalid markup.

    For XML documents:
     * 'html' - Entity substitution for XHTML documents.
     * 'minimal' - Only make the substitutions necessary to guarantee
       valid XML. (default)
     * None - Do not perform any substitution. This will be faster
       but may result in invalid markup.
    """
    # Registries of XML and HTML formatters.
    XML_FORMATTERS = {}
    HTML_FORMATTERS = {}

    HTML = 'html'
    XML = 'xml'

    HTML_DEFAULTS = dict(
        cdata_containing_tags=set(["script", "style"]),
    )

    def _default(self, language, value, kwarg):
        # Resolve a constructor argument: an explicit value always wins;
        # otherwise XML gets an empty set and HTML gets the documented
        # default from HTML_DEFAULTS.
        if value is not None:
            return value
        if language == self.XML:
            return set()
        return self.HTML_DEFAULTS[kwarg]

    def __init__(
            self, language=None, entity_substitution=None,
            void_element_close_prefix='/', cdata_containing_tags=None,
            empty_attributes_are_booleans=False, indent=1,
    ):
        r"""Constructor.

        :param language: This should be Formatter.XML if you are formatting
           XML markup and Formatter.HTML if you are formatting HTML markup.

        :param entity_substitution: A function to call to replace special
           characters with XML/HTML entities. For examples, see 
           bs4.dammit.EntitySubstitution.substitute_html and substitute_xml.
        :param void_element_close_prefix: By default, void elements
           are represented as <tag/> (XML rules) rather than <tag>
           (HTML rules). To get <tag>, pass in the empty string.
        :param cdata_containing_tags: The list of tags that are defined
           as containing CDATA in this dialect. For example, in HTML,
           <script> and <style> tags are defined as containing CDATA,
           and their contents should not be formatted.
        :param empty_attributes_are_booleans: Render attributes whose value
            is the empty string as HTML-style boolean attributes.
            (Attributes whose value is None are always rendered this way.)

        :param indent: If indent is a non-negative integer or string,
            then the contents of elements will be indented
            appropriately when pretty-printing. An indent level of 0,
            negative, or "" will only insert newlines. Using a
            positive integer indent indents that many spaces per
            level. If indent is a string (such as "\t"), that string
            is used to indent each level. The default behavior is to
            indent one space per level.
        """
        self.language = language
        self.entity_substitution = entity_substitution
        self.void_element_close_prefix = void_element_close_prefix
        self.cdata_containing_tags = self._default(
            language, cdata_containing_tags, 'cdata_containing_tags'
        )
        self.empty_attributes_are_booleans = empty_attributes_are_booleans
        # Normalize `indent` to the literal string inserted per nesting
        # level when pretty-printing.
        if indent is None:
            indent = 0
        if isinstance(indent, int):
            # Negative counts behave the same as 0: newlines only.
            indent = ' ' * max(indent, 0)
        elif not isinstance(indent, str):
            # Any unrecognized type falls back to the default single space.
            indent = ' '
        self.indent = indent

    def substitute(self, ns):
        """Process a string that needs to undergo entity substitution.
        This may be a string encountered in an attribute value or as
        text.

        :param ns: A string.
        :return: A string with certain characters replaced by named
           or numeric entities.
        """
        if not self.entity_substitution:
            return ns
        from .element import NavigableString
        if (isinstance(ns, NavigableString)
            and ns.parent is not None
            and ns.parent.name in self.cdata_containing_tags):
            # Text inside a CDATA-containing tag (e.g. <script>) must be
            # emitted verbatim.
            return ns
        # Substitute.
        return self.entity_substitution(ns)

    def attribute_value(self, value):
        """Process the value of an attribute.

        :param value: A string.
        :return: A string with certain characters replaced by named
           or numeric entities.
        """
        return self.substitute(value)

    def attributes(self, tag):
        """Reorder a tag's attributes however you want.

        By default, attributes are sorted alphabetically. This makes
        behavior consistent between Python 2 and Python 3, and preserves
        backwards compatibility with older versions of Beautiful Soup.

        If `empty_attributes_are_booleans` is True, then attributes whose
        values are set to the empty string will be treated as boolean
        attributes.
        """
        if tag.attrs is None:
            return []
        return sorted(
            (k, (None if self.empty_attributes_are_booleans and v == '' else v))
            for k, v in list(tag.attrs.items())
        )
147 | |||
class HTMLFormatter(Formatter):
    """A Formatter preconfigured for HTML output."""
    REGISTRY = {}

    def __init__(self, *args, **kwargs):
        # Pin the language to HTML; all other settings pass through.
        Formatter.__init__(self, self.HTML, *args, **kwargs)
153 | |||
154 | |||
class XMLFormatter(Formatter):
    """A Formatter preconfigured for XML output."""
    REGISTRY = {}

    def __init__(self, *args, **kwargs):
        # Pin the language to XML; all other settings pass through.
        Formatter.__init__(self, self.XML, *args, **kwargs)
160 | |||
161 | |||
# Set up aliases for the default formatters.
HTMLFormatter.REGISTRY['html'] = HTMLFormatter(
    entity_substitution=EntitySubstitution.substitute_html
)
HTMLFormatter.REGISTRY["html5"] = HTMLFormatter(
    entity_substitution=EntitySubstitution.substitute_html,
    void_element_close_prefix=None,
    empty_attributes_are_booleans=True,
)
HTMLFormatter.REGISTRY["minimal"] = HTMLFormatter(
    entity_substitution=EntitySubstitution.substitute_xml
)
HTMLFormatter.REGISTRY[None] = HTMLFormatter(
    entity_substitution=None
)
XMLFormatter.REGISTRY["html"] = XMLFormatter(
    entity_substitution=EntitySubstitution.substitute_html
)
XMLFormatter.REGISTRY["minimal"] = XMLFormatter(
    entity_substitution=EntitySubstitution.substitute_xml
)
# BUG FIX: this entry previously wrapped one Formatter inside another --
# Formatter(Formatter(Formatter.XML, ...)) -- so the outer instance's
# `language` was a Formatter object rather than Formatter.XML, which made
# _default() fall through to the HTML defaults and treat <script>/<style>
# as CDATA-containing even for XML. Register a plain XML formatter, for
# consistency with the other XML registry entries.
XMLFormatter.REGISTRY[None] = XMLFormatter(
    entity_substitution=None
)
diff --git a/bitbake/lib/bs4/testing.py b/bitbake/lib/bs4/testing.py deleted file mode 100644 index 6584ecf303..0000000000 --- a/bitbake/lib/bs4/testing.py +++ /dev/null | |||
@@ -1,686 +0,0 @@ | |||
1 | """Helper classes for tests.""" | ||
2 | |||
3 | __license__ = "MIT" | ||
4 | |||
5 | import pickle | ||
6 | import copy | ||
7 | import unittest | ||
8 | from unittest import TestCase | ||
9 | from bs4 import BeautifulSoup | ||
10 | from bs4.element import ( | ||
11 | CharsetMetaAttributeValue, | ||
12 | Comment, | ||
13 | ContentMetaAttributeValue, | ||
14 | Doctype, | ||
15 | SoupStrainer, | ||
16 | ) | ||
17 | |||
18 | from bs4.builder._htmlparser import HTMLParserTreeBuilder | ||
19 | default_builder = HTMLParserTreeBuilder | ||
20 | |||
21 | |||
class SoupTest(unittest.TestCase):
    """Base class providing parsing helpers for tree-builder tests."""

    @property
    def default_builder(self):
        # A fresh instance of the module-level default builder class.
        return default_builder()

    def soup(self, markup, **kwargs):
        """Build a Beautiful Soup object from markup."""
        chosen_builder = kwargs.pop('builder', self.default_builder)
        return BeautifulSoup(markup, builder=chosen_builder, **kwargs)

    def document_for(self, markup):
        """Turn an HTML fragment into a document.

        The details depend on the builder.
        """
        return self.default_builder.test_fragment_to_document(markup)

    def assertSoupEquals(self, to_parse, compare_parsed_to=None):
        """Assert that parsing `to_parse` and serializing it again yields
        the expected document text."""
        parsed = BeautifulSoup(to_parse, builder=self.default_builder)
        expected = to_parse if compare_parsed_to is None else compare_parsed_to
        self.assertEqual(parsed.decode(), self.document_for(expected))

    def assertConnectedness(self, element):
        """Ensure that next_element and previous_element are properly
        set for all descendants of the given element.
        """
        previous = None
        for descendant in element.descendants:
            if previous:
                self.assertEqual(descendant, previous.next_element)
                self.assertEqual(previous, descendant.previous_element)
            previous = descendant
58 | |||
59 | class HTMLTreeBuilderSmokeTest(SoupTest): | ||
60 | |||
61 | """A basic test of a treebuilder's competence. | ||
62 | |||
63 | Any HTML treebuilder, present or future, should be able to pass | ||
64 | these tests. With invalid markup, there's room for interpretation, | ||
65 | and different parsers can handle it differently. But with the | ||
66 | markup in these tests, there's not much room for interpretation. | ||
67 | """ | ||
68 | |||
69 | def test_pickle_and_unpickle_identity(self): | ||
70 | # Pickling a tree, then unpickling it, yields a tree identical | ||
71 | # to the original. | ||
72 | tree = self.soup("<a><b>foo</a>") | ||
73 | dumped = pickle.dumps(tree, 2) | ||
74 | loaded = pickle.loads(dumped) | ||
75 | self.assertEqual(loaded.__class__, BeautifulSoup) | ||
76 | self.assertEqual(loaded.decode(), tree.decode()) | ||
77 | |||
78 | def assertDoctypeHandled(self, doctype_fragment): | ||
79 | """Assert that a given doctype string is handled correctly.""" | ||
80 | doctype_str, soup = self._document_with_doctype(doctype_fragment) | ||
81 | |||
82 | # Make sure a Doctype object was created. | ||
83 | doctype = soup.contents[0] | ||
84 | self.assertEqual(doctype.__class__, Doctype) | ||
85 | self.assertEqual(doctype, doctype_fragment) | ||
86 | self.assertEqual(str(soup)[:len(doctype_str)], doctype_str) | ||
87 | |||
88 | # Make sure that the doctype was correctly associated with the | ||
89 | # parse tree and that the rest of the document parsed. | ||
90 | self.assertEqual(soup.p.contents[0], 'foo') | ||
91 | |||
92 | def _document_with_doctype(self, doctype_fragment): | ||
93 | """Generate and parse a document with the given doctype.""" | ||
94 | doctype = '<!DOCTYPE %s>' % doctype_fragment | ||
95 | markup = doctype + '\n<p>foo</p>' | ||
96 | soup = self.soup(markup) | ||
97 | return doctype, soup | ||
98 | |||
99 | def test_normal_doctypes(self): | ||
100 | """Make sure normal, everyday HTML doctypes are handled correctly.""" | ||
101 | self.assertDoctypeHandled("html") | ||
102 | self.assertDoctypeHandled( | ||
103 | 'html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"') | ||
104 | |||
105 | def test_empty_doctype(self): | ||
106 | soup = self.soup("<!DOCTYPE>") | ||
107 | doctype = soup.contents[0] | ||
108 | self.assertEqual("", doctype.strip()) | ||
109 | |||
110 | def test_public_doctype_with_url(self): | ||
111 | doctype = 'html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"' | ||
112 | self.assertDoctypeHandled(doctype) | ||
113 | |||
114 | def test_system_doctype(self): | ||
115 | self.assertDoctypeHandled('foo SYSTEM "http://www.example.com/"') | ||
116 | |||
117 | def test_namespaced_system_doctype(self): | ||
118 | # We can handle a namespaced doctype with a system ID. | ||
119 | self.assertDoctypeHandled('xsl:stylesheet SYSTEM "htmlent.dtd"') | ||
120 | |||
121 | def test_namespaced_public_doctype(self): | ||
122 | # Test a namespaced doctype with a public id. | ||
123 | self.assertDoctypeHandled('xsl:stylesheet PUBLIC "htmlent.dtd"') | ||
124 | |||
125 | def test_real_xhtml_document(self): | ||
126 | """A real XHTML document should come out more or less the same as it went in.""" | ||
127 | markup = b"""<?xml version="1.0" encoding="utf-8"?> | ||
128 | <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"> | ||
129 | <html xmlns="http://www.w3.org/1999/xhtml"> | ||
130 | <head><title>Hello.</title></head> | ||
131 | <body>Goodbye.</body> | ||
132 | </html>""" | ||
133 | soup = self.soup(markup) | ||
134 | self.assertEqual( | ||
135 | soup.encode("utf-8").replace(b"\n", b""), | ||
136 | markup.replace(b"\n", b"")) | ||
137 | |||
138 | def test_processing_instruction(self): | ||
139 | markup = b"""<?PITarget PIContent?>""" | ||
140 | soup = self.soup(markup) | ||
141 | self.assertEqual(markup, soup.encode("utf8")) | ||
142 | |||
143 | def test_deepcopy(self): | ||
144 | """Make sure you can copy the tree builder. | ||
145 | |||
146 | This is important because the builder is part of a | ||
147 | BeautifulSoup object, and we want to be able to copy that. | ||
148 | """ | ||
149 | copy.deepcopy(self.default_builder) | ||
150 | |||
151 | def test_p_tag_is_never_empty_element(self): | ||
152 | """A <p> tag is never designated as an empty-element tag. | ||
153 | |||
154 | Even if the markup shows it as an empty-element tag, it | ||
155 | shouldn't be presented that way. | ||
156 | """ | ||
157 | soup = self.soup("<p/>") | ||
158 | self.assertFalse(soup.p.is_empty_element) | ||
159 | self.assertEqual(str(soup.p), "<p></p>") | ||
160 | |||
161 | def test_unclosed_tags_get_closed(self): | ||
162 | """A tag that's not closed by the end of the document should be closed. | ||
163 | |||
164 | This applies to all tags except empty-element tags. | ||
165 | """ | ||
166 | self.assertSoupEquals("<p>", "<p></p>") | ||
167 | self.assertSoupEquals("<b>", "<b></b>") | ||
168 | |||
169 | self.assertSoupEquals("<br>", "<br/>") | ||
170 | |||
171 | def test_br_is_always_empty_element_tag(self): | ||
172 | """A <br> tag is designated as an empty-element tag. | ||
173 | |||
174 | Some parsers treat <br></br> as one <br/> tag, some parsers as | ||
175 | two tags, but it should always be an empty-element tag. | ||
176 | """ | ||
177 | soup = self.soup("<br></br>") | ||
178 | self.assertTrue(soup.br.is_empty_element) | ||
179 | self.assertEqual(str(soup.br), "<br/>") | ||
180 | |||
181 | def test_nested_formatting_elements(self): | ||
182 | self.assertSoupEquals("<em><em></em></em>") | ||
183 | |||
184 | def test_double_head(self): | ||
185 | html = '''<!DOCTYPE html> | ||
186 | <html> | ||
187 | <head> | ||
188 | <title>Ordinary HEAD element test</title> | ||
189 | </head> | ||
190 | <script type="text/javascript"> | ||
191 | alert("Help!"); | ||
192 | </script> | ||
193 | <body> | ||
194 | Hello, world! | ||
195 | </body> | ||
196 | </html> | ||
197 | ''' | ||
198 | soup = self.soup(html) | ||
199 | self.assertEqual("text/javascript", soup.find('script')['type']) | ||
200 | |||
201 | def test_comment(self): | ||
202 | # Comments are represented as Comment objects. | ||
203 | markup = "<p>foo<!--foobar-->baz</p>" | ||
204 | self.assertSoupEquals(markup) | ||
205 | |||
206 | soup = self.soup(markup) | ||
207 | comment = soup.find(text="foobar") | ||
208 | self.assertEqual(comment.__class__, Comment) | ||
209 | |||
210 | # The comment is properly integrated into the tree. | ||
211 | foo = soup.find(text="foo") | ||
212 | self.assertEqual(comment, foo.next_element) | ||
213 | baz = soup.find(text="baz") | ||
214 | self.assertEqual(comment, baz.previous_element) | ||
215 | |||
216 | def test_preserved_whitespace_in_pre_and_textarea(self): | ||
217 | """Whitespace must be preserved in <pre> and <textarea> tags.""" | ||
218 | self.assertSoupEquals("<pre> </pre>") | ||
219 | self.assertSoupEquals("<textarea> woo </textarea>") | ||
220 | |||
221 | def test_nested_inline_elements(self): | ||
222 | """Inline elements can be nested indefinitely.""" | ||
223 | b_tag = "<b>Inside a B tag</b>" | ||
224 | self.assertSoupEquals(b_tag) | ||
225 | |||
226 | nested_b_tag = "<p>A <i>nested <b>tag</b></i></p>" | ||
227 | self.assertSoupEquals(nested_b_tag) | ||
228 | |||
229 | double_nested_b_tag = "<p>A <a>doubly <i>nested <b>tag</b></i></a></p>" | ||
230 | self.assertSoupEquals(nested_b_tag) | ||
231 | |||
232 | def test_nested_block_level_elements(self): | ||
233 | """Block elements can be nested.""" | ||
234 | soup = self.soup('<blockquote><p><b>Foo</b></p></blockquote>') | ||
235 | blockquote = soup.blockquote | ||
236 | self.assertEqual(blockquote.p.b.string, 'Foo') | ||
237 | self.assertEqual(blockquote.b.string, 'Foo') | ||
238 | |||
239 | def test_correctly_nested_tables(self): | ||
240 | """One table can go inside another one.""" | ||
241 | markup = ('<table id="1">' | ||
242 | '<tr>' | ||
243 | "<td>Here's another table:" | ||
244 | '<table id="2">' | ||
245 | '<tr><td>foo</td></tr>' | ||
246 | '</table></td>') | ||
247 | |||
248 | self.assertSoupEquals( | ||
249 | markup, | ||
250 | '<table id="1"><tr><td>Here\'s another table:' | ||
251 | '<table id="2"><tr><td>foo</td></tr></table>' | ||
252 | '</td></tr></table>') | ||
253 | |||
254 | self.assertSoupEquals( | ||
255 | "<table><thead><tr><td>Foo</td></tr></thead>" | ||
256 | "<tbody><tr><td>Bar</td></tr></tbody>" | ||
257 | "<tfoot><tr><td>Baz</td></tr></tfoot></table>") | ||
258 | |||
259 | def test_deeply_nested_multivalued_attribute(self): | ||
260 | # html5lib can set the attributes of the same tag many times | ||
261 | # as it rearranges the tree. This has caused problems with | ||
262 | # multivalued attributes. | ||
263 | markup = '<table><div><div class="css"></div></div></table>' | ||
264 | soup = self.soup(markup) | ||
265 | self.assertEqual(["css"], soup.div.div['class']) | ||
266 | |||
267 | def test_multivalued_attribute_on_html(self): | ||
268 | # html5lib uses a different API to set the attributes ot the | ||
269 | # <html> tag. This has caused problems with multivalued | ||
270 | # attributes. | ||
271 | markup = '<html class="a b"></html>' | ||
272 | soup = self.soup(markup) | ||
273 | self.assertEqual(["a", "b"], soup.html['class']) | ||
274 | |||
275 | def test_angle_brackets_in_attribute_values_are_escaped(self): | ||
276 | self.assertSoupEquals('<a b="<a>"></a>', '<a b="<a>"></a>') | ||
277 | |||
278 | def test_entities_in_attributes_converted_to_unicode(self): | ||
279 | expect = '<p id="pi\N{LATIN SMALL LETTER N WITH TILDE}ata"></p>' | ||
280 | self.assertSoupEquals('<p id="piñata"></p>', expect) | ||
281 | self.assertSoupEquals('<p id="piñata"></p>', expect) | ||
282 | self.assertSoupEquals('<p id="piñata"></p>', expect) | ||
283 | self.assertSoupEquals('<p id="piñata"></p>', expect) | ||
284 | |||
285 | def test_entities_in_text_converted_to_unicode(self): | ||
286 | expect = '<p>pi\N{LATIN SMALL LETTER N WITH TILDE}ata</p>' | ||
287 | self.assertSoupEquals("<p>piñata</p>", expect) | ||
288 | self.assertSoupEquals("<p>piñata</p>", expect) | ||
289 | self.assertSoupEquals("<p>piñata</p>", expect) | ||
290 | self.assertSoupEquals("<p>piñata</p>", expect) | ||
291 | |||
292 | def test_quot_entity_converted_to_quotation_mark(self): | ||
293 | self.assertSoupEquals("<p>I said "good day!"</p>", | ||
294 | '<p>I said "good day!"</p>') | ||
295 | |||
296 | def test_out_of_range_entity(self): | ||
297 | expect = "\N{REPLACEMENT CHARACTER}" | ||
298 | self.assertSoupEquals("�", expect) | ||
299 | self.assertSoupEquals("�", expect) | ||
300 | self.assertSoupEquals("�", expect) | ||
301 | |||
302 | def test_multipart_strings(self): | ||
303 | "Mostly to prevent a recurrence of a bug in the html5lib treebuilder." | ||
304 | soup = self.soup("<html><h2>\nfoo</h2><p></p></html>") | ||
305 | self.assertEqual("p", soup.h2.string.next_element.name) | ||
306 | self.assertEqual("p", soup.p.name) | ||
307 | self.assertConnectedness(soup) | ||
308 | |||
309 | def test_head_tag_between_head_and_body(self): | ||
310 | "Prevent recurrence of a bug in the html5lib treebuilder." | ||
311 | content = """<html><head></head> | ||
312 | <link></link> | ||
313 | <body>foo</body> | ||
314 | </html> | ||
315 | """ | ||
316 | soup = self.soup(content) | ||
317 | self.assertNotEqual(None, soup.html.body) | ||
318 | self.assertConnectedness(soup) | ||
319 | |||
320 | def test_multiple_copies_of_a_tag(self): | ||
321 | "Prevent recurrence of a bug in the html5lib treebuilder." | ||
322 | content = """<!DOCTYPE html> | ||
323 | <html> | ||
324 | <body> | ||
325 | <article id="a" > | ||
326 | <div><a href="1"></div> | ||
327 | <footer> | ||
328 | <a href="2"></a> | ||
329 | </footer> | ||
330 | </article> | ||
331 | </body> | ||
332 | </html> | ||
333 | """ | ||
334 | soup = self.soup(content) | ||
335 | self.assertConnectedness(soup.article) | ||
336 | |||
337 | def test_basic_namespaces(self): | ||
338 | """Parsers don't need to *understand* namespaces, but at the | ||
339 | very least they should not choke on namespaces or lose | ||
340 | data.""" | ||
341 | |||
342 | markup = b'<html xmlns="http://www.w3.org/1999/xhtml" xmlns:mathml="http://www.w3.org/1998/Math/MathML" xmlns:svg="http://www.w3.org/2000/svg"><head></head><body><mathml:msqrt>4</mathml:msqrt><b svg:fill="red"></b></body></html>' | ||
343 | soup = self.soup(markup) | ||
344 | self.assertEqual(markup, soup.encode()) | ||
345 | html = soup.html | ||
346 | self.assertEqual('http://www.w3.org/1999/xhtml', soup.html['xmlns']) | ||
347 | self.assertEqual( | ||
348 | 'http://www.w3.org/1998/Math/MathML', soup.html['xmlns:mathml']) | ||
349 | self.assertEqual( | ||
350 | 'http://www.w3.org/2000/svg', soup.html['xmlns:svg']) | ||
351 | |||
352 | def test_multivalued_attribute_value_becomes_list(self): | ||
353 | markup = b'<a class="foo bar">' | ||
354 | soup = self.soup(markup) | ||
355 | self.assertEqual(['foo', 'bar'], soup.a['class']) | ||
356 | |||
357 | # | ||
358 | # Generally speaking, tests below this point are more tests of | ||
359 | # Beautiful Soup than tests of the tree builders. But parsers are | ||
360 | # weird, so we run these tests separately for every tree builder | ||
361 | # to detect any differences between them. | ||
362 | # | ||
363 | |||
364 | def test_can_parse_unicode_document(self): | ||
365 | # A seemingly innocuous document... but it's in Unicode! And | ||
366 | # it contains characters that can't be represented in the | ||
367 | # encoding found in the declaration! The horror! | ||
368 | markup = '<html><head><meta encoding="euc-jp"></head><body>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</body>' | ||
369 | soup = self.soup(markup) | ||
370 | self.assertEqual('Sacr\xe9 bleu!', soup.body.string) | ||
371 | |||
372 | def test_soupstrainer(self): | ||
373 | """Parsers should be able to work with SoupStrainers.""" | ||
374 | strainer = SoupStrainer("b") | ||
375 | soup = self.soup("A <b>bold</b> <meta/> <i>statement</i>", | ||
376 | parse_only=strainer) | ||
377 | self.assertEqual(soup.decode(), "<b>bold</b>") | ||
378 | |||
379 | def test_single_quote_attribute_values_become_double_quotes(self): | ||
380 | self.assertSoupEquals("<foo attr='bar'></foo>", | ||
381 | '<foo attr="bar"></foo>') | ||
382 | |||
383 | def test_attribute_values_with_nested_quotes_are_left_alone(self): | ||
384 | text = """<foo attr='bar "brawls" happen'>a</foo>""" | ||
385 | self.assertSoupEquals(text) | ||
386 | |||
387 | def test_attribute_values_with_double_nested_quotes_get_quoted(self): | ||
388 | text = """<foo attr='bar "brawls" happen'>a</foo>""" | ||
389 | soup = self.soup(text) | ||
390 | soup.foo['attr'] = 'Brawls happen at "Bob\'s Bar"' | ||
391 | self.assertSoupEquals( | ||
392 | soup.foo.decode(), | ||
393 | """<foo attr="Brawls happen at "Bob\'s Bar"">a</foo>""") | ||
394 | |||
395 | def test_ampersand_in_attribute_value_gets_escaped(self): | ||
396 | self.assertSoupEquals('<this is="really messed up & stuff"></this>', | ||
397 | '<this is="really messed up & stuff"></this>') | ||
398 | |||
399 | self.assertSoupEquals( | ||
400 | '<a href="http://example.org?a=1&b=2;3">foo</a>', | ||
401 | '<a href="http://example.org?a=1&b=2;3">foo</a>') | ||
402 | |||
403 | def test_escaped_ampersand_in_attribute_value_is_left_alone(self): | ||
404 | self.assertSoupEquals('<a href="http://example.org?a=1&b=2;3"></a>') | ||
405 | |||
406 | def test_entities_in_strings_converted_during_parsing(self): | ||
407 | # Both XML and HTML entities are converted to Unicode characters | ||
408 | # during parsing. | ||
409 | text = "<p><<sacré bleu!>></p>" | ||
410 | expected = "<p><<sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>></p>" | ||
411 | self.assertSoupEquals(text, expected) | ||
412 | |||
413 | def test_smart_quotes_converted_on_the_way_in(self): | ||
414 | # Microsoft smart quotes are converted to Unicode characters during | ||
415 | # parsing. | ||
416 | quote = b"<p>\x91Foo\x92</p>" | ||
417 | soup = self.soup(quote) | ||
418 | self.assertEqual( | ||
419 | soup.p.string, | ||
420 | "\N{LEFT SINGLE QUOTATION MARK}Foo\N{RIGHT SINGLE QUOTATION MARK}") | ||
421 | |||
422 | def test_non_breaking_spaces_converted_on_the_way_in(self): | ||
423 | soup = self.soup("<a> </a>") | ||
424 | self.assertEqual(soup.a.string, "\N{NO-BREAK SPACE}" * 2) | ||
425 | |||
426 | def test_entities_converted_on_the_way_out(self): | ||
427 | text = "<p><<sacré bleu!>></p>" | ||
428 | expected = "<p><<sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>></p>".encode("utf-8") | ||
429 | soup = self.soup(text) | ||
430 | self.assertEqual(soup.p.encode("utf-8"), expected) | ||
431 | |||
432 | def test_real_iso_latin_document(self): | ||
433 | # Smoke test of interrelated functionality, using an | ||
434 | # easy-to-understand document. | ||
435 | |||
436 | # Here it is in Unicode. Note that it claims to be in ISO-Latin-1. | ||
437 | unicode_html = '<html><head><meta content="text/html; charset=ISO-Latin-1" http-equiv="Content-type"/></head><body><p>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</p></body></html>' | ||
438 | |||
439 | # That's because we're going to encode it into ISO-Latin-1, and use | ||
440 | # that to test. | ||
441 | iso_latin_html = unicode_html.encode("iso-8859-1") | ||
442 | |||
443 | # Parse the ISO-Latin-1 HTML. | ||
444 | soup = self.soup(iso_latin_html) | ||
445 | # Encode it to UTF-8. | ||
446 | result = soup.encode("utf-8") | ||
447 | |||
448 | # What do we expect the result to look like? Well, it would | ||
449 | # look like unicode_html, except that the META tag would say | ||
450 | # UTF-8 instead of ISO-Latin-1. | ||
451 | expected = unicode_html.replace("ISO-Latin-1", "utf-8") | ||
452 | |||
453 | # And, of course, it would be in UTF-8, not Unicode. | ||
454 | expected = expected.encode("utf-8") | ||
455 | |||
456 | # Ta-da! | ||
457 | self.assertEqual(result, expected) | ||
458 | |||
459 | def test_real_shift_jis_document(self): | ||
460 | # Smoke test to make sure the parser can handle a document in | ||
461 | # Shift-JIS encoding, without choking. | ||
462 | shift_jis_html = ( | ||
463 | b'<html><head></head><body><pre>' | ||
464 | b'\x82\xb1\x82\xea\x82\xcdShift-JIS\x82\xc5\x83R\x81[\x83f' | ||
465 | b'\x83B\x83\x93\x83O\x82\xb3\x82\xea\x82\xbd\x93\xfa\x96{\x8c' | ||
466 | b'\xea\x82\xcc\x83t\x83@\x83C\x83\x8b\x82\xc5\x82\xb7\x81B' | ||
467 | b'</pre></body></html>') | ||
468 | unicode_html = shift_jis_html.decode("shift-jis") | ||
469 | soup = self.soup(unicode_html) | ||
470 | |||
471 | # Make sure the parse tree is correctly encoded to various | ||
472 | # encodings. | ||
473 | self.assertEqual(soup.encode("utf-8"), unicode_html.encode("utf-8")) | ||
474 | self.assertEqual(soup.encode("euc_jp"), unicode_html.encode("euc_jp")) | ||
475 | |||
476 | def test_real_hebrew_document(self): | ||
477 | # A real-world test to make sure we can convert ISO-8859-9 (a | ||
478 | # Hebrew encoding) to UTF-8. | ||
479 | hebrew_document = b'<html><head><title>Hebrew (ISO 8859-8) in Visual Directionality</title></head><body><h1>Hebrew (ISO 8859-8) in Visual Directionality</h1>\xed\xe5\xec\xf9</body></html>' | ||
480 | soup = self.soup( | ||
481 | hebrew_document, from_encoding="iso8859-8") | ||
482 | self.assertEqual(soup.original_encoding, 'iso8859-8') | ||
483 | self.assertEqual( | ||
484 | soup.encode('utf-8'), | ||
485 | hebrew_document.decode("iso8859-8").encode("utf-8")) | ||
486 | |||
487 | def test_meta_tag_reflects_current_encoding(self): | ||
488 | # Here's the <meta> tag saying that a document is | ||
489 | # encoded in Shift-JIS. | ||
490 | meta_tag = ('<meta content="text/html; charset=x-sjis" ' | ||
491 | 'http-equiv="Content-type"/>') | ||
492 | |||
493 | # Here's a document incorporating that meta tag. | ||
494 | shift_jis_html = ( | ||
495 | '<html><head>\n%s\n' | ||
496 | '<meta http-equiv="Content-language" content="ja"/>' | ||
497 | '</head><body>Shift-JIS markup goes here.') % meta_tag | ||
498 | soup = self.soup(shift_jis_html) | ||
499 | |||
500 | # Parse the document, and the charset is seemingly unaffected. | ||
501 | parsed_meta = soup.find('meta', {'http-equiv': 'Content-type'}) | ||
502 | content = parsed_meta['content'] | ||
503 | self.assertEqual('text/html; charset=x-sjis', content) | ||
504 | |||
505 | # But that value is actually a ContentMetaAttributeValue object. | ||
506 | self.assertTrue(isinstance(content, ContentMetaAttributeValue)) | ||
507 | |||
508 | # And it will take on a value that reflects its current | ||
509 | # encoding. | ||
510 | self.assertEqual('text/html; charset=utf8', content.encode("utf8")) | ||
511 | |||
512 | # For the rest of the story, see TestSubstitutions in | ||
513 | # test_tree.py. | ||
514 | |||
515 | def test_html5_style_meta_tag_reflects_current_encoding(self): | ||
516 | # Here's the <meta> tag saying that a document is | ||
517 | # encoded in Shift-JIS. | ||
518 | meta_tag = ('<meta id="encoding" charset="x-sjis" />') | ||
519 | |||
520 | # Here's a document incorporating that meta tag. | ||
521 | shift_jis_html = ( | ||
522 | '<html><head>\n%s\n' | ||
523 | '<meta http-equiv="Content-language" content="ja"/>' | ||
524 | '</head><body>Shift-JIS markup goes here.') % meta_tag | ||
525 | soup = self.soup(shift_jis_html) | ||
526 | |||
527 | # Parse the document, and the charset is seemingly unaffected. | ||
528 | parsed_meta = soup.find('meta', id="encoding") | ||
529 | charset = parsed_meta['charset'] | ||
530 | self.assertEqual('x-sjis', charset) | ||
531 | |||
532 | # But that value is actually a CharsetMetaAttributeValue object. | ||
533 | self.assertTrue(isinstance(charset, CharsetMetaAttributeValue)) | ||
534 | |||
535 | # And it will take on a value that reflects its current | ||
536 | # encoding. | ||
537 | self.assertEqual('utf8', charset.encode("utf8")) | ||
538 | |||
539 | def test_tag_with_no_attributes_can_have_attributes_added(self): | ||
540 | data = self.soup("<a>text</a>") | ||
541 | data.a['foo'] = 'bar' | ||
542 | self.assertEqual('<a foo="bar">text</a>', data.a.decode()) | ||
543 | |||
544 | class XMLTreeBuilderSmokeTest(SoupTest): | ||
545 | |||
546 | def test_pickle_and_unpickle_identity(self): | ||
547 | # Pickling a tree, then unpickling it, yields a tree identical | ||
548 | # to the original. | ||
549 | tree = self.soup("<a><b>foo</a>") | ||
550 | dumped = pickle.dumps(tree, 2) | ||
551 | loaded = pickle.loads(dumped) | ||
552 | self.assertEqual(loaded.__class__, BeautifulSoup) | ||
553 | self.assertEqual(loaded.decode(), tree.decode()) | ||
554 | |||
555 | def test_docstring_generated(self): | ||
556 | soup = self.soup("<root/>") | ||
557 | self.assertEqual( | ||
558 | soup.encode(), b'<?xml version="1.0" encoding="utf-8"?>\n<root/>') | ||
559 | |||
560 | def test_xml_declaration(self): | ||
561 | markup = b"""<?xml version="1.0" encoding="utf8"?>\n<foo/>""" | ||
562 | soup = self.soup(markup) | ||
563 | self.assertEqual(markup, soup.encode("utf8")) | ||
564 | |||
565 | def test_real_xhtml_document(self): | ||
566 | """A real XHTML document should come out *exactly* the same as it went in.""" | ||
567 | markup = b"""<?xml version="1.0" encoding="utf-8"?> | ||
568 | <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"> | ||
569 | <html xmlns="http://www.w3.org/1999/xhtml"> | ||
570 | <head><title>Hello.</title></head> | ||
571 | <body>Goodbye.</body> | ||
572 | </html>""" | ||
573 | soup = self.soup(markup) | ||
574 | self.assertEqual( | ||
575 | soup.encode("utf-8"), markup) | ||
576 | |||
577 | def test_formatter_processes_script_tag_for_xml_documents(self): | ||
578 | doc = """ | ||
579 | <script type="text/javascript"> | ||
580 | </script> | ||
581 | """ | ||
582 | soup = BeautifulSoup(doc, "lxml-xml") | ||
583 | # lxml would have stripped this while parsing, but we can add | ||
584 | # it later. | ||
585 | soup.script.string = 'console.log("< < hey > > ");' | ||
586 | encoded = soup.encode() | ||
587 | self.assertTrue(b"< < hey > >" in encoded) | ||
588 | |||
589 | def test_can_parse_unicode_document(self): | ||
590 | markup = '<?xml version="1.0" encoding="euc-jp"><root>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</root>' | ||
591 | soup = self.soup(markup) | ||
592 | self.assertEqual('Sacr\xe9 bleu!', soup.root.string) | ||
593 | |||
594 | def test_popping_namespaced_tag(self): | ||
595 | markup = '<rss xmlns:dc="foo"><dc:creator>b</dc:creator><dc:date>2012-07-02T20:33:42Z</dc:date><dc:rights>c</dc:rights><image>d</image></rss>' | ||
596 | soup = self.soup(markup) | ||
597 | self.assertEqual( | ||
598 | str(soup.rss), markup) | ||
599 | |||
600 | def test_docstring_includes_correct_encoding(self): | ||
601 | soup = self.soup("<root/>") | ||
602 | self.assertEqual( | ||
603 | soup.encode("latin1"), | ||
604 | b'<?xml version="1.0" encoding="latin1"?>\n<root/>') | ||
605 | |||
606 | def test_large_xml_document(self): | ||
607 | """A large XML document should come out the same as it went in.""" | ||
608 | markup = (b'<?xml version="1.0" encoding="utf-8"?>\n<root>' | ||
609 | + b'0' * (2**12) | ||
610 | + b'</root>') | ||
611 | soup = self.soup(markup) | ||
612 | self.assertEqual(soup.encode("utf-8"), markup) | ||
613 | |||
614 | |||
615 | def test_tags_are_empty_element_if_and_only_if_they_are_empty(self): | ||
616 | self.assertSoupEquals("<p>", "<p/>") | ||
617 | self.assertSoupEquals("<p>foo</p>") | ||
618 | |||
619 | def test_namespaces_are_preserved(self): | ||
620 | markup = '<root xmlns:a="http://example.com/" xmlns:b="http://example.net/"><a:foo>This tag is in the a namespace</a:foo><b:foo>This tag is in the b namespace</b:foo></root>' | ||
621 | soup = self.soup(markup) | ||
622 | root = soup.root | ||
623 | self.assertEqual("http://example.com/", root['xmlns:a']) | ||
624 | self.assertEqual("http://example.net/", root['xmlns:b']) | ||
625 | |||
626 | def test_closing_namespaced_tag(self): | ||
627 | markup = '<p xmlns:dc="http://purl.org/dc/elements/1.1/"><dc:date>20010504</dc:date></p>' | ||
628 | soup = self.soup(markup) | ||
629 | self.assertEqual(str(soup.p), markup) | ||
630 | |||
631 | def test_namespaced_attributes(self): | ||
632 | markup = '<foo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><bar xsi:schemaLocation="http://www.example.com"/></foo>' | ||
633 | soup = self.soup(markup) | ||
634 | self.assertEqual(str(soup.foo), markup) | ||
635 | |||
636 | def test_namespaced_attributes_xml_namespace(self): | ||
637 | markup = '<foo xml:lang="fr">bar</foo>' | ||
638 | soup = self.soup(markup) | ||
639 | self.assertEqual(str(soup.foo), markup) | ||
640 | |||
641 | class HTML5TreeBuilderSmokeTest(HTMLTreeBuilderSmokeTest): | ||
642 | """Smoke test for a tree builder that supports HTML5.""" | ||
643 | |||
644 | def test_real_xhtml_document(self): | ||
645 | # Since XHTML is not HTML5, HTML5 parsers are not tested to handle | ||
646 | # XHTML documents in any particular way. | ||
647 | pass | ||
648 | |||
649 | def test_html_tags_have_namespace(self): | ||
650 | markup = "<a>" | ||
651 | soup = self.soup(markup) | ||
652 | self.assertEqual("http://www.w3.org/1999/xhtml", soup.a.namespace) | ||
653 | |||
654 | def test_svg_tags_have_namespace(self): | ||
655 | markup = '<svg><circle/></svg>' | ||
656 | soup = self.soup(markup) | ||
657 | namespace = "http://www.w3.org/2000/svg" | ||
658 | self.assertEqual(namespace, soup.svg.namespace) | ||
659 | self.assertEqual(namespace, soup.circle.namespace) | ||
660 | |||
661 | |||
662 | def test_mathml_tags_have_namespace(self): | ||
663 | markup = '<math><msqrt>5</msqrt></math>' | ||
664 | soup = self.soup(markup) | ||
665 | namespace = 'http://www.w3.org/1998/Math/MathML' | ||
666 | self.assertEqual(namespace, soup.math.namespace) | ||
667 | self.assertEqual(namespace, soup.msqrt.namespace) | ||
668 | |||
669 | def test_xml_declaration_becomes_comment(self): | ||
670 | markup = '<?xml version="1.0" encoding="utf-8"?><html></html>' | ||
671 | soup = self.soup(markup) | ||
672 | self.assertTrue(isinstance(soup.contents[0], Comment)) | ||
673 | self.assertEqual(soup.contents[0], '?xml version="1.0" encoding="utf-8"?') | ||
674 | self.assertEqual("html", soup.contents[0].next_element.name) | ||
675 | |||
676 | def skipIf(condition, reason): | ||
677 | def nothing(test, *args, **kwargs): | ||
678 | return None | ||
679 | |||
680 | def decorator(test_item): | ||
681 | if condition: | ||
682 | return nothing | ||
683 | else: | ||
684 | return test_item | ||
685 | |||
686 | return decorator | ||
diff --git a/bitbake/lib/bs4/tests/__init__.py b/bitbake/lib/bs4/tests/__init__.py deleted file mode 100644 index 142c8cc3f1..0000000000 --- a/bitbake/lib/bs4/tests/__init__.py +++ /dev/null | |||
@@ -1 +0,0 @@ | |||
1 | "The beautifulsoup tests." | ||
diff --git a/bitbake/lib/bs4/tests/test_builder_registry.py b/bitbake/lib/bs4/tests/test_builder_registry.py deleted file mode 100644 index 90cad82933..0000000000 --- a/bitbake/lib/bs4/tests/test_builder_registry.py +++ /dev/null | |||
@@ -1,147 +0,0 @@ | |||
1 | """Tests of the builder registry.""" | ||
2 | |||
3 | import unittest | ||
4 | import warnings | ||
5 | |||
6 | from bs4 import BeautifulSoup | ||
7 | from bs4.builder import ( | ||
8 | builder_registry as registry, | ||
9 | HTMLParserTreeBuilder, | ||
10 | TreeBuilderRegistry, | ||
11 | ) | ||
12 | |||
13 | try: | ||
14 | from bs4.builder import HTML5TreeBuilder | ||
15 | HTML5LIB_PRESENT = True | ||
16 | except ImportError: | ||
17 | HTML5LIB_PRESENT = False | ||
18 | |||
19 | try: | ||
20 | from bs4.builder import ( | ||
21 | LXMLTreeBuilderForXML, | ||
22 | LXMLTreeBuilder, | ||
23 | ) | ||
24 | LXML_PRESENT = True | ||
25 | except ImportError: | ||
26 | LXML_PRESENT = False | ||
27 | |||
28 | |||
29 | class BuiltInRegistryTest(unittest.TestCase): | ||
30 | """Test the built-in registry with the default builders registered.""" | ||
31 | |||
32 | def test_combination(self): | ||
33 | if LXML_PRESENT: | ||
34 | self.assertEqual(registry.lookup('fast', 'html'), | ||
35 | LXMLTreeBuilder) | ||
36 | |||
37 | if LXML_PRESENT: | ||
38 | self.assertEqual(registry.lookup('permissive', 'xml'), | ||
39 | LXMLTreeBuilderForXML) | ||
40 | self.assertEqual(registry.lookup('strict', 'html'), | ||
41 | HTMLParserTreeBuilder) | ||
42 | if HTML5LIB_PRESENT: | ||
43 | self.assertEqual(registry.lookup('html5lib', 'html'), | ||
44 | HTML5TreeBuilder) | ||
45 | |||
46 | def test_lookup_by_markup_type(self): | ||
47 | if LXML_PRESENT: | ||
48 | self.assertEqual(registry.lookup('html'), LXMLTreeBuilder) | ||
49 | self.assertEqual(registry.lookup('xml'), LXMLTreeBuilderForXML) | ||
50 | else: | ||
51 | self.assertEqual(registry.lookup('xml'), None) | ||
52 | if HTML5LIB_PRESENT: | ||
53 | self.assertEqual(registry.lookup('html'), HTML5TreeBuilder) | ||
54 | else: | ||
55 | self.assertEqual(registry.lookup('html'), HTMLParserTreeBuilder) | ||
56 | |||
57 | def test_named_library(self): | ||
58 | if LXML_PRESENT: | ||
59 | self.assertEqual(registry.lookup('lxml', 'xml'), | ||
60 | LXMLTreeBuilderForXML) | ||
61 | self.assertEqual(registry.lookup('lxml', 'html'), | ||
62 | LXMLTreeBuilder) | ||
63 | if HTML5LIB_PRESENT: | ||
64 | self.assertEqual(registry.lookup('html5lib'), | ||
65 | HTML5TreeBuilder) | ||
66 | |||
67 | self.assertEqual(registry.lookup('html.parser'), | ||
68 | HTMLParserTreeBuilder) | ||
69 | |||
70 | def test_beautifulsoup_constructor_does_lookup(self): | ||
71 | |||
72 | with warnings.catch_warnings(record=True) as w: | ||
73 | # This will create a warning about not explicitly | ||
74 | # specifying a parser, but we'll ignore it. | ||
75 | |||
76 | # You can pass in a string. | ||
77 | BeautifulSoup("", features="html") | ||
78 | # Or a list of strings. | ||
79 | BeautifulSoup("", features=["html", "fast"]) | ||
80 | |||
81 | # You'll get an exception if BS can't find an appropriate | ||
82 | # builder. | ||
83 | self.assertRaises(ValueError, BeautifulSoup, | ||
84 | "", features="no-such-feature") | ||
85 | |||
86 | class RegistryTest(unittest.TestCase): | ||
87 | """Test the TreeBuilderRegistry class in general.""" | ||
88 | |||
89 | def setUp(self): | ||
90 | self.registry = TreeBuilderRegistry() | ||
91 | |||
92 | def builder_for_features(self, *feature_list): | ||
93 | cls = type('Builder_' + '_'.join(feature_list), | ||
94 | (object,), {'features' : feature_list}) | ||
95 | |||
96 | self.registry.register(cls) | ||
97 | return cls | ||
98 | |||
99 | def test_register_with_no_features(self): | ||
100 | builder = self.builder_for_features() | ||
101 | |||
102 | # Since the builder advertises no features, you can't find it | ||
103 | # by looking up features. | ||
104 | self.assertEqual(self.registry.lookup('foo'), None) | ||
105 | |||
106 | # But you can find it by doing a lookup with no features, if | ||
107 | # this happens to be the only registered builder. | ||
108 | self.assertEqual(self.registry.lookup(), builder) | ||
109 | |||
110 | def test_register_with_features_makes_lookup_succeed(self): | ||
111 | builder = self.builder_for_features('foo', 'bar') | ||
112 | self.assertEqual(self.registry.lookup('foo'), builder) | ||
113 | self.assertEqual(self.registry.lookup('bar'), builder) | ||
114 | |||
115 | def test_lookup_fails_when_no_builder_implements_feature(self): | ||
116 | builder = self.builder_for_features('foo', 'bar') | ||
117 | self.assertEqual(self.registry.lookup('baz'), None) | ||
118 | |||
119 | def test_lookup_gets_most_recent_registration_when_no_feature_specified(self): | ||
120 | builder1 = self.builder_for_features('foo') | ||
121 | builder2 = self.builder_for_features('bar') | ||
122 | self.assertEqual(self.registry.lookup(), builder2) | ||
123 | |||
124 | def test_lookup_fails_when_no_tree_builders_registered(self): | ||
125 | self.assertEqual(self.registry.lookup(), None) | ||
126 | |||
127 | def test_lookup_gets_most_recent_builder_supporting_all_features(self): | ||
128 | has_one = self.builder_for_features('foo') | ||
129 | has_the_other = self.builder_for_features('bar') | ||
130 | has_both_early = self.builder_for_features('foo', 'bar', 'baz') | ||
131 | has_both_late = self.builder_for_features('foo', 'bar', 'quux') | ||
132 | lacks_one = self.builder_for_features('bar') | ||
133 | has_the_other = self.builder_for_features('foo') | ||
134 | |||
135 | # There are two builders featuring 'foo' and 'bar', but | ||
136 | # the one that also features 'quux' was registered later. | ||
137 | self.assertEqual(self.registry.lookup('foo', 'bar'), | ||
138 | has_both_late) | ||
139 | |||
140 | # There is only one builder featuring 'foo', 'bar', and 'baz'. | ||
141 | self.assertEqual(self.registry.lookup('foo', 'bar', 'baz'), | ||
142 | has_both_early) | ||
143 | |||
144 | def test_lookup_fails_when_cannot_reconcile_requested_features(self): | ||
145 | builder1 = self.builder_for_features('foo', 'bar') | ||
146 | builder2 = self.builder_for_features('foo', 'baz') | ||
147 | self.assertEqual(self.registry.lookup('bar', 'baz'), None) | ||
diff --git a/bitbake/lib/bs4/tests/test_docs.py b/bitbake/lib/bs4/tests/test_docs.py deleted file mode 100644 index d1d76a33bf..0000000000 --- a/bitbake/lib/bs4/tests/test_docs.py +++ /dev/null | |||
@@ -1,32 +0,0 @@ | |||
1 | "Test harness for doctests." | ||
2 | |||
3 | # pylint: disable-msg=E0611,W0142 | ||
4 | |||
5 | __metaclass__ = type | ||
6 | __all__ = [ | ||
7 | 'additional_tests', | ||
8 | ] | ||
9 | |||
10 | import doctest | ||
11 | #from pkg_resources import ( | ||
12 | # resource_filename, resource_exists, resource_listdir, cleanup_resources) | ||
13 | |||
14 | DOCTEST_FLAGS = ( | ||
15 | doctest.ELLIPSIS | | ||
16 | doctest.NORMALIZE_WHITESPACE | | ||
17 | doctest.REPORT_NDIFF) | ||
18 | |||
19 | # def additional_tests(): | ||
20 | # "Run the doc tests (README.txt and docs/*, if any exist)" | ||
21 | # doctest_files = [ | ||
22 | # os.path.abspath(resource_filename('bs4', 'README.txt'))] | ||
23 | # if resource_exists('bs4', 'docs'): | ||
24 | # for name in resource_listdir('bs4', 'docs'): | ||
25 | # if name.endswith('.txt'): | ||
26 | # doctest_files.append( | ||
27 | # os.path.abspath( | ||
28 | # resource_filename('bs4', 'docs/%s' % name))) | ||
29 | # kwargs = dict(module_relative=False, optionflags=DOCTEST_FLAGS) | ||
30 | # atexit.register(cleanup_resources) | ||
31 | # return unittest.TestSuite(( | ||
32 | # doctest.DocFileSuite(*doctest_files, **kwargs))) | ||
diff --git a/bitbake/lib/bs4/tests/test_html5lib.py b/bitbake/lib/bs4/tests/test_html5lib.py deleted file mode 100644 index a7494ca5ba..0000000000 --- a/bitbake/lib/bs4/tests/test_html5lib.py +++ /dev/null | |||
@@ -1,98 +0,0 @@ | |||
1 | """Tests to ensure that the html5lib tree builder generates good trees.""" | ||
2 | |||
3 | import warnings | ||
4 | |||
5 | try: | ||
6 | from bs4.builder import HTML5TreeBuilder | ||
7 | HTML5LIB_PRESENT = True | ||
8 | except ImportError as e: | ||
9 | HTML5LIB_PRESENT = False | ||
10 | from bs4.element import SoupStrainer | ||
11 | from bs4.testing import ( | ||
12 | HTML5TreeBuilderSmokeTest, | ||
13 | SoupTest, | ||
14 | skipIf, | ||
15 | ) | ||
16 | |||
17 | @skipIf( | ||
18 | not HTML5LIB_PRESENT, | ||
19 | "html5lib seems not to be present, not testing its tree builder.") | ||
20 | class HTML5LibBuilderSmokeTest(SoupTest, HTML5TreeBuilderSmokeTest): | ||
21 | """See ``HTML5TreeBuilderSmokeTest``.""" | ||
22 | |||
23 | @property | ||
24 | def default_builder(self): | ||
25 | return HTML5TreeBuilder() | ||
26 | |||
27 | def test_soupstrainer(self): | ||
28 | # The html5lib tree builder does not support SoupStrainers. | ||
29 | strainer = SoupStrainer("b") | ||
30 | markup = "<p>A <b>bold</b> statement.</p>" | ||
31 | with warnings.catch_warnings(record=True) as w: | ||
32 | soup = self.soup(markup, parse_only=strainer) | ||
33 | self.assertEqual( | ||
34 | soup.decode(), self.document_for(markup)) | ||
35 | |||
36 | self.assertTrue( | ||
37 | "the html5lib tree builder doesn't support parse_only" in | ||
38 | str(w[0].message)) | ||
39 | |||
40 | def test_correctly_nested_tables(self): | ||
41 | """html5lib inserts <tbody> tags where other parsers don't.""" | ||
42 | markup = ('<table id="1">' | ||
43 | '<tr>' | ||
44 | "<td>Here's another table:" | ||
45 | '<table id="2">' | ||
46 | '<tr><td>foo</td></tr>' | ||
47 | '</table></td>') | ||
48 | |||
49 | self.assertSoupEquals( | ||
50 | markup, | ||
51 | '<table id="1"><tbody><tr><td>Here\'s another table:' | ||
52 | '<table id="2"><tbody><tr><td>foo</td></tr></tbody></table>' | ||
53 | '</td></tr></tbody></table>') | ||
54 | |||
55 | self.assertSoupEquals( | ||
56 | "<table><thead><tr><td>Foo</td></tr></thead>" | ||
57 | "<tbody><tr><td>Bar</td></tr></tbody>" | ||
58 | "<tfoot><tr><td>Baz</td></tr></tfoot></table>") | ||
59 | |||
60 | def test_xml_declaration_followed_by_doctype(self): | ||
61 | markup = '''<?xml version="1.0" encoding="utf-8"?> | ||
62 | <!DOCTYPE html> | ||
63 | <html> | ||
64 | <head> | ||
65 | </head> | ||
66 | <body> | ||
67 | <p>foo</p> | ||
68 | </body> | ||
69 | </html>''' | ||
70 | soup = self.soup(markup) | ||
71 | # Verify that we can reach the <p> tag; this means the tree is connected. | ||
72 | self.assertEqual(b"<p>foo</p>", soup.p.encode()) | ||
73 | |||
74 | def test_reparented_markup(self): | ||
75 | markup = '<p><em>foo</p>\n<p>bar<a></a></em></p>' | ||
76 | soup = self.soup(markup) | ||
77 | self.assertEqual("<body><p><em>foo</em></p><em>\n</em><p><em>bar<a></a></em></p></body>", soup.body.decode()) | ||
78 | self.assertEqual(2, len(soup.find_all('p'))) | ||
79 | |||
80 | |||
81 | def test_reparented_markup_ends_with_whitespace(self): | ||
82 | markup = '<p><em>foo</p>\n<p>bar<a></a></em></p>\n' | ||
83 | soup = self.soup(markup) | ||
84 | self.assertEqual("<body><p><em>foo</em></p><em>\n</em><p><em>bar<a></a></em></p>\n</body>", soup.body.decode()) | ||
85 | self.assertEqual(2, len(soup.find_all('p'))) | ||
86 | |||
87 | def test_processing_instruction(self): | ||
88 | """Processing instructions become comments.""" | ||
89 | markup = b"""<?PITarget PIContent?>""" | ||
90 | soup = self.soup(markup) | ||
91 | assert str(soup).startswith("<!--?PITarget PIContent?-->") | ||
92 | |||
93 | def test_cloned_multivalue_node(self): | ||
94 | markup = b"""<a class="my_class"><p></a>""" | ||
95 | soup = self.soup(markup) | ||
96 | a1, a2 = soup.find_all('a') | ||
97 | self.assertEqual(a1, a2) | ||
98 | assert a1 is not a2 | ||
diff --git a/bitbake/lib/bs4/tests/test_htmlparser.py b/bitbake/lib/bs4/tests/test_htmlparser.py deleted file mode 100644 index 30a25e6709..0000000000 --- a/bitbake/lib/bs4/tests/test_htmlparser.py +++ /dev/null | |||
@@ -1,31 +0,0 @@ | |||
1 | """Tests to ensure that the html.parser tree builder generates good | ||
2 | trees.""" | ||
3 | |||
4 | import pickle | ||
5 | from bs4.testing import SoupTest, HTMLTreeBuilderSmokeTest | ||
6 | from bs4.builder import HTMLParserTreeBuilder | ||
7 | |||
8 | class HTMLParserTreeBuilderSmokeTest(SoupTest, HTMLTreeBuilderSmokeTest): | ||
9 | |||
10 | @property | ||
11 | def default_builder(self): | ||
12 | return HTMLParserTreeBuilder() | ||
13 | |||
14 | def test_namespaced_system_doctype(self): | ||
15 | # html.parser can't handle namespaced doctypes, so skip this one. | ||
16 | pass | ||
17 | |||
18 | def test_namespaced_public_doctype(self): | ||
19 | # html.parser can't handle namespaced doctypes, so skip this one. | ||
20 | pass | ||
21 | |||
22 | def test_builder_is_pickled(self): | ||
23 | """Unlike most tree builders, HTMLParserTreeBuilder and will | ||
24 | be restored after pickling. | ||
25 | """ | ||
26 | tree = self.soup("<a><b>foo</a>") | ||
27 | dumped = pickle.dumps(tree, 2) | ||
28 | loaded = pickle.loads(dumped) | ||
29 | self.assertTrue(isinstance(loaded.builder, type(tree.builder))) | ||
30 | |||
31 | |||
diff --git a/bitbake/lib/bs4/tests/test_lxml.py b/bitbake/lib/bs4/tests/test_lxml.py deleted file mode 100644 index 6b6cdd07cb..0000000000 --- a/bitbake/lib/bs4/tests/test_lxml.py +++ /dev/null | |||
@@ -1,70 +0,0 @@ | |||
1 | """Tests to ensure that the lxml tree builder generates good trees.""" | ||
2 | |||
3 | import warnings | ||
4 | |||
5 | try: | ||
6 | import lxml.etree | ||
7 | LXML_PRESENT = True | ||
8 | LXML_VERSION = lxml.etree.LXML_VERSION | ||
9 | except ImportError as e: | ||
10 | LXML_PRESENT = False | ||
11 | LXML_VERSION = (0,) | ||
12 | |||
13 | if LXML_PRESENT: | ||
14 | from bs4.builder import LXMLTreeBuilder, LXMLTreeBuilderForXML | ||
15 | |||
16 | from bs4 import BeautifulStoneSoup | ||
17 | from bs4.testing import skipIf | ||
18 | from bs4.testing import ( | ||
19 | HTMLTreeBuilderSmokeTest, | ||
20 | XMLTreeBuilderSmokeTest, | ||
21 | SoupTest, | ||
22 | skipIf, | ||
23 | ) | ||
24 | |||
25 | @skipIf( | ||
26 | not LXML_PRESENT, | ||
27 | "lxml seems not to be present, not testing its tree builder.") | ||
28 | class LXMLTreeBuilderSmokeTest(SoupTest, HTMLTreeBuilderSmokeTest): | ||
29 | """See ``HTMLTreeBuilderSmokeTest``.""" | ||
30 | |||
31 | @property | ||
32 | def default_builder(self): | ||
33 | return LXMLTreeBuilder() | ||
34 | |||
35 | def test_out_of_range_entity(self): | ||
36 | self.assertSoupEquals( | ||
37 | "<p>foo�bar</p>", "<p>foobar</p>") | ||
38 | self.assertSoupEquals( | ||
39 | "<p>foo�bar</p>", "<p>foobar</p>") | ||
40 | self.assertSoupEquals( | ||
41 | "<p>foo�bar</p>", "<p>foobar</p>") | ||
42 | |||
43 | # In lxml < 2.3.5, an empty doctype causes a segfault. Skip this | ||
44 | # test if an old version of lxml is installed. | ||
45 | |||
46 | @skipIf( | ||
47 | not LXML_PRESENT or LXML_VERSION < (2,3,5,0), | ||
48 | "Skipping doctype test for old version of lxml to avoid segfault.") | ||
49 | def test_empty_doctype(self): | ||
50 | soup = self.soup("<!DOCTYPE>") | ||
51 | doctype = soup.contents[0] | ||
52 | self.assertEqual("", doctype.strip()) | ||
53 | |||
54 | def test_beautifulstonesoup_is_xml_parser(self): | ||
55 | # Make sure that the deprecated BSS class uses an xml builder | ||
56 | # if one is installed. | ||
57 | with warnings.catch_warnings(record=True) as w: | ||
58 | soup = BeautifulStoneSoup("<b />") | ||
59 | self.assertEqual("<b/>", str(soup.b)) | ||
60 | self.assertTrue("BeautifulStoneSoup class is deprecated" in str(w[0].message)) | ||
61 | |||
62 | @skipIf( | ||
63 | not LXML_PRESENT, | ||
64 | "lxml seems not to be present, not testing its XML tree builder.") | ||
65 | class LXMLXMLTreeBuilderSmokeTest(SoupTest, XMLTreeBuilderSmokeTest): | ||
66 | """See ``HTMLTreeBuilderSmokeTest``.""" | ||
67 | |||
68 | @property | ||
69 | def default_builder(self): | ||
70 | return LXMLTreeBuilderForXML() | ||
diff --git a/bitbake/lib/bs4/tests/test_soup.py b/bitbake/lib/bs4/tests/test_soup.py deleted file mode 100644 index 6ad3cb3765..0000000000 --- a/bitbake/lib/bs4/tests/test_soup.py +++ /dev/null | |||
@@ -1,479 +0,0 @@ | |||
1 | # -*- coding: utf-8 -*- | ||
2 | """Tests of Beautiful Soup as a whole.""" | ||
3 | |||
4 | import logging | ||
5 | import unittest | ||
6 | import sys | ||
7 | import tempfile | ||
8 | |||
9 | from bs4 import BeautifulSoup | ||
10 | from bs4.element import ( | ||
11 | CharsetMetaAttributeValue, | ||
12 | ContentMetaAttributeValue, | ||
13 | SoupStrainer, | ||
14 | NamespacedAttribute, | ||
15 | ) | ||
16 | import bs4.dammit | ||
17 | from bs4.dammit import ( | ||
18 | EntitySubstitution, | ||
19 | UnicodeDammit, | ||
20 | EncodingDetector, | ||
21 | ) | ||
22 | from bs4.testing import ( | ||
23 | SoupTest, | ||
24 | skipIf, | ||
25 | ) | ||
26 | import warnings | ||
27 | |||
28 | try: | ||
29 | from bs4.builder import LXMLTreeBuilder, LXMLTreeBuilderForXML | ||
30 | LXML_PRESENT = True | ||
31 | except ImportError as e: | ||
32 | LXML_PRESENT = False | ||
33 | |||
34 | PYTHON_2_PRE_2_7 = (sys.version_info < (2,7)) | ||
35 | PYTHON_3_PRE_3_2 = (sys.version_info[0] == 3 and sys.version_info < (3,2)) | ||
36 | |||
37 | class TestConstructor(SoupTest): | ||
38 | |||
39 | def test_short_unicode_input(self): | ||
40 | data = "<h1>éé</h1>" | ||
41 | soup = self.soup(data) | ||
42 | self.assertEqual("éé", soup.h1.string) | ||
43 | |||
44 | def test_embedded_null(self): | ||
45 | data = "<h1>foo\0bar</h1>" | ||
46 | soup = self.soup(data) | ||
47 | self.assertEqual("foo\0bar", soup.h1.string) | ||
48 | |||
49 | def test_exclude_encodings(self): | ||
50 | utf8_data = "Räksmörgås".encode("utf-8") | ||
51 | soup = self.soup(utf8_data, exclude_encodings=["utf-8"]) | ||
52 | self.assertEqual("windows-1252", soup.original_encoding) | ||
53 | |||
54 | |||
55 | class TestWarnings(SoupTest): | ||
56 | |||
57 | def _no_parser_specified(self, s, is_there=True): | ||
58 | v = s.startswith(BeautifulSoup.NO_PARSER_SPECIFIED_WARNING[:80]) | ||
59 | self.assertTrue(v) | ||
60 | |||
61 | def test_warning_if_no_parser_specified(self): | ||
62 | with warnings.catch_warnings(record=True) as w: | ||
63 | soup = self.soup("<a><b></b></a>") | ||
64 | msg = str(w[0].message) | ||
65 | self._assert_no_parser_specified(msg) | ||
66 | |||
67 | def test_warning_if_parser_specified_too_vague(self): | ||
68 | with warnings.catch_warnings(record=True) as w: | ||
69 | soup = self.soup("<a><b></b></a>", "html") | ||
70 | msg = str(w[0].message) | ||
71 | self._assert_no_parser_specified(msg) | ||
72 | |||
73 | def test_no_warning_if_explicit_parser_specified(self): | ||
74 | with warnings.catch_warnings(record=True) as w: | ||
75 | soup = self.soup("<a><b></b></a>", "html.parser") | ||
76 | self.assertEqual([], w) | ||
77 | |||
78 | def test_parseOnlyThese_renamed_to_parse_only(self): | ||
79 | with warnings.catch_warnings(record=True) as w: | ||
80 | soup = self.soup("<a><b></b></a>", parseOnlyThese=SoupStrainer("b")) | ||
81 | msg = str(w[0].message) | ||
82 | self.assertTrue("parseOnlyThese" in msg) | ||
83 | self.assertTrue("parse_only" in msg) | ||
84 | self.assertEqual(b"<b></b>", soup.encode()) | ||
85 | |||
86 | def test_fromEncoding_renamed_to_from_encoding(self): | ||
87 | with warnings.catch_warnings(record=True) as w: | ||
88 | utf8 = b"\xc3\xa9" | ||
89 | soup = self.soup(utf8, fromEncoding="utf8") | ||
90 | msg = str(w[0].message) | ||
91 | self.assertTrue("fromEncoding" in msg) | ||
92 | self.assertTrue("from_encoding" in msg) | ||
93 | self.assertEqual("utf8", soup.original_encoding) | ||
94 | |||
95 | def test_unrecognized_keyword_argument(self): | ||
96 | self.assertRaises( | ||
97 | TypeError, self.soup, "<a>", no_such_argument=True) | ||
98 | |||
99 | class TestWarnings(SoupTest): | ||
100 | |||
101 | def test_disk_file_warning(self): | ||
102 | filehandle = tempfile.NamedTemporaryFile() | ||
103 | filename = filehandle.name | ||
104 | try: | ||
105 | with warnings.catch_warnings(record=True) as w: | ||
106 | soup = self.soup(filename) | ||
107 | msg = str(w[0].message) | ||
108 | self.assertTrue("looks like a filename" in msg) | ||
109 | finally: | ||
110 | filehandle.close() | ||
111 | |||
112 | # The file no longer exists, so Beautiful Soup will no longer issue the warning. | ||
113 | with warnings.catch_warnings(record=True) as w: | ||
114 | soup = self.soup(filename) | ||
115 | self.assertEqual(0, len(w)) | ||
116 | |||
117 | def test_url_warning(self): | ||
118 | with warnings.catch_warnings(record=True) as w: | ||
119 | soup = self.soup("http://www.crummy.com/") | ||
120 | msg = str(w[0].message) | ||
121 | self.assertTrue("looks like a URL" in msg) | ||
122 | |||
123 | with warnings.catch_warnings(record=True) as w: | ||
124 | soup = self.soup("http://www.crummy.com/ is great") | ||
125 | self.assertEqual(0, len(w)) | ||
126 | |||
127 | class TestSelectiveParsing(SoupTest): | ||
128 | |||
129 | def test_parse_with_soupstrainer(self): | ||
130 | markup = "No<b>Yes</b><a>No<b>Yes <c>Yes</c></b>" | ||
131 | strainer = SoupStrainer("b") | ||
132 | soup = self.soup(markup, parse_only=strainer) | ||
133 | self.assertEqual(soup.encode(), b"<b>Yes</b><b>Yes <c>Yes</c></b>") | ||
134 | |||
135 | |||
136 | class TestEntitySubstitution(unittest.TestCase): | ||
137 | """Standalone tests of the EntitySubstitution class.""" | ||
138 | def setUp(self): | ||
139 | self.sub = EntitySubstitution | ||
140 | |||
141 | def test_simple_html_substitution(self): | ||
142 | # Unicode characters corresponding to named HTML entites | ||
143 | # are substituted, and no others. | ||
144 | s = "foo\u2200\N{SNOWMAN}\u00f5bar" | ||
145 | self.assertEqual(self.sub.substitute_html(s), | ||
146 | "foo∀\N{SNOWMAN}õbar") | ||
147 | |||
148 | def test_smart_quote_substitution(self): | ||
149 | # MS smart quotes are a common source of frustration, so we | ||
150 | # give them a special test. | ||
151 | quotes = b"\x91\x92foo\x93\x94" | ||
152 | dammit = UnicodeDammit(quotes) | ||
153 | self.assertEqual(self.sub.substitute_html(dammit.markup), | ||
154 | "‘’foo“”") | ||
155 | |||
156 | def test_xml_converstion_includes_no_quotes_if_make_quoted_attribute_is_false(self): | ||
157 | s = 'Welcome to "my bar"' | ||
158 | self.assertEqual(self.sub.substitute_xml(s, False), s) | ||
159 | |||
160 | def test_xml_attribute_quoting_normally_uses_double_quotes(self): | ||
161 | self.assertEqual(self.sub.substitute_xml("Welcome", True), | ||
162 | '"Welcome"') | ||
163 | self.assertEqual(self.sub.substitute_xml("Bob's Bar", True), | ||
164 | '"Bob\'s Bar"') | ||
165 | |||
166 | def test_xml_attribute_quoting_uses_single_quotes_when_value_contains_double_quotes(self): | ||
167 | s = 'Welcome to "my bar"' | ||
168 | self.assertEqual(self.sub.substitute_xml(s, True), | ||
169 | "'Welcome to \"my bar\"'") | ||
170 | |||
171 | def test_xml_attribute_quoting_escapes_single_quotes_when_value_contains_both_single_and_double_quotes(self): | ||
172 | s = 'Welcome to "Bob\'s Bar"' | ||
173 | self.assertEqual( | ||
174 | self.sub.substitute_xml(s, True), | ||
175 | '"Welcome to "Bob\'s Bar""') | ||
176 | |||
177 | def test_xml_quotes_arent_escaped_when_value_is_not_being_quoted(self): | ||
178 | quoted = 'Welcome to "Bob\'s Bar"' | ||
179 | self.assertEqual(self.sub.substitute_xml(quoted), quoted) | ||
180 | |||
181 | def test_xml_quoting_handles_angle_brackets(self): | ||
182 | self.assertEqual( | ||
183 | self.sub.substitute_xml("foo<bar>"), | ||
184 | "foo<bar>") | ||
185 | |||
186 | def test_xml_quoting_handles_ampersands(self): | ||
187 | self.assertEqual(self.sub.substitute_xml("AT&T"), "AT&T") | ||
188 | |||
189 | def test_xml_quoting_including_ampersands_when_they_are_part_of_an_entity(self): | ||
190 | self.assertEqual( | ||
191 | self.sub.substitute_xml("ÁT&T"), | ||
192 | "&Aacute;T&T") | ||
193 | |||
194 | def test_xml_quoting_ignoring_ampersands_when_they_are_part_of_an_entity(self): | ||
195 | self.assertEqual( | ||
196 | self.sub.substitute_xml_containing_entities("ÁT&T"), | ||
197 | "ÁT&T") | ||
198 | |||
199 | def test_quotes_not_html_substituted(self): | ||
200 | """There's no need to do this except inside attribute values.""" | ||
201 | text = 'Bob\'s "bar"' | ||
202 | self.assertEqual(self.sub.substitute_html(text), text) | ||
203 | |||
204 | |||
class TestEncodingConversion(SoupTest):
    """Test Beautiful Soup's ability to decode and encode documents
    from and to various encodings."""

    def setUp(self):
        super(TestEncodingConversion, self).setUp()
        self.unicode_data = '<html><head><meta charset="utf-8"/></head><body><foo>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</foo></body></html>'
        self.utf8_data = self.unicode_data.encode("utf-8")
        # Just so you know what it looks like.
        self.assertEqual(
            self.utf8_data,
            b'<html><head><meta charset="utf-8"/></head><body><foo>Sacr\xc3\xa9 bleu!</foo></body></html>')

    def test_ascii_in_unicode_out(self):
        # ASCII input is converted to Unicode. The original_encoding
        # attribute is set to 'utf-8', a superset of ASCII.
        chardet = bs4.dammit.chardet_dammit
        logging.disable(logging.WARNING)
        try:
            # A stand-in detector that never guesses an encoding, so
            # chardet cannot short-circuit the logic under test.
            # (Fixed: the parameter used to be named 'str', shadowing
            # the builtin.)
            def noop(markup):
                return None
            # Disable chardet, which would realize that the ASCII is ASCII.
            bs4.dammit.chardet_dammit = noop
            # Fixed: local renamed from 'ascii', which shadowed a builtin.
            ascii_bytes = b"<foo>a</foo>"
            soup_from_ascii = self.soup(ascii_bytes)
            unicode_output = soup_from_ascii.decode()
            self.assertTrue(isinstance(unicode_output, str))
            self.assertEqual(unicode_output, self.document_for(ascii_bytes.decode()))
            self.assertEqual(soup_from_ascii.original_encoding.lower(), "utf-8")
        finally:
            # Always restore the module-level state, even on failure.
            logging.disable(logging.NOTSET)
            bs4.dammit.chardet_dammit = chardet

    def test_unicode_in_unicode_out(self):
        # Unicode input is left alone. The original_encoding attribute
        # is not set.
        soup_from_unicode = self.soup(self.unicode_data)
        self.assertEqual(soup_from_unicode.decode(), self.unicode_data)
        self.assertEqual(soup_from_unicode.foo.string, 'Sacr\xe9 bleu!')
        self.assertEqual(soup_from_unicode.original_encoding, None)

    def test_utf8_in_unicode_out(self):
        # UTF-8 input is converted to Unicode. The original_encoding
        # attribute is set.
        soup_from_utf8 = self.soup(self.utf8_data)
        self.assertEqual(soup_from_utf8.decode(), self.unicode_data)
        self.assertEqual(soup_from_utf8.foo.string, 'Sacr\xe9 bleu!')

    def test_utf8_out(self):
        # The internal data structures can be encoded as UTF-8.
        soup_from_unicode = self.soup(self.unicode_data)
        self.assertEqual(soup_from_unicode.encode('utf-8'), self.utf8_data)

    @skipIf(
        PYTHON_2_PRE_2_7 or PYTHON_3_PRE_3_2,
        "Bad HTMLParser detected; skipping test of non-ASCII characters in attribute name.")
    def test_attribute_name_containing_unicode_characters(self):
        markup = '<div><a \N{SNOWMAN}="snowman"></a></div>'
        self.assertEqual(self.soup(markup).div.encode("utf8"), markup.encode("utf8"))
264 | |||
class TestUnicodeDammit(unittest.TestCase):
    """Standalone tests of UnicodeDammit."""

    def test_unicode_input(self):
        # str input passes through undisturbed.
        markup = "I'm already Unicode! \N{SNOWMAN}"
        dammit = UnicodeDammit(markup)
        self.assertEqual(dammit.unicode_markup, markup)

    def test_smart_quotes_to_unicode(self):
        # Windows-1252 smart quotes become their Unicode equivalents
        # by default.
        markup = b"<foo>\x91\x92\x93\x94</foo>"
        dammit = UnicodeDammit(markup)
        self.assertEqual(
            dammit.unicode_markup, "<foo>\u2018\u2019\u201c\u201d</foo>")

    def test_smart_quotes_to_xml_entities(self):
        markup = b"<foo>\x91\x92\x93\x94</foo>"
        dammit = UnicodeDammit(markup, smart_quotes_to="xml")
        # Fixed: the expected value had been entity-decoded into literal
        # curly quotes, which made this test identical to the "html"
        # variant below. Restored the numeric XML entities per upstream.
        self.assertEqual(
            dammit.unicode_markup, "<foo>&#x2018;&#x2019;&#x201C;&#x201D;</foo>")

    def test_smart_quotes_to_html_entities(self):
        markup = b"<foo>\x91\x92\x93\x94</foo>"
        dammit = UnicodeDammit(markup, smart_quotes_to="html")
        # Fixed: restored the named HTML entities (see note above).
        self.assertEqual(
            dammit.unicode_markup, "<foo>&lsquo;&rsquo;&ldquo;&rdquo;</foo>")

    def test_smart_quotes_to_ascii(self):
        markup = b"<foo>\x91\x92\x93\x94</foo>"
        dammit = UnicodeDammit(markup, smart_quotes_to="ascii")
        self.assertEqual(
            dammit.unicode_markup, """<foo>''""</foo>""")

    def test_detect_utf8(self):
        utf8 = b"Sacr\xc3\xa9 bleu! \xe2\x98\x83"
        dammit = UnicodeDammit(utf8)
        self.assertEqual(dammit.original_encoding.lower(), 'utf-8')
        self.assertEqual(dammit.unicode_markup, 'Sacr\xe9 bleu! \N{SNOWMAN}')

    def test_convert_hebrew(self):
        hebrew = b"\xed\xe5\xec\xf9"
        dammit = UnicodeDammit(hebrew, ["iso-8859-8"])
        self.assertEqual(dammit.original_encoding.lower(), 'iso-8859-8')
        self.assertEqual(dammit.unicode_markup, '\u05dd\u05d5\u05dc\u05e9')

    def test_dont_see_smart_quotes_where_there_are_none(self):
        utf_8 = b"\343\202\261\343\203\274\343\202\277\343\202\244 Watch"
        dammit = UnicodeDammit(utf_8)
        self.assertEqual(dammit.original_encoding.lower(), 'utf-8')
        self.assertEqual(dammit.unicode_markup.encode("utf-8"), utf_8)

    def test_ignore_inappropriate_codecs(self):
        utf8_data = "Räksmörgås".encode("utf-8")
        dammit = UnicodeDammit(utf8_data, ["iso-8859-8"])
        self.assertEqual(dammit.original_encoding.lower(), 'utf-8')

    def test_ignore_invalid_codecs(self):
        utf8_data = "Räksmörgås".encode("utf-8")
        for bad_encoding in ['.utf8', '...', 'utF---16.!']:
            dammit = UnicodeDammit(utf8_data, [bad_encoding])
            self.assertEqual(dammit.original_encoding.lower(), 'utf-8')

    def test_exclude_encodings(self):
        # This is UTF-8.
        utf8_data = "Räksmörgås".encode("utf-8")

        # But if we exclude UTF-8 from consideration, the guess is
        # Windows-1252.
        dammit = UnicodeDammit(utf8_data, exclude_encodings=["utf-8"])
        self.assertEqual(dammit.original_encoding.lower(), 'windows-1252')

        # And if we exclude that, there is no valid guess at all.
        dammit = UnicodeDammit(
            utf8_data, exclude_encodings=["utf-8", "windows-1252"])
        self.assertEqual(dammit.original_encoding, None)

    def test_encoding_detector_replaces_junk_in_encoding_name_with_replacement_character(self):
        detected = EncodingDetector(
            b'<?xml version="1.0" encoding="UTF-\xdb" ?>')
        encodings = list(detected.encodings)
        assert 'utf-\N{REPLACEMENT CHARACTER}' in encodings

    def test_detect_html5_style_meta_tag(self):
        # All four syntactic variants of the HTML5 meta-charset tag
        # must be recognized.
        for data in (
            b'<html><meta charset="euc-jp" /></html>',
            b"<html><meta charset='euc-jp' /></html>",
            b"<html><meta charset=euc-jp /></html>",
            b"<html><meta charset=euc-jp/></html>"):
            dammit = UnicodeDammit(data, is_html=True)
            self.assertEqual(
                "euc-jp", dammit.original_encoding)

    def test_last_ditch_entity_replacement(self):
        # This is a UTF-8 document that contains bytestrings
        # completely incompatible with UTF-8 (ie. encoded with some other
        # encoding).
        #
        # Since there is no consistent encoding for the document,
        # Unicode, Dammit will eventually encode the document as UTF-8
        # and encode the incompatible characters as REPLACEMENT
        # CHARACTER.
        #
        # If chardet is installed, it will detect that the document
        # can be converted into ISO-8859-1 without errors. This happens
        # to be the wrong encoding, but it is a consistent encoding, so the
        # code we're testing here won't run.
        #
        # So we temporarily disable chardet if it's present.
        doc = b"""\357\273\277<?xml version="1.0" encoding="UTF-8"?>
<html><b>\330\250\330\252\330\261</b>
<i>\310\322\321\220\312\321\355\344</i></html>"""
        chardet = bs4.dammit.chardet_dammit
        logging.disable(logging.WARNING)
        try:
            # Fixed: the parameter used to be named 'str', shadowing
            # the builtin.
            def noop(markup):
                return None
            bs4.dammit.chardet_dammit = noop
            dammit = UnicodeDammit(doc)
            self.assertEqual(True, dammit.contains_replacement_characters)
            self.assertTrue("\ufffd" in dammit.unicode_markup)

            soup = BeautifulSoup(doc, "html.parser")
            self.assertTrue(soup.contains_replacement_characters)
        finally:
            # Restore global state even if an assertion failed.
            logging.disable(logging.NOTSET)
            bs4.dammit.chardet_dammit = chardet

    def test_byte_order_mark_removed(self):
        # A document written in UTF-16LE will have its byte order marker stripped.
        data = b'\xff\xfe<\x00a\x00>\x00\xe1\x00\xe9\x00<\x00/\x00a\x00>\x00'
        dammit = UnicodeDammit(data)
        self.assertEqual("<a>áé</a>", dammit.unicode_markup)
        self.assertEqual("utf-16le", dammit.original_encoding)

    def test_detwingle(self):
        # Here's a UTF8 document.
        utf8 = ("\N{SNOWMAN}" * 3).encode("utf8")

        # Here's a Windows-1252 document.
        windows_1252 = (
            "\N{LEFT DOUBLE QUOTATION MARK}Hi, I like Windows!"
            "\N{RIGHT DOUBLE QUOTATION MARK}").encode("windows_1252")

        # Through some unholy alchemy, they've been stuck together.
        doc = utf8 + windows_1252 + utf8

        # The document can't be turned into UTF-8:
        self.assertRaises(UnicodeDecodeError, doc.decode, "utf8")

        # Unicode, Dammit would think the whole document is
        # Windows-1252 and decode it into mojibake. But running it
        # through detwingle() first fixes the smart quotes:
        fixed = UnicodeDammit.detwingle(doc)
        # Fixed: the expected literal was mojibake in this copy;
        # restored per the upstream bs4 test suite.
        self.assertEqual(
            "☃☃☃“Hi, I like Windows!”☃☃☃", fixed.decode("utf8"))

    def test_detwingle_ignores_multibyte_characters(self):
        # Each of these characters has a UTF-8 representation ending
        # in \x93. \x93 is a smart quote if interpreted as
        # Windows-1252. But our code knows to skip over multibyte
        # UTF-8 characters, so they'll survive the process unscathed.
        for tricky_unicode_char in (
            "\N{LATIN SMALL LIGATURE OE}", # 2-byte char '\xc5\x93'
            "\N{LATIN SUBSCRIPT SMALL LETTER X}", # 3-byte char '\xe2\x82\x93'
            "\xf0\x90\x90\x93", # This is a CJK character, not sure which one.
            ):
            # Fixed: local renamed from 'input', which shadowed a builtin.
            encoded = tricky_unicode_char.encode("utf8")
            self.assertTrue(encoded.endswith(b'\x93'))
            output = UnicodeDammit.detwingle(encoded)
            self.assertEqual(output, encoded)
438 | |||
class TestNamedspacedAttribute(SoupTest):
    """Tests for NamespacedAttribute, a string subclass that keeps
    track of a namespace prefix and attribute name."""

    def test_name_may_be_none(self):
        attr = NamespacedAttribute("xmlns", None)
        self.assertEqual(attr, "xmlns")

    def test_attribute_is_equivalent_to_colon_separated_string(self):
        attr = NamespacedAttribute("a", "b")
        self.assertEqual("a:b", attr)

    def test_attributes_are_equivalent_if_prefix_and_name_identical(self):
        first = NamespacedAttribute("a", "b", "c")
        second = NamespacedAttribute("a", "b", "c")
        self.assertEqual(first, second)

        # The namespace itself plays no part in equality.
        no_namespace = NamespacedAttribute("a", "b", None)
        self.assertEqual(first, no_namespace)

        # Prefix and name, on the other hand, both matter.
        different_name = NamespacedAttribute("a", "z", "c")
        self.assertNotEqual(first, different_name)

        different_prefix = NamespacedAttribute("z", "b", "c")
        self.assertNotEqual(first, different_prefix)
464 | |||
465 | |||
class TestAttributeValueWithCharsetSubstitution(unittest.TestCase):
    """Tests for the attribute-value classes that rewrite a meta tag's
    declared charset when a document is re-encoded."""

    def test_charset_meta_attribute_value(self):
        # Fixed: this test used to share the name of the test below,
        # so it was silently shadowed and never executed.
        value = CharsetMetaAttributeValue("euc-jp")
        self.assertEqual("euc-jp", value)
        self.assertEqual("euc-jp", value.original_value)
        # Encoding the value substitutes the target charset.
        self.assertEqual("utf8", value.encode("utf8"))

    def test_content_meta_attribute_value(self):
        value = ContentMetaAttributeValue("text/html; charset=euc-jp")
        self.assertEqual("text/html; charset=euc-jp", value)
        self.assertEqual("text/html; charset=euc-jp", value.original_value)
        # Only the charset portion of the content value is rewritten.
        self.assertEqual("text/html; charset=utf8", value.encode("utf8"))
diff --git a/bitbake/lib/bs4/tests/test_tree.py b/bitbake/lib/bs4/tests/test_tree.py deleted file mode 100644 index cf0f1abe0c..0000000000 --- a/bitbake/lib/bs4/tests/test_tree.py +++ /dev/null | |||
@@ -1,2004 +0,0 @@ | |||
1 | # -*- coding: utf-8 -*- | ||
2 | """Tests for Beautiful Soup's tree traversal methods. | ||
3 | |||
4 | The tree traversal methods are the main advantage of using Beautiful | ||
5 | Soup over just using a parser. | ||
6 | |||
7 | Different parsers will build different Beautiful Soup trees given the | ||
8 | same markup, but all Beautiful Soup trees can be traversed with the | ||
9 | methods tested here. | ||
10 | """ | ||
11 | |||
12 | import copy | ||
13 | import pickle | ||
14 | import re | ||
15 | import warnings | ||
16 | from bs4 import BeautifulSoup | ||
17 | from bs4.builder import builder_registry | ||
18 | from bs4.element import ( | ||
19 | PY3K, | ||
20 | CData, | ||
21 | Comment, | ||
22 | Declaration, | ||
23 | Doctype, | ||
24 | NavigableString, | ||
25 | SoupStrainer, | ||
26 | Tag, | ||
27 | ) | ||
28 | from bs4.testing import SoupTest | ||
29 | |||
30 | XML_BUILDER_PRESENT = (builder_registry.lookup("xml") is not None) | ||
31 | LXML_PRESENT = (builder_registry.lookup("lxml") is not None) | ||
32 | |||
class TreeTest(SoupTest):
    """Shared assertion helpers for the tree-traversal tests."""

    def assertSelects(self, tags, should_match):
        """Assert that the .string of each tag equals should_match.

        Used by tests that build a document of tags each containing a
        single string, then select some of those strings by some
        mechanism.
        """
        self.assertEqual([tag.string for tag in tags], should_match)

    def assertSelectsIDs(self, tags, should_match):
        """Assert that the 'id' attribute of each tag equals should_match.

        Used by tests that build a document of tags with distinct IDs,
        then select some of those tags by some mechanism.
        """
        self.assertEqual([tag['id'] for tag in tags], should_match)
52 | |||
53 | |||
class TestFind(TreeTest):
    """Basic tests of the find() method.

    find() simply calls find_all() with limit=1, so it is not
    exercised very thoroughly here.
    """

    def test_find_tag(self):
        doc = self.soup("<a>1</a><b>2</b><a>3</a><b>4</b>")
        self.assertEqual(doc.find("b").string, "2")

    def test_unicode_text_find(self):
        doc = self.soup('<h1>Räksmörgås</h1>')
        self.assertEqual(doc.find(string='Räksmörgås'), 'Räksmörgås')

    def test_unicode_attribute_find(self):
        doc = self.soup('<h1 id="Räksmörgås">here it is</h1>')
        # Serializing first must not break subsequent lookups.
        str(doc)
        self.assertEqual("here it is", doc.find(id='Räksmörgås').text)

    def test_find_everything(self):
        # Exercises an optimization that finds all tags.
        doc = self.soup("<a>foo</a><b>bar</b>")
        self.assertEqual(2, len(doc.find_all()))

    def test_find_everything_with_name(self):
        # Exercises an optimization that finds all tags with a given name.
        doc = self.soup("<a>foo</a><b>bar</b><a>baz</a>")
        self.assertEqual(2, len(doc.find_all('a')))
84 | |||
class TestFindAll(TreeTest):
    """Basic tests of the find_all() method."""

    def test_find_all_text_nodes(self):
        # The tree can be searched for text nodes as well as tags.
        doc = self.soup("<html>Foo<b>bar</b>\xbb</html>")
        # Exact match, via both the 'string' and legacy 'text' arguments.
        self.assertEqual(doc.find_all(string="bar"), ["bar"])
        self.assertEqual(doc.find_all(text="bar"), ["bar"])
        # Any of a list of strings.
        self.assertEqual(
            doc.find_all(text=["Foo", "bar"]), ["Foo", "bar"])
        # A regular expression.
        self.assertEqual(doc.find_all(text=re.compile('.*')),
                         ["Foo", "bar", '\xbb'])
        # Anything at all.
        self.assertEqual(doc.find_all(text=True),
                         ["Foo", "bar", '\xbb'])

    def test_find_all_limit(self):
        # The 'limit' argument caps the number of results.
        doc = self.soup("<a>1</a><a>2</a><a>3</a><a>4</a><a>5</a>")
        self.assertSelects(doc.find_all('a', limit=3), ["1", "2", "3"])
        self.assertSelects(doc.find_all('a', limit=1), ["1"])
        self.assertSelects(
            doc.find_all('a', limit=10), ["1", "2", "3", "4", "5"])

        # A limit of 0 means no limit.
        self.assertSelects(
            doc.find_all('a', limit=0), ["1", "2", "3", "4", "5"])

    def test_calling_a_tag_is_calling_findall(self):
        # Calling a tag object directly is shorthand for find_all().
        doc = self.soup("<a>1</a><b>2<a id='foo'>3</a></b>")
        self.assertSelects(doc('a', limit=1), ["1"])
        self.assertSelects(doc.b(id="foo"), ["3"])

    def test_find_all_with_self_referential_data_structure_does_not_cause_infinite_recursion(self):
        doc = self.soup("<a></a>")
        # Build a list that contains itself.
        cycle = []
        cycle.append(cycle)

        # Without special code in _normalize_search_value, this would
        # recurse forever.
        self.assertEqual([], doc.find_all(cycle))

    def test_find_all_resultset(self):
        # Every find_all() variant returns a ResultSet, recognizable
        # by its 'source' attribute.
        doc = self.soup("<a></a>")
        for result in (doc.find_all("a"),
                       doc.find_all(True),
                       doc.find_all(text="foo")):
            self.assertTrue(hasattr(result, "source"))
142 | |||
143 | |||
class TestFindAllBasicNamespaces(TreeTest):
    """Namespaced tag and attribute names are matched as plain strings."""

    def test_find_by_namespaced_name(self):
        doc = self.soup('<mathml:msqrt>4</mathml:msqrt><a svg:fill="red">')
        self.assertEqual("4", doc.find("mathml:msqrt").string)
        self.assertEqual("a", doc.find(attrs={"svg:fill": "red"}).name)
150 | |||
151 | |||
class TestFindAllByName(TreeTest):
    """Test ways of finding tags by tag name."""

    def setUp(self):
        # Fixed: this used to call super(TreeTest, self).setUp(), which
        # skips TreeTest in the MRO. It only worked by accident because
        # TreeTest defines no setUp of its own.
        super(TestFindAllByName, self).setUp()
        self.tree = self.soup("""<a>First tag.</a>
<b>Second tag.</b>
<c>Third <a>Nested tag.</a> tag.</c>""")

    def test_find_all_by_tag_name(self):
        # Find all the <a> tags.
        self.assertSelects(
            self.tree.find_all('a'), ['First tag.', 'Nested tag.'])

    def test_find_all_by_name_and_text(self):
        self.assertSelects(
            self.tree.find_all('a', text='First tag.'), ['First tag.'])

        self.assertSelects(
            self.tree.find_all('a', text=True), ['First tag.', 'Nested tag.'])

        self.assertSelects(
            self.tree.find_all('a', text=re.compile("tag")),
            ['First tag.', 'Nested tag.'])

    def test_find_all_on_non_root_element(self):
        # You can call find_all on any node, not just the root.
        self.assertSelects(self.tree.c.find_all('a'), ['Nested tag.'])

    def test_calling_element_invokes_find_all(self):
        self.assertSelects(self.tree('a'), ['First tag.', 'Nested tag.'])

    def test_find_all_by_tag_strainer(self):
        self.assertSelects(
            self.tree.find_all(SoupStrainer('a')),
            ['First tag.', 'Nested tag.'])

    def test_find_all_by_tag_names(self):
        self.assertSelects(
            self.tree.find_all(['a', 'b']),
            ['First tag.', 'Second tag.', 'Nested tag.'])

    def test_find_all_by_tag_dict(self):
        self.assertSelects(
            self.tree.find_all({'a' : True, 'b' : True}),
            ['First tag.', 'Second tag.', 'Nested tag.'])

    def test_find_all_by_tag_re(self):
        self.assertSelects(
            self.tree.find_all(re.compile('^[ab]$')),
            ['First tag.', 'Second tag.', 'Nested tag.'])

    def test_find_all_with_tags_matching_method(self):
        # You can define an oracle method that determines whether
        # a tag matches the search.
        def id_matches_name(tag):
            return tag.name == tag.get('id')

        # NOTE(review): the </a> closing a <b> below looks like a typo,
        # but deliberately malformed markup is plausible in parser
        # tests — confirm before changing it.
        tree = self.soup("""<a id="a">Match 1.</a>
<a id="1">Does not match.</a>
<b id="b">Match 2.</a>""")

        self.assertSelects(
            tree.find_all(id_matches_name), ["Match 1.", "Match 2."])
217 | |||
218 | |||
class TestFindAllByAttribute(TreeTest):
    """Test ways of finding tags by attribute value."""

    def test_find_all_by_attribute_name(self):
        # You can pass in keyword arguments to find_all to search by
        # attribute.
        tree = self.soup("""
                         <a id="first">Matching a.</a>
                         <a id="second">
                          Non-matching <b id="first">Matching b.</b>a.
                         </a>""")
        self.assertSelects(tree.find_all(id='first'),
                           ["Matching a.", "Matching b."])

    def test_find_all_by_utf8_attribute_value(self):
        # Fixed: the Hebrew literals were mojibake in this copy;
        # restored to "shalom" per the upstream bs4 test suite.
        peace = "םולש".encode("utf8")
        data = '<a title="םולש"></a>'.encode("utf8")
        soup = self.soup(data)
        # The attribute matches whether given as bytes, as str, or as
        # one member of a list.
        self.assertEqual([soup.a], soup.find_all(title=peace))
        self.assertEqual([soup.a], soup.find_all(title=peace.decode("utf8")))
        self.assertEqual([soup.a], soup.find_all(title=[peace, "something else"]))

    def test_find_all_by_attribute_dict(self):
        # You can pass in a dictionary as the argument 'attrs'. This
        # lets you search for attributes like 'name' (a fixed argument
        # to find_all) and 'class' (a reserved word in Python.)
        tree = self.soup("""
                         <a name="name1" class="class1">Name match.</a>
                         <a name="name2" class="class2">Class match.</a>
                         <a name="name3" class="class3">Non-match.</a>
                         <name1>A tag called 'name1'.</name1>
                         """)

        # This doesn't do what you want.
        self.assertSelects(tree.find_all(name='name1'),
                           ["A tag called 'name1'."])
        # This does what you want.
        self.assertSelects(tree.find_all(attrs={'name' : 'name1'}),
                           ["Name match."])

        self.assertSelects(tree.find_all(attrs={'class' : 'class2'}),
                           ["Class match."])

    def test_find_all_by_class(self):
        tree = self.soup("""
                         <a class="1">Class 1.</a>
                         <a class="2">Class 2.</a>
                         <b class="1">Class 1.</b>
                         <c class="3 4">Class 3 and 4.</c>
                         """)

        # Passing in the class_ keyword argument will search against
        # the 'class' attribute.
        self.assertSelects(tree.find_all('a', class_='1'), ['Class 1.'])
        self.assertSelects(tree.find_all('c', class_='3'), ['Class 3 and 4.'])
        self.assertSelects(tree.find_all('c', class_='4'), ['Class 3 and 4.'])

        # Passing in a string to 'attrs' will also search the CSS class.
        self.assertSelects(tree.find_all('a', '1'), ['Class 1.'])
        self.assertSelects(tree.find_all(attrs='1'), ['Class 1.', 'Class 1.'])
        self.assertSelects(tree.find_all('c', '3'), ['Class 3 and 4.'])
        self.assertSelects(tree.find_all('c', '4'), ['Class 3 and 4.'])

    def test_find_by_class_when_multiple_classes_present(self):
        tree = self.soup("<gar class='foo bar'>Found it</gar>")

        f = tree.find_all("gar", class_=re.compile("o"))
        self.assertSelects(f, ["Found it"])

        f = tree.find_all("gar", class_=re.compile("a"))
        self.assertSelects(f, ["Found it"])

        # Since the class is not the string "foo bar", but the two
        # strings "foo" and "bar", this will not find anything.
        f = tree.find_all("gar", class_=re.compile("o b"))
        self.assertSelects(f, [])

    def test_find_all_with_non_dictionary_for_attrs_finds_by_class(self):
        soup = self.soup("<a class='bar'>Found it</a>")

        self.assertSelects(soup.find_all("a", re.compile("ba")), ["Found it"])

        def big_attribute_value(value):
            return len(value) > 3

        self.assertSelects(soup.find_all("a", big_attribute_value), [])

        def small_attribute_value(value):
            return len(value) <= 3

        self.assertSelects(
            soup.find_all("a", small_attribute_value), ["Found it"])

    def test_find_all_with_string_for_attrs_finds_multiple_classes(self):
        soup = self.soup('<a class="foo bar"></a><a class="foo"></a>')
        a, a2 = soup.find_all("a")
        self.assertEqual([a, a2], soup.find_all("a", "foo"))
        self.assertEqual([a], soup.find_all("a", "bar"))

        # If you specify the class as a string that contains a
        # space, only that specific value will be found.
        self.assertEqual([a], soup.find_all("a", class_="foo bar"))
        self.assertEqual([a], soup.find_all("a", "foo bar"))
        self.assertEqual([], soup.find_all("a", "bar foo"))

    def test_find_all_by_attribute_soupstrainer(self):
        tree = self.soup("""
                         <a id="first">Match.</a>
                         <a id="second">Non-match.</a>""")

        strainer = SoupStrainer(attrs={'id' : 'first'})
        self.assertSelects(tree.find_all(strainer), ['Match.'])

    def test_find_all_with_missing_atribute(self):
        # You can pass in None as the value of an attribute to find_all.
        # This will match tags that do not have that attribute set.
        tree = self.soup("""<a id="1">ID present.</a>
                            <a>No ID present.</a>
                            <a id="">ID is empty.</a>""")
        self.assertSelects(tree.find_all('a', id=None), ["No ID present."])

    def test_find_all_with_defined_attribute(self):
        # You can pass in True as the value of an attribute to find_all.
        # This will match tags that have that attribute set to any value.
        tree = self.soup("""<a id="1">ID present.</a>
                            <a>No ID present.</a>
                            <a id="">ID is empty.</a>""")
        self.assertSelects(
            tree.find_all(id=True), ["ID present.", "ID is empty."])

    def test_find_all_with_numeric_attribute(self):
        # If you search for a number, it's treated as a string.
        tree = self.soup("""<a id=1>Unquoted attribute.</a>
                            <a id="1">Quoted attribute.</a>""")

        expected = ["Unquoted attribute.", "Quoted attribute."]
        self.assertSelects(tree.find_all(id=1), expected)
        self.assertSelects(tree.find_all(id="1"), expected)

    def test_find_all_with_list_attribute_values(self):
        # You can pass a list of attribute values instead of just one,
        # and you'll get tags that match any of the values.
        tree = self.soup("""<a id="1">1</a>
                            <a id="2">2</a>
                            <a id="3">3</a>
                            <a>No ID.</a>""")
        self.assertSelects(tree.find_all(id=["1", "3", "4"]),
                           ["1", "3"])

    def test_find_all_with_regular_expression_attribute_value(self):
        # You can pass a regular expression as an attribute value, and
        # you'll get tags whose values for that attribute match the
        # regular expression.
        tree = self.soup("""<a id="a">One a.</a>
                            <a id="aa">Two as.</a>
                            <a id="ab">Mixed as and bs.</a>
                            <a id="b">One b.</a>
                            <a>No ID.</a>""")

        self.assertSelects(tree.find_all(id=re.compile("^a+$")),
                           ["One a.", "Two as."])

    def test_find_by_name_and_containing_string(self):
        soup = self.soup("<b>foo</b><b>bar</b><a>foo</a>")
        a = soup.a

        self.assertEqual([a], soup.find_all("a", text="foo"))
        # Fixed: this assertion used to appear twice in a row.
        self.assertEqual([], soup.find_all("a", text="bar"))

    def test_find_by_name_and_containing_string_when_string_is_buried(self):
        soup = self.soup("<a>foo</a><a><b><c>foo</c></b></a>")
        self.assertEqual(soup.find_all("a"), soup.find_all("a", text="foo"))

    def test_find_by_attribute_and_containing_string(self):
        soup = self.soup('<b id="1">foo</b><a id="2">foo</a>')
        a = soup.a

        self.assertEqual([a], soup.find_all(id=2, text="foo"))
        self.assertEqual([], soup.find_all(id=1, text="bar"))
398 | |||
399 | |||
400 | |||
401 | |||
class TestIndex(TreeTest):
    """Test Tag.index, which gives a child's position in its parent's
    .contents list."""

    def test_index(self):
        doc = self.soup("""<div>
<a>Identical</a>
<b>Not identical</b>
<a>Identical</a>

<c><d>Identical with child</d></c>
<b>Also not identical</b>
<c><d>Identical with child</d></c>
</div>""")
        div = doc.div
        # index() must locate children by identity, not by equality:
        # several children here compare equal to one another.
        for position, child in enumerate(div.contents):
            self.assertEqual(position, div.index(child))
        # Asking for something that is not a child raises ValueError.
        self.assertRaises(ValueError, doc.index, 1)
418 | |||
419 | |||
class TestParentOperations(TreeTest):
    """Test navigation and searching through an element's parents."""

    def setUp(self):
        super(TestParentOperations, self).setUp()
        # Four nested <ul> tags; self.start is the innermost element.
        self.tree = self.soup('''<ul id="empty"></ul>
<ul id="top">
<ul id="middle">
<ul id="bottom">
<b>Start here</b>
</ul>
</ul>''')
        self.start = self.tree.b

    def test_parent(self):
        # Walking .parent climbs one <ul> level at a time.
        self.assertEqual(self.start.parent['id'], 'bottom')
        self.assertEqual(self.start.parent.parent['id'], 'middle')
        self.assertEqual(self.start.parent.parent.parent['id'], 'top')

    def test_parent_of_top_tag_is_soup_object(self):
        outermost = self.tree.contents[0]
        self.assertEqual(outermost.parent, self.tree)

    def test_soup_object_has_no_parent(self):
        self.assertEqual(None, self.tree.parent)

    def test_find_parents(self):
        # find_parents() walks outward, innermost first.
        self.assertSelectsIDs(
            self.start.find_parents('ul'), ['bottom', 'middle', 'top'])
        self.assertSelectsIDs(
            self.start.find_parents('ul', id="middle"), ['middle'])

    def test_find_parent(self):
        self.assertEqual(self.start.find_parent('ul')['id'], 'bottom')
        self.assertEqual(self.start.find_parent('ul', id='top')['id'], 'top')

    def test_parent_of_text_element(self):
        text_node = self.tree.find(text="Start here")
        self.assertEqual(text_node.parent.name, 'b')

    def test_text_element_find_parent(self):
        text_node = self.tree.find(text="Start here")
        self.assertEqual(text_node.find_parent('ul')['id'], 'bottom')

    def test_parent_generator(self):
        # The .parents generator yields each ancestor in turn.
        ancestor_ids = [parent['id'] for parent in self.start.parents
                        if parent is not None and 'id' in parent.attrs]
        self.assertEqual(ancestor_ids, ['bottom', 'middle', 'top'])
469 | |||
470 | |||
class ProximityTest(TreeTest):
    """Base class supplying a small flat document for the
    next/previous-element tests below."""

    def setUp(self):
        # Fixed: this used to call super(TreeTest, self).setUp(), which
        # skips TreeTest in the MRO (harmless only because TreeTest
        # defines no setUp of its own).
        super(ProximityTest, self).setUp()
        self.tree = self.soup(
            '<html id="start"><head></head><body><b id="1">One</b><b id="2">Two</b><b id="3">Three</b></body></html>')
477 | |||
478 | |||
class TestNextOperations(ProximityTest):
    """Tests for next_element navigation and the find_next family."""

    def setUp(self):
        super(TestNextOperations, self).setUp()
        # Navigation starts from the first <b> tag in the document.
        self.start = self.tree.b

    def test_next(self):
        self.assertEqual(self.start.next_element, "One")
        self.assertEqual(self.start.next_element.next_element['id'], "2")

    def test_next_of_last_item_is_none(self):
        last = self.tree.find(text="Three")
        self.assertEqual(last.next_element, None)

    def test_next_of_root_is_none(self):
        # The document root is outside the next/previous chain.
        self.assertEqual(self.tree.next_element, None)

    def test_find_all_next(self):
        self.assertSelects(self.start.find_all_next('b'), ["Two", "Three"])
        # FIX: the original made an extra find_all_next(id=3) call and
        # discarded the result; the assertion below already covers it.
        self.assertSelects(self.start.find_all_next(id=3), ["Three"])

    def test_find_next(self):
        self.assertEqual(self.start.find_next('b')['id'], '2')
        self.assertEqual(self.start.find_next(text="Three"), "Three")

    def test_find_next_for_text_element(self):
        text = self.tree.find(text="One")
        self.assertEqual(text.find_next("b").string, "Two")
        self.assertSelects(text.find_all_next("b"), ["Two", "Three"])

    def test_next_generator(self):
        start = self.tree.find(text="Two")
        successors = [node for node in start.next_elements]
        # There are two successors: the final <b> tag and its text contents.
        tag, contents = successors
        self.assertEqual(tag['id'], '3')
        self.assertEqual(contents, "Three")
518 | |||
class TestPreviousOperations(ProximityTest):
    """Tests for previous_element navigation and the find_previous family."""

    def setUp(self):
        super(TestPreviousOperations, self).setUp()
        # Navigation starts from the document's final text node.
        self.end = self.tree.find(text="Three")

    def test_previous(self):
        self.assertEqual(self.end.previous_element['id'], "3")
        self.assertEqual(self.end.previous_element.previous_element, "Two")

    def test_previous_of_first_item_is_none(self):
        first = self.tree.find('html')
        self.assertEqual(first.previous_element, None)

    def test_previous_of_root_is_none(self):
        # The document root is outside the next/previous chain.
        # XXX This is broken!
        #self.assertEqual(self.tree.previous_element, None)
        pass

    def test_find_all_previous(self):
        # The <b> tag containing the "Three" node is the predecessor
        # of the "Three" node itself, which is why "Three" shows up
        # here.
        self.assertSelects(
            self.end.find_all_previous('b'), ["Three", "Two", "One"])
        self.assertSelects(self.end.find_all_previous(id=1), ["One"])

    def test_find_previous(self):
        self.assertEqual(self.end.find_previous('b')['id'], '3')
        self.assertEqual(self.end.find_previous(text="One"), "One")

    def test_find_previous_for_text_element(self):
        text = self.tree.find(text="Three")
        self.assertEqual(text.find_previous("b").string, "Three")
        self.assertSelects(
            text.find_all_previous("b"), ["Three", "Two", "One"])

    def test_previous_generator(self):
        start = self.tree.find(text="One")
        predecessors = [node for node in start.previous_elements]

        # There are four predecessors: the <b> tag containing "One"
        # the <body> tag, the <head> tag, and the <html> tag.
        b, body, head, html = predecessors
        self.assertEqual(b['id'], '1')
        self.assertEqual(body.name, "body")
        self.assertEqual(head.name, "head")
        self.assertEqual(html.name, "html")
568 | |||
569 | |||
class SiblingTest(TreeTest):
    """Builds a flat document of four <span> siblings (three of which
    contain a nested <span>) for the sibling-navigation tests.
    """

    def setUp(self):
        super(SiblingTest, self).setUp()
        raw = '''<html>
<span id="1">
<span id="1.1"></span>
</span>
<span id="2">
<span id="2.1"></span>
</span>
<span id="3">
<span id="3.1"></span>
</span>
<span id="4"></span>
</html>'''
        # All that whitespace looks good but makes the tests more
        # difficult. Get rid of it.
        self.tree = self.soup(re.compile(r"\n\s*").sub("", raw))
590 | |||
591 | |||
class TestNextSibling(SiblingTest):
    """Exercises next_sibling and the find_next_sibling family."""

    def setUp(self):
        super(TestNextSibling, self).setUp()
        # Navigation starts from the first top-level <span>.
        self.start = self.tree.find(id="1")

    def test_next_sibling_of_root_is_none(self):
        # The soup object has no siblings at all.
        self.assertIsNone(self.tree.next_sibling)

    def test_next_sibling(self):
        second = self.start.next_sibling
        self.assertEqual(second['id'], '2')
        self.assertEqual(second.next_sibling['id'], '3')

        # next_element descends into the tag's children, while
        # next_sibling stays on the same level of the tree.
        self.assertEqual(self.start.next_element['id'], '1.1')

    def test_next_sibling_may_not_exist(self):
        # <html> is the only child of the soup object.
        self.assertIsNone(self.tree.html.next_sibling)

        # A nested tag with nothing after it has no next sibling.
        self.assertIsNone(self.tree.find(id="1.1").next_sibling)

        # Neither does the very last tag on its level.
        self.assertIsNone(self.tree.find(id="4").next_sibling)

    def test_find_next_sibling(self):
        self.assertEqual(self.start.find_next_sibling('span')['id'], '2')

    def test_next_siblings(self):
        self.assertSelectsIDs(self.start.find_next_siblings("span"),
                              ['2', '3', '4'])

        self.assertSelectsIDs(self.start.find_next_siblings(id='3'), ['3'])

    def test_next_sibling_for_text_element(self):
        doc = self.soup("Foo<b>bar</b>baz")
        first_text = doc.find(text="Foo")
        self.assertEqual(first_text.next_sibling.name, 'b')
        self.assertEqual(first_text.next_sibling.next_sibling, 'baz')

        self.assertSelects(first_text.find_next_siblings('b'), ['bar'])
        self.assertEqual(first_text.find_next_sibling(text="baz"), "baz")
        self.assertEqual(first_text.find_next_sibling(text="nonesuch"), None)
635 | |||
636 | |||
class TestPreviousSibling(SiblingTest):
    """Exercises previous_sibling and the find_previous_sibling family."""

    def setUp(self):
        super(TestPreviousSibling, self).setUp()
        # Navigation starts from the last top-level <span>.
        self.end = self.tree.find(id="4")

    def test_previous_sibling_of_root_is_none(self):
        # The soup object has no siblings at all.
        self.assertIsNone(self.tree.previous_sibling)

    def test_previous_sibling(self):
        third = self.end.previous_sibling
        self.assertEqual(third['id'], '3')
        self.assertEqual(third.previous_sibling['id'], '2')

        # previous_element descends into the preceding tag's children,
        # while previous_sibling stays on the same level of the tree.
        self.assertEqual(self.end.previous_element['id'], '3.1')

    def test_previous_sibling_may_not_exist(self):
        # <html> is the only child of the soup object.
        self.assertIsNone(self.tree.html.previous_sibling)

        # A nested tag with nothing before it has no previous sibling.
        self.assertIsNone(self.tree.find(id="1.1").previous_sibling)

        # Neither does the very first tag on its level.
        self.assertIsNone(self.tree.find(id="1").previous_sibling)

    def test_find_previous_sibling(self):
        self.assertEqual(self.end.find_previous_sibling('span')['id'], '3')

    def test_previous_siblings(self):
        self.assertSelectsIDs(self.end.find_previous_siblings("span"),
                              ['3', '2', '1'])

        self.assertSelectsIDs(self.end.find_previous_siblings(id='1'), ['1'])

    def test_previous_sibling_for_text_element(self):
        doc = self.soup("Foo<b>bar</b>baz")
        last_text = doc.find(text="baz")
        self.assertEqual(last_text.previous_sibling.name, 'b')
        self.assertEqual(last_text.previous_sibling.previous_sibling, 'Foo')

        self.assertSelects(last_text.find_previous_siblings('b'), ['bar'])
        self.assertEqual(last_text.find_previous_sibling(text="Foo"), "Foo")
        self.assertEqual(last_text.find_previous_sibling(text="nonesuch"), None)
680 | |||
681 | |||
class TestTagCreation(SoupTest):
    """Test the ability to create new tags."""

    def test_new_tag(self):
        soup = self.soup("")
        created = soup.new_tag("foo", bar="baz")
        # The result is a real Tag with the requested name and
        # attributes, but it is not attached to any tree yet.
        self.assertTrue(isinstance(created, Tag))
        self.assertEqual(created.name, "foo")
        self.assertEqual(created.attrs, dict(bar="baz"))
        self.assertEqual(created.parent, None)

    def test_tag_inherits_self_closing_rules_from_builder(self):
        if XML_BUILDER_PRESENT:
            xml_soup = BeautifulSoup("", "lxml-xml")
            xml_br = xml_soup.new_tag("br")
            xml_p = xml_soup.new_tag("p")

            # In XML, any tag with no contents is serialized as an
            # empty-element tag.
            self.assertEqual(xml_br.encode(), b"<br/>")
            self.assertEqual(xml_p.encode(), b"<p/>")

        html_soup = BeautifulSoup("", "html.parser")
        html_br = html_soup.new_tag("br")
        html_p = html_soup.new_tag("p")

        # HTML applies its fixed list of empty-element tags: <br> is on
        # it, <p> is not.
        self.assertEqual(html_br.encode(), b"<br/>")
        self.assertEqual(html_p.encode(), b"<p></p>")

    def test_new_string_creates_navigablestring(self):
        soup = self.soup("")
        node = soup.new_string("foo")
        self.assertEqual(node, "foo")
        self.assertTrue(isinstance(node, NavigableString))

    def test_new_string_can_create_navigablestring_subclass(self):
        soup = self.soup("")
        node = soup.new_string("foo", Comment)
        self.assertEqual(node, "foo")
        self.assertTrue(isinstance(node, Comment))
723 | |||
class TestTreeModification(SoupTest):
    """Tests for modifying a parse tree in place: attribute changes,
    insert/append, replace_with, wrap/unwrap, extract, clear, and
    .string assignment.
    """

    def test_attribute_modification(self):
        # Setting, deleting, and adding attributes is reflected in the
        # serialized output.
        soup = self.soup('<a id="1"></a>')
        soup.a['id'] = 2
        self.assertEqual(soup.decode(), self.document_for('<a id="2"></a>'))
        del(soup.a['id'])
        self.assertEqual(soup.decode(), self.document_for('<a></a>'))
        soup.a['id2'] = 'foo'
        self.assertEqual(soup.decode(), self.document_for('<a id2="foo"></a>'))

    def test_new_tag_creation(self):
        # Tags built directly via the Tag constructor can be inserted
        # into an existing tree.
        builder = builder_registry.lookup('html')()
        soup = self.soup("<body></body>", builder=builder)
        a = Tag(soup, builder, 'a')
        ol = Tag(soup, builder, 'ol')
        a['href'] = 'http://foo.com/'
        soup.body.insert(0, a)
        soup.body.insert(1, ol)
        self.assertEqual(
            soup.body.encode(),
            b'<body><a href="http://foo.com/"></a><ol></ol></body>')

    def test_append_to_contents_moves_tag(self):
        # Appending a tag that already lives in the tree moves it
        # rather than copying it.
        doc = """<p id="1">Don't leave me <b>here</b>.</p>
<p id="2">Don\'t leave!</p>"""
        soup = self.soup(doc)
        second_para = soup.find(id='2')
        bold = soup.b

        # Move the <b> tag to the end of the second paragraph.
        soup.find(id='2').append(soup.b)

        # The <b> tag is now a child of the second paragraph.
        self.assertEqual(bold.parent, second_para)

        self.assertEqual(
            soup.decode(), self.document_for(
                '<p id="1">Don\'t leave me .</p>\n'
                '<p id="2">Don\'t leave!<b>here</b></p>'))

    def test_replace_with_returns_thing_that_was_replaced(self):
        text = "<a></a><b><c></c></b>"
        soup = self.soup(text)
        a = soup.a
        new_a = a.replace_with(soup.c)
        self.assertEqual(a, new_a)

    def test_unwrap_returns_thing_that_was_replaced(self):
        text = "<a><b></b><c></c></a>"
        soup = self.soup(text)
        a = soup.a
        new_a = a.unwrap()
        self.assertEqual(a, new_a)

    def test_replace_with_and_unwrap_give_useful_exception_when_tag_has_no_parent(self):
        # An extracted (parentless) tag cannot meaningfully be replaced
        # or unwrapped.
        soup = self.soup("<a><b>Foo</b></a><c>Bar</c>")
        a = soup.a
        a.extract()
        self.assertEqual(None, a.parent)
        self.assertRaises(ValueError, a.unwrap)
        self.assertRaises(ValueError, a.replace_with, soup.c)

    def test_replace_tag_with_itself(self):
        # Replacing a tag with itself is a no-op.
        text = "<a><b></b><c>Foo<d></d></c></a><a><e></e></a>"
        soup = self.soup(text)
        c = soup.c
        soup.c.replace_with(c)
        self.assertEqual(soup.decode(), self.document_for(text))

    def test_replace_tag_with_its_parent_raises_exception(self):
        text = "<a><b></b></a>"
        soup = self.soup(text)
        self.assertRaises(ValueError, soup.b.replace_with, soup.a)

    def test_insert_tag_into_itself_raises_exception(self):
        text = "<a><b></b></a>"
        soup = self.soup(text)
        self.assertRaises(ValueError, soup.a.insert, 0, soup.a)

    def test_replace_with_maintains_next_element_throughout(self):
        soup = self.soup('<p><a>one</a><b>three</b></p>')
        a = soup.a
        b = a.contents[0]
        # Make it so the <a> tag has two text children.
        a.insert(1, "two")

        # Now replace each one with the empty string.
        left, right = a.contents
        left.replaceWith('')
        right.replaceWith('')

        # The <b> tag is still connected to the tree.
        self.assertEqual("three", soup.b.string)

    def test_replace_final_node(self):
        # Replacing the last node keeps the next/previous chain intact.
        soup = self.soup("<b>Argh!</b>")
        soup.find(text="Argh!").replace_with("Hooray!")
        new_text = soup.find(text="Hooray!")
        b = soup.b
        self.assertEqual(new_text.previous_element, b)
        self.assertEqual(new_text.parent, b)
        self.assertEqual(new_text.previous_element.next_element, new_text)
        self.assertEqual(new_text.next_element, None)

    def test_consecutive_text_nodes(self):
        # A builder should never create two consecutive text nodes,
        # but if you insert one next to another, Beautiful Soup will
        # handle it correctly.
        soup = self.soup("<a><b>Argh!</b><c></c></a>")
        soup.b.insert(1, "Hooray!")

        self.assertEqual(
            soup.decode(), self.document_for(
                "<a><b>Argh!Hooray!</b><c></c></a>"))

        new_text = soup.find(text="Hooray!")
        self.assertEqual(new_text.previous_element, "Argh!")
        self.assertEqual(new_text.previous_element.next_element, new_text)

        self.assertEqual(new_text.previous_sibling, "Argh!")
        self.assertEqual(new_text.previous_sibling.next_sibling, new_text)

        self.assertEqual(new_text.next_sibling, None)
        self.assertEqual(new_text.next_element, soup.c)

    def test_insert_string(self):
        soup = self.soup("<a></a>")
        soup.a.insert(0, "bar")
        soup.a.insert(0, "foo")
        # The string were added to the tag.
        self.assertEqual(["foo", "bar"], soup.a.contents)
        # And they were converted to NavigableStrings.
        self.assertEqual(soup.a.contents[0].next_element, "bar")

    def test_insert_tag(self):
        builder = self.default_builder
        soup = self.soup(
            "<a><b>Find</b><c>lady!</c><d></d></a>", builder=builder)
        magic_tag = Tag(soup, builder, 'magictag')
        magic_tag.insert(0, "the")
        soup.a.insert(1, magic_tag)

        self.assertEqual(
            soup.decode(), self.document_for(
                "<a><b>Find</b><magictag>the</magictag><c>lady!</c><d></d></a>"))

        # Make sure all the relationships are hooked up correctly.
        b_tag = soup.b
        self.assertEqual(b_tag.next_sibling, magic_tag)
        self.assertEqual(magic_tag.previous_sibling, b_tag)

        find = b_tag.find(text="Find")
        self.assertEqual(find.next_element, magic_tag)
        self.assertEqual(magic_tag.previous_element, find)

        c_tag = soup.c
        self.assertEqual(magic_tag.next_sibling, c_tag)
        self.assertEqual(c_tag.previous_sibling, magic_tag)

        the = magic_tag.find(text="the")
        self.assertEqual(the.parent, magic_tag)
        self.assertEqual(the.next_element, c_tag)
        self.assertEqual(c_tag.previous_element, the)

    def test_append_child_thats_already_at_the_end(self):
        # Appending a tag to its current position leaves the document
        # unchanged.
        data = "<a><b></b></a>"
        soup = self.soup(data)
        soup.a.append(soup.b)
        self.assertEqual(data, soup.decode())

    def test_move_tag_to_beginning_of_parent(self):
        data = "<a><b></b><c></c><d></d></a>"
        soup = self.soup(data)
        soup.a.insert(0, soup.d)
        self.assertEqual("<a><d></d><b></b><c></c></a>", soup.decode())

    def test_insert_works_on_empty_element_tag(self):
        # This is a little strange, since most HTML parsers don't allow
        # markup like this to come through. But in general, we don't
        # know what the parser would or wouldn't have allowed, so
        # I'm letting this succeed for now.
        soup = self.soup("<br/>")
        soup.br.insert(1, "Contents")
        self.assertEqual(str(soup.br), "<br>Contents</br>")

    def test_insert_before(self):
        soup = self.soup("<a>foo</a><b>bar</b>")
        soup.b.insert_before("BAZ")
        soup.a.insert_before("QUUX")
        self.assertEqual(
            soup.decode(), self.document_for("QUUX<a>foo</a>BAZ<b>bar</b>"))

        # insert_before on an existing tag moves it.
        soup.a.insert_before(soup.b)
        self.assertEqual(
            soup.decode(), self.document_for("QUUX<b>bar</b><a>foo</a>BAZ"))

    def test_insert_after(self):
        soup = self.soup("<a>foo</a><b>bar</b>")
        soup.b.insert_after("BAZ")
        soup.a.insert_after("QUUX")
        self.assertEqual(
            soup.decode(), self.document_for("<a>foo</a>QUUX<b>bar</b>BAZ"))
        # insert_after on an existing tag moves it.
        soup.b.insert_after(soup.a)
        self.assertEqual(
            soup.decode(), self.document_for("QUUX<b>bar</b><a>foo</a>BAZ"))

    def test_insert_after_raises_exception_if_after_has_no_meaning(self):
        soup = self.soup("")
        tag = soup.new_tag("a")
        string = soup.new_string("")
        # A detached string, the soup object, and a tag relative to
        # itself all lack a meaningful "after" position.
        self.assertRaises(ValueError, string.insert_after, tag)
        self.assertRaises(NotImplementedError, soup.insert_after, tag)
        self.assertRaises(ValueError, tag.insert_after, tag)

    def test_insert_before_raises_notimplementederror_if_before_has_no_meaning(self):
        soup = self.soup("")
        tag = soup.new_tag("a")
        string = soup.new_string("")
        # Mirror image of the insert_after case above.
        self.assertRaises(ValueError, string.insert_before, tag)
        self.assertRaises(NotImplementedError, soup.insert_before, tag)
        self.assertRaises(ValueError, tag.insert_before, tag)

    def test_replace_with(self):
        soup = self.soup(
            "<p>There's <b>no</b> business like <b>show</b> business</p>")
        no, show = soup.find_all('b')
        show.replace_with(no)
        self.assertEqual(
            soup.decode(),
            self.document_for(
                "<p>There's  business like <b>no</b> business</p>"))

        self.assertEqual(show.parent, None)
        self.assertEqual(no.parent, soup.p)
        self.assertEqual(no.next_element, "no")
        self.assertEqual(no.next_sibling, " business")

    def test_replace_first_child(self):
        data = "<a><b></b><c></c></a>"
        soup = self.soup(data)
        soup.b.replace_with(soup.c)
        self.assertEqual("<a><c></c></a>", soup.decode())

    def test_replace_last_child(self):
        data = "<a><b></b><c></c></a>"
        soup = self.soup(data)
        soup.c.replace_with(soup.b)
        self.assertEqual("<a><b></b></a>", soup.decode())

    def test_nested_tag_replace_with(self):
        soup = self.soup(
            """<a>We<b>reserve<c>the</c><d>right</d></b></a><e>to<f>refuse</f><g>service</g></e>""")

        # Replace the entire <b> tag and its contents ("reserve the
        # right") with the <f> tag ("refuse").
        remove_tag = soup.b
        move_tag = soup.f
        remove_tag.replace_with(move_tag)

        self.assertEqual(
            soup.decode(), self.document_for(
                "<a>We<f>refuse</f></a><e>to<g>service</g></e>"))

        # The <b> tag is now an orphan.
        self.assertEqual(remove_tag.parent, None)
        self.assertEqual(remove_tag.find(text="right").next_element, None)
        self.assertEqual(remove_tag.previous_element, None)
        self.assertEqual(remove_tag.next_sibling, None)
        self.assertEqual(remove_tag.previous_sibling, None)

        # The <f> tag is now connected to the <a> tag.
        self.assertEqual(move_tag.parent, soup.a)
        self.assertEqual(move_tag.previous_element, "We")
        self.assertEqual(move_tag.next_element.next_element, soup.e)
        self.assertEqual(move_tag.next_sibling, None)

        # The gap where the <f> tag used to be has been mended, and
        # the word "to" is now connected to the <g> tag.
        to_text = soup.find(text="to")
        g_tag = soup.g
        self.assertEqual(to_text.next_element, g_tag)
        self.assertEqual(to_text.next_sibling, g_tag)
        self.assertEqual(g_tag.previous_element, to_text)
        self.assertEqual(g_tag.previous_sibling, to_text)

    def test_unwrap(self):
        tree = self.soup("""
<p>Unneeded <em>formatting</em> is unneeded</p>
""")
        tree.em.unwrap()
        self.assertEqual(tree.em, None)
        self.assertEqual(tree.p.text, "Unneeded formatting is unneeded")

    def test_wrap(self):
        soup = self.soup("I wish I was bold.")
        value = soup.string.wrap(soup.new_tag("b"))
        self.assertEqual(value.decode(), "<b>I wish I was bold.</b>")
        self.assertEqual(
            soup.decode(), self.document_for("<b>I wish I was bold.</b>"))

    def test_wrap_extracts_tag_from_elsewhere(self):
        # Wrapping with a tag that already lives in the tree moves that
        # tag to the wrap site.
        soup = self.soup("<b></b>I wish I was bold.")
        soup.b.next_sibling.wrap(soup.b)
        self.assertEqual(
            soup.decode(), self.document_for("<b>I wish I was bold.</b>"))

    def test_wrap_puts_new_contents_at_the_end(self):
        soup = self.soup("<b>I like being bold.</b>I wish I was bold.")
        soup.b.next_sibling.wrap(soup.b)
        self.assertEqual(2, len(soup.b.contents))
        self.assertEqual(
            soup.decode(), self.document_for(
                "<b>I like being bold.I wish I was bold.</b>"))

    def test_extract(self):
        soup = self.soup(
            '<html><body>Some content. <div id="nav">Nav crap</div> More content.</body></html>')

        self.assertEqual(len(soup.body.contents), 3)
        extracted = soup.find(id="nav").extract()

        self.assertEqual(
            soup.decode(), "<html><body>Some content.  More content.</body></html>")
        self.assertEqual(extracted.decode(), '<div id="nav">Nav crap</div>')

        # The extracted tag is now an orphan.
        self.assertEqual(len(soup.body.contents), 2)
        self.assertEqual(extracted.parent, None)
        self.assertEqual(extracted.previous_element, None)
        self.assertEqual(extracted.next_element.next_element, None)

        # The gap where the extracted tag used to be has been mended.
        content_1 = soup.find(text="Some content. ")
        content_2 = soup.find(text=" More content.")
        self.assertEqual(content_1.next_element, content_2)
        self.assertEqual(content_1.next_sibling, content_2)
        self.assertEqual(content_2.previous_element, content_1)
        self.assertEqual(content_2.previous_sibling, content_1)

    def test_extract_distinguishes_between_identical_strings(self):
        soup = self.soup("<a>foo</a><b>bar</b>")
        foo_1 = soup.a.string
        bar_1 = soup.b.string
        foo_2 = soup.new_string("foo")
        bar_2 = soup.new_string("bar")
        soup.a.append(foo_2)
        soup.b.append(bar_2)

        # Now there are two identical strings in the <a> tag, and two
        # in the <b> tag. Let's remove the first "foo" and the second
        # "bar".
        foo_1.extract()
        bar_2.extract()
        self.assertEqual(foo_2, soup.a.string)
        self.assertEqual(bar_2, soup.b.string)

    def test_extract_multiples_of_same_tag(self):
        soup = self.soup("""
<html>
<head>
<script>foo</script>
</head>
<body>
<script>bar</script>
<a></a>
</body>
<script>baz</script>
</html>""")
        # Extract every <script>; each call removes the first one left.
        [soup.script.extract() for i in soup.find_all("script")]
        self.assertEqual("<body>\n\n<a></a>\n</body>", str(soup.body))


    def test_extract_works_when_element_is_surrounded_by_identical_strings(self):
        soup = self.soup(
            '<html>\n'
            '<body>hi</body>\n'
            '</html>')
        soup.find('body').extract()
        self.assertEqual(None, soup.find('body'))


    def test_clear(self):
        """Tag.clear()"""
        soup = self.soup("<p><a>String <em>Italicized</em></a> and another</p>")
        # clear using extract()
        a = soup.a
        soup.p.clear()
        self.assertEqual(len(soup.p.contents), 0)
        self.assertTrue(hasattr(a, "contents"))

        # clear using decompose()
        em = a.em
        a.clear(decompose=True)
        self.assertEqual(0, len(em.contents))

    def test_string_set(self):
        """Tag.string = 'string'"""
        soup = self.soup("<a></a> <b><c></c></b>")
        soup.a.string = "foo"
        self.assertEqual(soup.a.contents, ["foo"])
        # Assigning .string replaces the tag's entire contents.
        soup.b.string = "bar"
        self.assertEqual(soup.b.contents, ["bar"])

    def test_string_set_does_not_affect_original_string(self):
        soup = self.soup("<a><b>foo</b><c>bar</c>")
        soup.b.string = soup.c.string
        self.assertEqual(soup.a.encode(), b"<a><b>bar</b><c>bar</c></a>")

    def test_set_string_preserves_class_of_string(self):
        soup = self.soup("<a></a>")
        cdata = CData("foo")
        soup.a.string = cdata
        self.assertTrue(isinstance(soup.a.string, CData))
1138 | |||
class TestElementObjects(SoupTest):
    """Test various features of element objects."""

    def test_len(self):
        """The length of an element is its number of children."""
        soup = self.soup("<top>1<b>2</b>3</top>")

        # The BeautifulSoup object itself contains one element: the
        # <top> tag.
        self.assertEqual(len(soup.contents), 1)
        self.assertEqual(len(soup), 1)

        # The <top> tag contains three elements: the text node "1", the
        # <b> tag, and the text node "3".
        self.assertEqual(len(soup.top), 3)
        self.assertEqual(len(soup.top.contents), 3)

    def test_member_access_invokes_find(self):
        """Accessing a Python member .foo invokes find('foo')"""
        soup = self.soup('<b><i></i></b>')
        self.assertEqual(soup.b, soup.find('b'))
        self.assertEqual(soup.b.i, soup.find('b').find('i'))
        self.assertEqual(soup.a, None)

    def test_deprecated_member_access(self):
        soup = self.soup('<b><i></i></b>')
        # Old-style .fooTag access still works but emits a
        # DeprecationWarning with this exact message.
        with warnings.catch_warnings(record=True) as w:
            tag = soup.bTag
        self.assertEqual(soup.b, tag)
        self.assertEqual(
            '.bTag is deprecated, use .find("b") instead.',
            str(w[0].message))

    def test_has_attr(self):
        """has_attr() checks for the presence of an attribute.

        Please note: has_attr() is different from the `in` operator.
        has_attr() checks the tag's attributes; `in` checks the tag's
        children.
        """
        soup = self.soup("<foo attr='bar'>")
        self.assertTrue(soup.foo.has_attr('attr'))
        self.assertFalse(soup.foo.has_attr('attr2'))


    def test_attributes_come_out_in_alphabetical_order(self):
        markup = '<b a="1" z="5" m="3" f="2" y="4"></b>'
        self.assertSoupEquals(markup, '<b a="1" f="2" m="3" y="4" z="5"></b>')

    def test_string(self):
        # A tag that contains only a text node makes that node
        # available as .string.
        soup = self.soup("<b>foo</b>")
        self.assertEqual(soup.b.string, 'foo')

    def test_empty_tag_has_no_string(self):
        # A tag with no children has no .string.
        soup = self.soup("<b></b>")
        self.assertEqual(soup.b.string, None)

    def test_tag_with_multiple_children_has_no_string(self):
        # A tag with multiple children has no .string.
        soup = self.soup("<a>foo<b></b><b></b></b>")
        self.assertEqual(soup.b.string, None)

        soup = self.soup("<a>foo<b></b>bar</b>")
        self.assertEqual(soup.b.string, None)

        # Even if all the children are strings, due to trickery,
        # it won't work--but this would be a good optimization.
        soup = self.soup("<a>foo</b>")
        soup.a.insert(1, "bar")
        self.assertEqual(soup.a.string, None)

    def test_tag_with_recursive_string_has_string(self):
        # A tag with a single child which has a .string inherits that
        # .string.
        soup = self.soup("<a><b>foo</b></a>")
        self.assertEqual(soup.a.string, "foo")
        self.assertEqual(soup.string, "foo")

    def test_lack_of_string(self):
        """Only a tag containing a single text node has a .string."""
        soup = self.soup("<b>f<i>e</i>o</b>")
        self.assertFalse(soup.b.string)

        soup = self.soup("<b></b>")
        self.assertFalse(soup.b.string)

    def test_all_text(self):
        """Tag.text and Tag.get_text(sep=u"") -> all child text, concatenated"""
        soup = self.soup("<a>a<b>r</b>   <r> t </r></a>")
        self.assertEqual(soup.a.text, "ar   t ")
        self.assertEqual(soup.a.get_text(strip=True), "art")
        self.assertEqual(soup.a.get_text(","), "a,r,   , t ")
        self.assertEqual(soup.a.get_text(",", strip=True), "a,r,t")

    def test_get_text_ignores_comments(self):
        soup = self.soup("foo<!--IGNORE-->bar")
        self.assertEqual(soup.get_text(), "foobar")

        # Passing an explicit types= (or disabling the filter entirely)
        # brings the comment text back.
        self.assertEqual(
            soup.get_text(types=(NavigableString, Comment)), "fooIGNOREbar")
        self.assertEqual(
            soup.get_text(types=None), "fooIGNOREbar")

    def test_all_strings_ignores_comments(self):
        soup = self.soup("foo<!--IGNORE-->bar")
        self.assertEqual(['foo', 'bar'], list(soup.strings))
1248 | |||
1249 | class TestCDAtaListAttributes(SoupTest): | ||
1250 | |||
1251 | """Testing cdata-list attributes like 'class'. | ||
1252 | """ | ||
1253 | def test_single_value_becomes_list(self): | ||
1254 | soup = self.soup("<a class='foo'>") | ||
1255 | self.assertEqual(["foo"],soup.a['class']) | ||
1256 | |||
1257 | def test_multiple_values_becomes_list(self): | ||
1258 | soup = self.soup("<a class='foo bar'>") | ||
1259 | self.assertEqual(["foo", "bar"], soup.a['class']) | ||
1260 | |||
1261 | def test_multiple_values_separated_by_weird_whitespace(self): | ||
1262 | soup = self.soup("<a class='foo\tbar\nbaz'>") | ||
1263 | self.assertEqual(["foo", "bar", "baz"],soup.a['class']) | ||
1264 | |||
1265 | def test_attributes_joined_into_string_on_output(self): | ||
1266 | soup = self.soup("<a class='foo\tbar'>") | ||
1267 | self.assertEqual(b'<a class="foo bar"></a>', soup.a.encode()) | ||
1268 | |||
1269 | def test_accept_charset(self): | ||
1270 | soup = self.soup('<form accept-charset="ISO-8859-1 UTF-8">') | ||
1271 | self.assertEqual(['ISO-8859-1', 'UTF-8'], soup.form['accept-charset']) | ||
1272 | |||
1273 | def test_cdata_attribute_applying_only_to_one_tag(self): | ||
1274 | data = '<a accept-charset="ISO-8859-1 UTF-8"></a>' | ||
1275 | soup = self.soup(data) | ||
1276 | # We saw in another test that accept-charset is a cdata-list | ||
1277 | # attribute for the <form> tag. But it's not a cdata-list | ||
1278 | # attribute for any other tag. | ||
1279 | self.assertEqual('ISO-8859-1 UTF-8', soup.a['accept-charset']) | ||
1280 | |||
1281 | def test_string_has_immutable_name_property(self): | ||
1282 | string = self.soup("s").string | ||
1283 | self.assertEqual(None, string.name) | ||
1284 | def t(): | ||
1285 | string.name = 'foo' | ||
1286 | self.assertRaises(AttributeError, t) | ||
1287 | |||
1288 | class TestPersistence(SoupTest): | ||
1289 | "Testing features like pickle and deepcopy." | ||
1290 | |||
1291 | def setUp(self): | ||
1292 | super(TestPersistence, self).setUp() | ||
1293 | self.page = """<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN" | ||
1294 | "http://www.w3.org/TR/REC-html40/transitional.dtd"> | ||
1295 | <html> | ||
1296 | <head> | ||
1297 | <meta http-equiv="Content-Type" content="text/html; charset=utf-8"> | ||
1298 | <title>Beautiful Soup: We called him Tortoise because he taught us.</title> | ||
1299 | <link rev="made" href="mailto:leonardr@segfault.org"> | ||
1300 | <meta name="Description" content="Beautiful Soup: an HTML parser optimized for screen-scraping."> | ||
1301 | <meta name="generator" content="Markov Approximation 1.4 (module: leonardr)"> | ||
1302 | <meta name="author" content="Leonard Richardson"> | ||
1303 | </head> | ||
1304 | <body> | ||
1305 | <a href="foo">foo</a> | ||
1306 | <a href="foo"><b>bar</b></a> | ||
1307 | </body> | ||
1308 | </html>""" | ||
1309 | self.tree = self.soup(self.page) | ||
1310 | |||
1311 | def test_pickle_and_unpickle_identity(self): | ||
1312 | # Pickling a tree, then unpickling it, yields a tree identical | ||
1313 | # to the original. | ||
1314 | dumped = pickle.dumps(self.tree, 2) | ||
1315 | loaded = pickle.loads(dumped) | ||
1316 | self.assertEqual(loaded.__class__, BeautifulSoup) | ||
1317 | self.assertEqual(loaded.decode(), self.tree.decode()) | ||
1318 | |||
1319 | def test_deepcopy_identity(self): | ||
1320 | # Making a deepcopy of a tree yields an identical tree. | ||
1321 | copied = copy.deepcopy(self.tree) | ||
1322 | self.assertEqual(copied.decode(), self.tree.decode()) | ||
1323 | |||
1324 | def test_unicode_pickle(self): | ||
1325 | # A tree containing Unicode characters can be pickled. | ||
1326 | html = "<b>\N{SNOWMAN}</b>" | ||
1327 | soup = self.soup(html) | ||
1328 | dumped = pickle.dumps(soup, pickle.HIGHEST_PROTOCOL) | ||
1329 | loaded = pickle.loads(dumped) | ||
1330 | self.assertEqual(loaded.decode(), soup.decode()) | ||
1331 | |||
1332 | def test_copy_navigablestring_is_not_attached_to_tree(self): | ||
1333 | html = "<b>Foo<a></a></b><b>Bar</b>" | ||
1334 | soup = self.soup(html) | ||
1335 | s1 = soup.find(string="Foo") | ||
1336 | s2 = copy.copy(s1) | ||
1337 | self.assertEqual(s1, s2) | ||
1338 | self.assertEqual(None, s2.parent) | ||
1339 | self.assertEqual(None, s2.next_element) | ||
1340 | self.assertNotEqual(None, s1.next_sibling) | ||
1341 | self.assertEqual(None, s2.next_sibling) | ||
1342 | self.assertEqual(None, s2.previous_element) | ||
1343 | |||
1344 | def test_copy_navigablestring_subclass_has_same_type(self): | ||
1345 | html = "<b><!--Foo--></b>" | ||
1346 | soup = self.soup(html) | ||
1347 | s1 = soup.string | ||
1348 | s2 = copy.copy(s1) | ||
1349 | self.assertEqual(s1, s2) | ||
1350 | self.assertTrue(isinstance(s2, Comment)) | ||
1351 | |||
1352 | def test_copy_entire_soup(self): | ||
1353 | html = "<div><b>Foo<a></a></b><b>Bar</b></div>end" | ||
1354 | soup = self.soup(html) | ||
1355 | soup_copy = copy.copy(soup) | ||
1356 | self.assertEqual(soup, soup_copy) | ||
1357 | |||
1358 | def test_copy_tag_copies_contents(self): | ||
1359 | html = "<div><b>Foo<a></a></b><b>Bar</b></div>end" | ||
1360 | soup = self.soup(html) | ||
1361 | div = soup.div | ||
1362 | div_copy = copy.copy(div) | ||
1363 | |||
1364 | # The two tags look the same, and evaluate to equal. | ||
1365 | self.assertEqual(str(div), str(div_copy)) | ||
1366 | self.assertEqual(div, div_copy) | ||
1367 | |||
1368 | # But they're not the same object. | ||
1369 | self.assertFalse(div is div_copy) | ||
1370 | |||
1371 | # And they don't have the same relation to the parse tree. The | ||
1372 | # copy is not associated with a parse tree at all. | ||
1373 | self.assertEqual(None, div_copy.parent) | ||
1374 | self.assertEqual(None, div_copy.previous_element) | ||
1375 | self.assertEqual(None, div_copy.find(string='Bar').next_element) | ||
1376 | self.assertNotEqual(None, div.find(string='Bar').next_element) | ||
1377 | |||
1378 | class TestSubstitutions(SoupTest): | ||
1379 | |||
1380 | def test_default_formatter_is_minimal(self): | ||
1381 | markup = "<b>&lt;&lt;Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</b>" | ||
1382 | soup = self.soup(markup) | ||
1383 | decoded = soup.decode(formatter="minimal") | ||
1384 | # The < is converted back into &lt; but the e-with-acute is left alone. | ||
1385 | self.assertEqual( | ||
1386 | decoded, | ||
1387 | self.document_for( | ||
1388 | "<b>&lt;&lt;Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</b>")) | ||
1389 | |||
1390 | def test_formatter_html(self): | ||
1391 | markup = "<b>&lt;&lt;Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</b>" | ||
1392 | soup = self.soup(markup) | ||
1393 | decoded = soup.decode(formatter="html") | ||
1394 | self.assertEqual( | ||
1395 | decoded, | ||
1396 | self.document_for("<b>&lt;&lt;Sacr&eacute; bleu!&gt;&gt;</b>")) | ||
1397 | |||
1398 | def test_formatter_minimal(self): | ||
1399 | markup = "<b>&lt;&lt;Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</b>" | ||
1400 | soup = self.soup(markup) | ||
1401 | decoded = soup.decode(formatter="minimal") | ||
1402 | # The < is converted back into &lt; but the e-with-acute is left alone. | ||
1403 | self.assertEqual( | ||
1404 | decoded, | ||
1405 | self.document_for( | ||
1406 | "<b>&lt;&lt;Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</b>")) | ||
1407 | |||
1408 | def test_formatter_null(self): | ||
1409 | markup = "<b>&lt;&lt;Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</b>" | ||
1410 | soup = self.soup(markup) | ||
1411 | decoded = soup.decode(formatter=None) | ||
1412 | # Neither the angle brackets nor the e-with-acute are converted. | ||
1413 | # This is not valid HTML, but it's what the user wanted. | ||
1414 | self.assertEqual(decoded, | ||
1415 | self.document_for("<b><<Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>></b>")) | ||
1416 | |||
1417 | def test_formatter_custom(self): | ||
1418 | markup = "<b>&lt;foo&gt;</b><b>bar</b>" | ||
1419 | soup = self.soup(markup) | ||
1420 | decoded = soup.decode(formatter = lambda x: x.upper()) | ||
1421 | # Instead of normal entity conversion code, the custom | ||
1422 | # callable is called on every string. | ||
1423 | self.assertEqual( | ||
1424 | decoded, | ||
1425 | self.document_for("<b><FOO></b><b>BAR</b>")) | ||
1426 | |||
1427 | def test_formatter_is_run_on_attribute_values(self): | ||
1428 | markup = '<a href="http://a.com?a=b&c=é">e</a>' | ||
1429 | soup = self.soup(markup) | ||
1430 | a = soup.a | ||
1431 | |||
1432 | expect_minimal = '<a href="http://a.com?a=b&amp;c=é">e</a>' | ||
1433 | |||
1434 | self.assertEqual(expect_minimal, a.decode()) | ||
1435 | self.assertEqual(expect_minimal, a.decode(formatter="minimal")) | ||
1436 | |||
1437 | expect_html = '<a href="http://a.com?a=b&amp;c=&eacute;">e</a>' | ||
1438 | self.assertEqual(expect_html, a.decode(formatter="html")) | ||
1439 | |||
1440 | self.assertEqual(markup, a.decode(formatter=None)) | ||
1441 | expect_upper = '<a href="HTTP://A.COM?A=B&C=É">E</a>' | ||
1442 | self.assertEqual(expect_upper, a.decode(formatter=lambda x: x.upper())) | ||
1443 | |||
1444 | def test_formatter_skips_script_tag_for_html_documents(self): | ||
1445 | doc = """ | ||
1446 | <script type="text/javascript"> | ||
1447 | console.log("< < hey > > "); | ||
1448 | </script> | ||
1449 | """ | ||
1450 | encoded = BeautifulSoup(doc, 'html.parser').encode() | ||
1451 | self.assertTrue(b"< < hey > >" in encoded) | ||
1452 | |||
1453 | def test_formatter_skips_style_tag_for_html_documents(self): | ||
1454 | doc = """ | ||
1455 | <style type="text/css"> | ||
1456 | console.log("< < hey > > "); | ||
1457 | </style> | ||
1458 | """ | ||
1459 | encoded = BeautifulSoup(doc, 'html.parser').encode() | ||
1460 | self.assertTrue(b"< < hey > >" in encoded) | ||
1461 | |||
1462 | def test_prettify_leaves_preformatted_text_alone(self): | ||
1463 | soup = self.soup("<div> foo <pre> \tbar\n \n </pre> baz ") | ||
1464 | # Everything outside the <pre> tag is reformatted, but everything | ||
1465 | # inside is left alone. | ||
1466 | self.assertEqual( | ||
1467 | '<div>\n foo\n <pre> \tbar\n \n </pre>\n baz\n</div>', | ||
1468 | soup.div.prettify()) | ||
1469 | |||
1470 | def test_prettify_accepts_formatter(self): | ||
1471 | soup = BeautifulSoup("<html><body>foo</body></html>", 'html.parser') | ||
1472 | pretty = soup.prettify(formatter = lambda x: x.upper()) | ||
1473 | self.assertTrue("FOO" in pretty) | ||
1474 | |||
1475 | def test_prettify_outputs_unicode_by_default(self): | ||
1476 | soup = self.soup("<a></a>") | ||
1477 | self.assertEqual(str, type(soup.prettify())) | ||
1478 | |||
1479 | def test_prettify_can_encode_data(self): | ||
1480 | soup = self.soup("<a></a>") | ||
1481 | self.assertEqual(bytes, type(soup.prettify("utf-8"))) | ||
1482 | |||
1483 | def test_html_entity_substitution_off_by_default(self): | ||
1484 | markup = "<b>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</b>" | ||
1485 | soup = self.soup(markup) | ||
1486 | encoded = soup.b.encode("utf-8") | ||
1487 | self.assertEqual(encoded, markup.encode('utf-8')) | ||
1488 | |||
1489 | def test_encoding_substitution(self): | ||
1490 | # Here's the <meta> tag saying that a document is | ||
1491 | # encoded in Shift-JIS. | ||
1492 | meta_tag = ('<meta content="text/html; charset=x-sjis" ' | ||
1493 | 'http-equiv="Content-type"/>') | ||
1494 | soup = self.soup(meta_tag) | ||
1495 | |||
1496 | # Parse the document, and the charset appears unchanged. | ||
1497 | self.assertEqual(soup.meta['content'], 'text/html; charset=x-sjis') | ||
1498 | |||
1499 | # Encode the document into some encoding, and the encoding is | ||
1500 | # substituted into the meta tag. | ||
1501 | utf_8 = soup.encode("utf-8") | ||
1502 | self.assertTrue(b"charset=utf-8" in utf_8) | ||
1503 | |||
1504 | euc_jp = soup.encode("euc_jp") | ||
1505 | self.assertTrue(b"charset=euc_jp" in euc_jp) | ||
1506 | |||
1507 | shift_jis = soup.encode("shift-jis") | ||
1508 | self.assertTrue(b"charset=shift-jis" in shift_jis) | ||
1509 | |||
1510 | utf_16_u = soup.encode("utf-16").decode("utf-16") | ||
1511 | self.assertTrue("charset=utf-16" in utf_16_u) | ||
1512 | |||
1513 | def test_encoding_substitution_doesnt_happen_if_tag_is_strained(self): | ||
1514 | markup = ('<head><meta content="text/html; charset=x-sjis" ' | ||
1515 | 'http-equiv="Content-type"/></head><pre>foo</pre>') | ||
1516 | |||
1517 | # Beautiful Soup used to try to rewrite the meta tag even if the | ||
1518 | # meta tag got filtered out by the strainer. This test makes | ||
1519 | # sure that doesn't happen. | ||
1520 | strainer = SoupStrainer('pre') | ||
1521 | soup = self.soup(markup, parse_only=strainer) | ||
1522 | self.assertEqual(soup.contents[0].name, 'pre') | ||
1523 | |||
1524 | class TestEncoding(SoupTest): | ||
1525 | """Test the ability to encode objects into strings.""" | ||
1526 | |||
1527 | def test_unicode_string_can_be_encoded(self): | ||
1528 | html = "<b>\N{SNOWMAN}</b>" | ||
1529 | soup = self.soup(html) | ||
1530 | self.assertEqual(soup.b.string.encode("utf-8"), | ||
1531 | "\N{SNOWMAN}".encode("utf-8")) | ||
1532 | |||
1533 | def test_tag_containing_unicode_string_can_be_encoded(self): | ||
1534 | html = "<b>\N{SNOWMAN}</b>" | ||
1535 | soup = self.soup(html) | ||
1536 | self.assertEqual( | ||
1537 | soup.b.encode("utf-8"), html.encode("utf-8")) | ||
1538 | |||
1539 | def test_encoding_substitutes_unrecognized_characters_by_default(self): | ||
1540 | html = "<b>\N{SNOWMAN}</b>" | ||
1541 | soup = self.soup(html) | ||
1542 | self.assertEqual(soup.b.encode("ascii"), b"<b>&#9731;</b>") | ||
1543 | |||
1544 | def test_encoding_can_be_made_strict(self): | ||
1545 | html = "<b>\N{SNOWMAN}</b>" | ||
1546 | soup = self.soup(html) | ||
1547 | self.assertRaises( | ||
1548 | UnicodeEncodeError, soup.encode, "ascii", errors="strict") | ||
1549 | |||
1550 | def test_decode_contents(self): | ||
1551 | html = "<b>\N{SNOWMAN}</b>" | ||
1552 | soup = self.soup(html) | ||
1553 | self.assertEqual("\N{SNOWMAN}", soup.b.decode_contents()) | ||
1554 | |||
1555 | def test_encode_contents(self): | ||
1556 | html = "<b>\N{SNOWMAN}</b>" | ||
1557 | soup = self.soup(html) | ||
1558 | self.assertEqual( | ||
1559 | "\N{SNOWMAN}".encode("utf8"), soup.b.encode_contents( | ||
1560 | encoding="utf8")) | ||
1561 | |||
1562 | def test_deprecated_renderContents(self): | ||
1563 | html = "<b>\N{SNOWMAN}</b>" | ||
1564 | soup = self.soup(html) | ||
1565 | self.assertEqual( | ||
1566 | "\N{SNOWMAN}".encode("utf8"), soup.b.renderContents()) | ||
1567 | |||
1568 | def test_repr(self): | ||
1569 | html = "<b>\N{SNOWMAN}</b>" | ||
1570 | soup = self.soup(html) | ||
1571 | if PY3K: | ||
1572 | self.assertEqual(html, repr(soup)) | ||
1573 | else: | ||
1574 | self.assertEqual(b'<b>\\u2603</b>', repr(soup)) | ||
1575 | |||
1576 | class TestNavigableStringSubclasses(SoupTest): | ||
1577 | |||
1578 | def test_cdata(self): | ||
1579 | # None of the current builders turn CDATA sections into CData | ||
1580 | # objects, but you can create them manually. | ||
1581 | soup = self.soup("") | ||
1582 | cdata = CData("foo") | ||
1583 | soup.insert(1, cdata) | ||
1584 | self.assertEqual(str(soup), "<![CDATA[foo]]>") | ||
1585 | self.assertEqual(soup.find(text="foo"), "foo") | ||
1586 | self.assertEqual(soup.contents[0], "foo") | ||
1587 | |||
1588 | def test_cdata_is_never_formatted(self): | ||
1589 | """Text inside a CData object is passed into the formatter. | ||
1590 | |||
1591 | But the return value is ignored. | ||
1592 | """ | ||
1593 | |||
1594 | self.count = 0 | ||
1595 | def increment(*args): | ||
1596 | self.count += 1 | ||
1597 | return "BITTER FAILURE" | ||
1598 | |||
1599 | soup = self.soup("") | ||
1600 | cdata = CData("<><><>") | ||
1601 | soup.insert(1, cdata) | ||
1602 | self.assertEqual( | ||
1603 | b"<![CDATA[<><><>]]>", soup.encode(formatter=increment)) | ||
1604 | self.assertEqual(1, self.count) | ||
1605 | |||
1606 | def test_doctype_ends_in_newline(self): | ||
1607 | # Unlike other NavigableString subclasses, a DOCTYPE always ends | ||
1608 | # in a newline. | ||
1609 | doctype = Doctype("foo") | ||
1610 | soup = self.soup("") | ||
1611 | soup.insert(1, doctype) | ||
1612 | self.assertEqual(soup.encode(), b"<!DOCTYPE foo>\n") | ||
1613 | |||
1614 | def test_declaration(self): | ||
1615 | d = Declaration("foo") | ||
1616 | self.assertEqual("<?foo?>", d.output_ready()) | ||
1617 | |||
1618 | class TestSoupSelector(TreeTest): | ||
1619 | |||
1620 | HTML = """ | ||
1621 | <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" | ||
1622 | "http://www.w3.org/TR/html4/strict.dtd"> | ||
1623 | <html> | ||
1624 | <head> | ||
1625 | <title>The title</title> | ||
1626 | <link rel="stylesheet" href="blah.css" type="text/css" id="l1"> | ||
1627 | </head> | ||
1628 | <body> | ||
1629 | <custom-dashed-tag class="dashed" id="dash1">Hello there.</custom-dashed-tag> | ||
1630 | <div id="main" class="fancy"> | ||
1631 | <div id="inner"> | ||
1632 | <h1 id="header1">An H1</h1> | ||
1633 | <p>Some text</p> | ||
1634 | <p class="onep" id="p1">Some more text</p> | ||
1635 | <h2 id="header2">An H2</h2> | ||
1636 | <p class="class1 class2 class3" id="pmulti">Another</p> | ||
1637 | <a href="http://bob.example.org/" rel="friend met" id="bob">Bob</a> | ||
1638 | <h2 id="header3">Another H2</h2> | ||
1639 | <a id="me" href="http://simonwillison.net/" rel="me">me</a> | ||
1640 | <span class="s1"> | ||
1641 | <a href="#" id="s1a1">span1a1</a> | ||
1642 | <a href="#" id="s1a2">span1a2 <span id="s1a2s1">test</span></a> | ||
1643 | <span class="span2"> | ||
1644 | <a href="#" id="s2a1">span2a1</a> | ||
1645 | </span> | ||
1646 | <span class="span3"></span> | ||
1647 | <custom-dashed-tag class="dashed" id="dash2"/> | ||
1648 | <div data-tag="dashedvalue" id="data1"/> | ||
1649 | </span> | ||
1650 | </div> | ||
1651 | <x id="xid"> | ||
1652 | <z id="zida"/> | ||
1653 | <z id="zidab"/> | ||
1654 | <z id="zidac"/> | ||
1655 | </x> | ||
1656 | <y id="yid"> | ||
1657 | <z id="zidb"/> | ||
1658 | </y> | ||
1659 | <p lang="en" id="lang-en">English</p> | ||
1660 | <p lang="en-gb" id="lang-en-gb">English UK</p> | ||
1661 | <p lang="en-us" id="lang-en-us">English US</p> | ||
1662 | <p lang="fr" id="lang-fr">French</p> | ||
1663 | </div> | ||
1664 | |||
1665 | <div id="footer"> | ||
1666 | </div> | ||
1667 | """ | ||
1668 | |||
1669 | def setUp(self): | ||
1670 | self.soup = BeautifulSoup(self.HTML, 'html.parser') | ||
1671 | |||
1672 | def assertSelects(self, selector, expected_ids): | ||
1673 | el_ids = [el['id'] for el in self.soup.select(selector)] | ||
1674 | el_ids.sort() | ||
1675 | expected_ids.sort() | ||
1676 | self.assertEqual(expected_ids, el_ids, | ||
1677 | "Selector %s, expected [%s], got [%s]" % ( | ||
1678 | selector, ', '.join(expected_ids), ', '.join(el_ids) | ||
1679 | ) | ||
1680 | ) | ||
1681 | |||
1682 | assertSelect = assertSelects | ||
1683 | |||
1684 | def assertSelectMultiple(self, *tests): | ||
1685 | for selector, expected_ids in tests: | ||
1686 | self.assertSelect(selector, expected_ids) | ||
1687 | |||
1688 | def test_one_tag_one(self): | ||
1689 | els = self.soup.select('title') | ||
1690 | self.assertEqual(len(els), 1) | ||
1691 | self.assertEqual(els[0].name, 'title') | ||
1692 | self.assertEqual(els[0].contents, ['The title']) | ||
1693 | |||
1694 | def test_one_tag_many(self): | ||
1695 | els = self.soup.select('div') | ||
1696 | self.assertEqual(len(els), 4) | ||
1697 | for div in els: | ||
1698 | self.assertEqual(div.name, 'div') | ||
1699 | |||
1700 | el = self.soup.select_one('div') | ||
1701 | self.assertEqual('main', el['id']) | ||
1702 | |||
1703 | def test_select_one_returns_none_if_no_match(self): | ||
1704 | match = self.soup.select_one('nonexistenttag') | ||
1705 | self.assertEqual(None, match) | ||
1706 | |||
1707 | |||
1708 | def test_tag_in_tag_one(self): | ||
1709 | els = self.soup.select('div div') | ||
1710 | self.assertSelects('div div', ['inner', 'data1']) | ||
1711 | |||
1712 | def test_tag_in_tag_many(self): | ||
1713 | for selector in ('html div', 'html body div', 'body div'): | ||
1714 | self.assertSelects(selector, ['data1', 'main', 'inner', 'footer']) | ||
1715 | |||
1716 | def test_tag_no_match(self): | ||
1717 | self.assertEqual(len(self.soup.select('del')), 0) | ||
1718 | |||
1719 | def test_invalid_tag(self): | ||
1720 | self.assertRaises(ValueError, self.soup.select, 'tag%t') | ||
1721 | |||
1722 | def test_select_dashed_tag_ids(self): | ||
1723 | self.assertSelects('custom-dashed-tag', ['dash1', 'dash2']) | ||
1724 | |||
1725 | def test_select_dashed_by_id(self): | ||
1726 | dashed = self.soup.select('custom-dashed-tag[id=\"dash2\"]') | ||
1727 | self.assertEqual(dashed[0].name, 'custom-dashed-tag') | ||
1728 | self.assertEqual(dashed[0]['id'], 'dash2') | ||
1729 | |||
1730 | def test_dashed_tag_text(self): | ||
1731 | self.assertEqual(self.soup.select('body > custom-dashed-tag')[0].text, 'Hello there.') | ||
1732 | |||
1733 | def test_select_dashed_matches_find_all(self): | ||
1734 | self.assertEqual(self.soup.select('custom-dashed-tag'), self.soup.find_all('custom-dashed-tag')) | ||
1735 | |||
1736 | def test_header_tags(self): | ||
1737 | self.assertSelectMultiple( | ||
1738 | ('h1', ['header1']), | ||
1739 | ('h2', ['header2', 'header3']), | ||
1740 | ) | ||
1741 | |||
1742 | def test_class_one(self): | ||
1743 | for selector in ('.onep', 'p.onep', 'html p.onep'): | ||
1744 | els = self.soup.select(selector) | ||
1745 | self.assertEqual(len(els), 1) | ||
1746 | self.assertEqual(els[0].name, 'p') | ||
1747 | self.assertEqual(els[0]['class'], ['onep']) | ||
1748 | |||
1749 | def test_class_mismatched_tag(self): | ||
1750 | els = self.soup.select('div.onep') | ||
1751 | self.assertEqual(len(els), 0) | ||
1752 | |||
1753 | def test_one_id(self): | ||
1754 | for selector in ('div#inner', '#inner', 'div div#inner'): | ||
1755 | self.assertSelects(selector, ['inner']) | ||
1756 | |||
1757 | def test_bad_id(self): | ||
1758 | els = self.soup.select('#doesnotexist') | ||
1759 | self.assertEqual(len(els), 0) | ||
1760 | |||
1761 | def test_items_in_id(self): | ||
1762 | els = self.soup.select('div#inner p') | ||
1763 | self.assertEqual(len(els), 3) | ||
1764 | for el in els: | ||
1765 | self.assertEqual(el.name, 'p') | ||
1766 | self.assertEqual(els[1]['class'], ['onep']) | ||
1767 | self.assertFalse(els[0].has_attr('class')) | ||
1768 | |||
1769 | def test_a_bunch_of_emptys(self): | ||
1770 | for selector in ('div#main del', 'div#main div.oops', 'div div#main'): | ||
1771 | self.assertEqual(len(self.soup.select(selector)), 0) | ||
1772 | |||
1773 | def test_multi_class_support(self): | ||
1774 | for selector in ('.class1', 'p.class1', '.class2', 'p.class2', | ||
1775 | '.class3', 'p.class3', 'html p.class2', 'div#inner .class2'): | ||
1776 | self.assertSelects(selector, ['pmulti']) | ||
1777 | |||
1778 | def test_multi_class_selection(self): | ||
1779 | for selector in ('.class1.class3', '.class3.class2', | ||
1780 | '.class1.class2.class3'): | ||
1781 | self.assertSelects(selector, ['pmulti']) | ||
1782 | |||
1783 | def test_child_selector(self): | ||
1784 | self.assertSelects('.s1 > a', ['s1a1', 's1a2']) | ||
1785 | self.assertSelects('.s1 > a span', ['s1a2s1']) | ||
1786 | |||
1787 | def test_child_selector_id(self): | ||
1788 | self.assertSelects('.s1 > a#s1a2 span', ['s1a2s1']) | ||
1789 | |||
1790 | def test_attribute_equals(self): | ||
1791 | self.assertSelectMultiple( | ||
1792 | ('p[class="onep"]', ['p1']), | ||
1793 | ('p[id="p1"]', ['p1']), | ||
1794 | ('[class="onep"]', ['p1']), | ||
1795 | ('[id="p1"]', ['p1']), | ||
1796 | ('link[rel="stylesheet"]', ['l1']), | ||
1797 | ('link[type="text/css"]', ['l1']), | ||
1798 | ('link[href="blah.css"]', ['l1']), | ||
1799 | ('link[href="no-blah.css"]', []), | ||
1800 | ('[rel="stylesheet"]', ['l1']), | ||
1801 | ('[type="text/css"]', ['l1']), | ||
1802 | ('[href="blah.css"]', ['l1']), | ||
1803 | ('[href="no-blah.css"]', []), | ||
1804 | ('p[href="no-blah.css"]', []), | ||
1805 | ('[href="no-blah.css"]', []), | ||
1806 | ) | ||
1807 | |||
1808 | def test_attribute_tilde(self): | ||
1809 | self.assertSelectMultiple( | ||
1810 | ('p[class~="class1"]', ['pmulti']), | ||
1811 | ('p[class~="class2"]', ['pmulti']), | ||
1812 | ('p[class~="class3"]', ['pmulti']), | ||
1813 | ('[class~="class1"]', ['pmulti']), | ||
1814 | ('[class~="class2"]', ['pmulti']), | ||
1815 | ('[class~="class3"]', ['pmulti']), | ||
1816 | ('a[rel~="friend"]', ['bob']), | ||
1817 | ('a[rel~="met"]', ['bob']), | ||
1818 | ('[rel~="friend"]', ['bob']), | ||
1819 | ('[rel~="met"]', ['bob']), | ||
1820 | ) | ||
1821 | |||
1822 | def test_attribute_startswith(self): | ||
1823 | self.assertSelectMultiple( | ||
1824 | ('[rel^="style"]', ['l1']), | ||
1825 | ('link[rel^="style"]', ['l1']), | ||
1826 | ('notlink[rel^="notstyle"]', []), | ||
1827 | ('[rel^="notstyle"]', []), | ||
1828 | ('link[rel^="notstyle"]', []), | ||
1829 | ('link[href^="bla"]', ['l1']), | ||
1830 | ('a[href^="http://"]', ['bob', 'me']), | ||
1831 | ('[href^="http://"]', ['bob', 'me']), | ||
1832 | ('[id^="p"]', ['pmulti', 'p1']), | ||
1833 | ('[id^="m"]', ['me', 'main']), | ||
1834 | ('div[id^="m"]', ['main']), | ||
1835 | ('a[id^="m"]', ['me']), | ||
1836 | ('div[data-tag^="dashed"]', ['data1']) | ||
1837 | ) | ||
1838 | |||
1839 | def test_attribute_endswith(self): | ||
1840 | self.assertSelectMultiple( | ||
1841 | ('[href$=".css"]', ['l1']), | ||
1842 | ('link[href$=".css"]', ['l1']), | ||
1843 | ('link[id$="1"]', ['l1']), | ||
1844 | ('[id$="1"]', ['data1', 'l1', 'p1', 'header1', 's1a1', 's2a1', 's1a2s1', 'dash1']), | ||
1845 | ('div[id$="1"]', ['data1']), | ||
1846 | ('[id$="noending"]', []), | ||
1847 | ) | ||
1848 | |||
1849 | def test_attribute_contains(self): | ||
1850 | self.assertSelectMultiple( | ||
1851 | # From test_attribute_startswith | ||
1852 | ('[rel*="style"]', ['l1']), | ||
1853 | ('link[rel*="style"]', ['l1']), | ||
1854 | ('notlink[rel*="notstyle"]', []), | ||
1855 | ('[rel*="notstyle"]', []), | ||
1856 | ('link[rel*="notstyle"]', []), | ||
1857 | ('link[href*="bla"]', ['l1']), | ||
1858 | ('[href*="http://"]', ['bob', 'me']), | ||
1859 | ('[id*="p"]', ['pmulti', 'p1']), | ||
1860 | ('div[id*="m"]', ['main']), | ||
1861 | ('a[id*="m"]', ['me']), | ||
1862 | # From test_attribute_endswith | ||
1863 | ('[href*=".css"]', ['l1']), | ||
1864 | ('link[href*=".css"]', ['l1']), | ||
1865 | ('link[id*="1"]', ['l1']), | ||
1866 | ('[id*="1"]', ['data1', 'l1', 'p1', 'header1', 's1a1', 's1a2', 's2a1', 's1a2s1', 'dash1']), | ||
1867 | ('div[id*="1"]', ['data1']), | ||
1868 | ('[id*="noending"]', []), | ||
1869 | # New for this test | ||
1870 | ('[href*="."]', ['bob', 'me', 'l1']), | ||
1871 | ('a[href*="."]', ['bob', 'me']), | ||
1872 | ('link[href*="."]', ['l1']), | ||
1873 | ('div[id*="n"]', ['main', 'inner']), | ||
1874 | ('div[id*="nn"]', ['inner']), | ||
1875 | ('div[data-tag*="edval"]', ['data1']) | ||
1876 | ) | ||
1877 | |||
1878 | def test_attribute_exact_or_hypen(self): | ||
1879 | self.assertSelectMultiple( | ||
1880 | ('p[lang|="en"]', ['lang-en', 'lang-en-gb', 'lang-en-us']), | ||
1881 | ('[lang|="en"]', ['lang-en', 'lang-en-gb', 'lang-en-us']), | ||
1882 | ('p[lang|="fr"]', ['lang-fr']), | ||
1883 | ('p[lang|="gb"]', []), | ||
1884 | ) | ||
1885 | |||
1886 | def test_attribute_exists(self): | ||
1887 | self.assertSelectMultiple( | ||
1888 | ('[rel]', ['l1', 'bob', 'me']), | ||
1889 | ('link[rel]', ['l1']), | ||
1890 | ('a[rel]', ['bob', 'me']), | ||
1891 | ('[lang]', ['lang-en', 'lang-en-gb', 'lang-en-us', 'lang-fr']), | ||
1892 | ('p[class]', ['p1', 'pmulti']), | ||
1893 | ('[blah]', []), | ||
1894 | ('p[blah]', []), | ||
1895 | ('div[data-tag]', ['data1']) | ||
1896 | ) | ||
1897 | |||
1898 | def test_unsupported_pseudoclass(self): | ||
1899 | self.assertRaises( | ||
1900 | NotImplementedError, self.soup.select, "a:no-such-pseudoclass") | ||
1901 | |||
1902 | self.assertRaises( | ||
1903 | NotImplementedError, self.soup.select, "a:nth-of-type(a)") | ||
1904 | |||
1905 | |||
1906 | def test_nth_of_type(self): | ||
1907 | # Try to select first paragraph | ||
1908 | els = self.soup.select('div#inner p:nth-of-type(1)') | ||
1909 | self.assertEqual(len(els), 1) | ||
1910 | self.assertEqual(els[0].string, 'Some text') | ||
1911 | |||
1912 | # Try to select third paragraph | ||
1913 | els = self.soup.select('div#inner p:nth-of-type(3)') | ||
1914 | self.assertEqual(len(els), 1) | ||
1915 | self.assertEqual(els[0].string, 'Another') | ||
1916 | |||
1917 | # Try to select (non-existent!) fourth paragraph | ||
1918 | els = self.soup.select('div#inner p:nth-of-type(4)') | ||
1919 | self.assertEqual(len(els), 0) | ||
1920 | |||
1921 | # Pass in an invalid value. | ||
1922 | self.assertRaises( | ||
1923 | ValueError, self.soup.select, 'div p:nth-of-type(0)') | ||
1924 | |||
1925 | def test_nth_of_type_direct_descendant(self): | ||
1926 | els = self.soup.select('div#inner > p:nth-of-type(1)') | ||
1927 | self.assertEqual(len(els), 1) | ||
1928 | self.assertEqual(els[0].string, 'Some text') | ||
1929 | |||
1930 | def test_id_child_selector_nth_of_type(self): | ||
1931 | self.assertSelects('#inner > p:nth-of-type(2)', ['p1']) | ||
1932 | |||
1933 | def test_select_on_element(self): | ||
1934 | # Other tests operate on the tree; this operates on an element | ||
1935 | # within the tree. | ||
1936 | inner = self.soup.find("div", id="main") | ||
1937 | selected = inner.select("div") | ||
1938 | # The <div id="inner"> tag was selected. The <div id="footer"> | ||
1939 | # tag was not. | ||
1940 | self.assertSelectsIDs(selected, ['inner', 'data1']) | ||
1941 | |||
1942 | def test_overspecified_child_id(self): | ||
1943 | self.assertSelects(".fancy #inner", ['inner']) | ||
1944 | self.assertSelects(".normal #inner", []) | ||
1945 | |||
1946 | def test_adjacent_sibling_selector(self): | ||
1947 | self.assertSelects('#p1 + h2', ['header2']) | ||
1948 | self.assertSelects('#p1 + h2 + p', ['pmulti']) | ||
1949 | self.assertSelects('#p1 + #header2 + .class1', ['pmulti']) | ||
1950 | self.assertEqual([], self.soup.select('#p1 + p')) | ||
1951 | |||
1952 | def test_general_sibling_selector(self): | ||
1953 | self.assertSelects('#p1 ~ h2', ['header2', 'header3']) | ||
1954 | self.assertSelects('#p1 ~ #header2', ['header2']) | ||
1955 | self.assertSelects('#p1 ~ h2 + a', ['me']) | ||
1956 | self.assertSelects('#p1 ~ h2 + [rel="me"]', ['me']) | ||
1957 | self.assertEqual([], self.soup.select('#inner ~ h2')) | ||
1958 | |||
1959 | def test_dangling_combinator(self): | ||
1960 | self.assertRaises(ValueError, self.soup.select, 'h1 >') | ||
1961 | |||
1962 | def test_sibling_combinator_wont_select_same_tag_twice(self): | ||
1963 | self.assertSelects('p[lang] ~ p', ['lang-en-gb', 'lang-en-us', 'lang-fr']) | ||
1964 | |||
1965 | # Test the selector grouping operator (the comma) | ||
1966 | def test_multiple_select(self): | ||
1967 | self.assertSelects('x, y', ['xid', 'yid']) | ||
1968 | |||
1969 | def test_multiple_select_with_no_space(self): | ||
1970 | self.assertSelects('x,y', ['xid', 'yid']) | ||
1971 | |||
1972 | def test_multiple_select_with_more_space(self): | ||
1973 | self.assertSelects('x, y', ['xid', 'yid']) | ||
1974 | |||
1975 | def test_multiple_select_duplicated(self): | ||
1976 | self.assertSelects('x, x', ['xid']) | ||
1977 | |||
1978 | def test_multiple_select_sibling(self): | ||
1979 | self.assertSelects('x, y ~ p[lang=fr]', ['xid', 'lang-fr']) | ||
1980 | |||
1981 | def test_multiple_select_tag_and_direct_descendant(self): | ||
1982 | self.assertSelects('x, y > z', ['xid', 'zidb']) | ||
1983 | |||
1984 | def test_multiple_select_direct_descendant_and_tags(self): | ||
1985 | self.assertSelects('div > x, y, z', ['xid', 'yid', 'zida', 'zidb', 'zidab', 'zidac']) | ||
1986 | |||
1987 | def test_multiple_select_indirect_descendant(self): | ||
1988 | self.assertSelects('div x,y, z', ['xid', 'yid', 'zida', 'zidb', 'zidab', 'zidac']) | ||
1989 | |||
1990 | def test_invalid_multiple_select(self): | ||
1991 | self.assertRaises(ValueError, self.soup.select, ',x, y') | ||
1992 | self.assertRaises(ValueError, self.soup.select, 'x,,y') | ||
1993 | |||
1994 | def test_multiple_select_attrs(self): | ||
1995 | self.assertSelects('p[lang=en], p[lang=en-gb]', ['lang-en', 'lang-en-gb']) | ||
1996 | |||
1997 | def test_multiple_select_ids(self): | ||
1998 | self.assertSelects('x, y > z[id=zida], z[id=zidab], z[id=zidb]', ['xid', 'zidb', 'zidab']) | ||
1999 | |||
2000 | def test_multiple_select_nested(self): | ||
2001 | self.assertSelects('body > div > x, y > z', ['xid', 'zidb']) | ||
2002 | |||
2003 | |||
2004 | |||
diff --git a/bitbake/lib/hashserv/__init__.py b/bitbake/lib/hashserv/__init__.py index 74367eb6b4..ac891e0174 100644 --- a/bitbake/lib/hashserv/__init__.py +++ b/bitbake/lib/hashserv/__init__.py | |||
@@ -13,6 +13,7 @@ from bb.asyncrpc.client import parse_address, ADDR_TYPE_UNIX, ADDR_TYPE_WS | |||
13 | 13 | ||
14 | User = namedtuple("User", ("username", "permissions")) | 14 | User = namedtuple("User", ("username", "permissions")) |
15 | 15 | ||
16 | |||
16 | def create_server( | 17 | def create_server( |
17 | addr, | 18 | addr, |
18 | dbname, | 19 | dbname, |
@@ -25,6 +26,7 @@ def create_server( | |||
25 | anon_perms=None, | 26 | anon_perms=None, |
26 | admin_username=None, | 27 | admin_username=None, |
27 | admin_password=None, | 28 | admin_password=None, |
29 | reuseport=False, | ||
28 | ): | 30 | ): |
29 | def sqlite_engine(): | 31 | def sqlite_engine(): |
30 | from .sqlite import DatabaseEngine | 32 | from .sqlite import DatabaseEngine |
@@ -60,9 +62,9 @@ def create_server( | |||
60 | s.start_unix_server(*a) | 62 | s.start_unix_server(*a) |
61 | elif typ == ADDR_TYPE_WS: | 63 | elif typ == ADDR_TYPE_WS: |
62 | url = urlparse(a[0]) | 64 | url = urlparse(a[0]) |
63 | s.start_websocket_server(url.hostname, url.port) | 65 | s.start_websocket_server(url.hostname, url.port, reuseport=reuseport) |
64 | else: | 66 | else: |
65 | s.start_tcp_server(*a) | 67 | s.start_tcp_server(*a, reuseport=reuseport) |
66 | 68 | ||
67 | return s | 69 | return s |
68 | 70 | ||
diff --git a/bitbake/lib/hashserv/client.py b/bitbake/lib/hashserv/client.py index 0b254beddd..a510f3284f 100644 --- a/bitbake/lib/hashserv/client.py +++ b/bitbake/lib/hashserv/client.py | |||
@@ -5,6 +5,7 @@ | |||
5 | 5 | ||
6 | import logging | 6 | import logging |
7 | import socket | 7 | import socket |
8 | import asyncio | ||
8 | import bb.asyncrpc | 9 | import bb.asyncrpc |
9 | import json | 10 | import json |
10 | from . import create_async_client | 11 | from . import create_async_client |
@@ -13,6 +14,66 @@ from . import create_async_client | |||
13 | logger = logging.getLogger("hashserv.client") | 14 | logger = logging.getLogger("hashserv.client") |
14 | 15 | ||
15 | 16 | ||
17 | class Batch(object): | ||
18 | def __init__(self): | ||
19 | self.done = False | ||
20 | self.cond = asyncio.Condition() | ||
21 | self.pending = [] | ||
22 | self.results = [] | ||
23 | self.sent_count = 0 | ||
24 | |||
25 | async def recv(self, socket): | ||
26 | while True: | ||
27 | async with self.cond: | ||
28 | await self.cond.wait_for(lambda: self.pending or self.done) | ||
29 | |||
30 | if not self.pending: | ||
31 | if self.done: | ||
32 | return | ||
33 | continue | ||
34 | |||
35 | r = await socket.recv() | ||
36 | self.results.append(r) | ||
37 | |||
38 | async with self.cond: | ||
39 | self.pending.pop(0) | ||
40 | |||
41 | async def send(self, socket, msgs): | ||
42 | try: | ||
43 | # In the event of a restart due to a reconnect, all in-flight | ||
44 | # messages need to be resent first to keep the result count in sync | ||
45 | for m in self.pending: | ||
46 | await socket.send(m) | ||
47 | |||
48 | for m in msgs: | ||
49 | # Add the message to the pending list before attempting to send | ||
50 | # it so that if the send fails it will be retried | ||
51 | async with self.cond: | ||
52 | self.pending.append(m) | ||
53 | self.cond.notify() | ||
54 | self.sent_count += 1 | ||
55 | |||
56 | await socket.send(m) | ||
57 | |||
58 | finally: | ||
59 | async with self.cond: | ||
60 | self.done = True | ||
61 | self.cond.notify() | ||
62 | |||
63 | async def process(self, socket, msgs): | ||
64 | await asyncio.gather( | ||
65 | self.recv(socket), | ||
66 | self.send(socket, msgs), | ||
67 | ) | ||
68 | |||
69 | if len(self.results) != self.sent_count: | ||
70 | raise ValueError( | ||
71 | f"Expected result count {len(self.results)}. Expected {self.sent_count}" | ||
72 | ) | ||
73 | |||
74 | return self.results | ||
75 | |||
76 | |||
16 | class AsyncClient(bb.asyncrpc.AsyncClient): | 77 | class AsyncClient(bb.asyncrpc.AsyncClient): |
17 | MODE_NORMAL = 0 | 78 | MODE_NORMAL = 0 |
18 | MODE_GET_STREAM = 1 | 79 | MODE_GET_STREAM = 1 |
@@ -36,32 +97,52 @@ class AsyncClient(bb.asyncrpc.AsyncClient): | |||
36 | if become: | 97 | if become: |
37 | await self.become_user(become) | 98 | await self.become_user(become) |
38 | 99 | ||
39 | async def send_stream(self, mode, msg): | 100 | async def send_stream_batch(self, mode, msgs): |
101 | """ | ||
102 | Does a "batch" process of stream messages. This sends the query | ||
103 | messages as fast as possible, and simultaneously attempts to read the | ||
104 | messages back. This helps to mitigate the effects of latency to the | ||
105 | hash equivalence server by allowing multiple queries to be "in-flight" | ||
106 | at once | ||
107 | |||
108 | The implementation does more complicated tracking using a count of sent | ||
109 | messages so that `msgs` can be a generator function (i.e. its length is | ||
110 | unknown) | ||
111 | |||
112 | """ | ||
113 | |||
114 | b = Batch() | ||
115 | |||
40 | async def proc(): | 116 | async def proc(): |
117 | nonlocal b | ||
118 | |||
41 | await self._set_mode(mode) | 119 | await self._set_mode(mode) |
42 | await self.socket.send(msg) | 120 | return await b.process(self.socket, msgs) |
43 | return await self.socket.recv() | ||
44 | 121 | ||
45 | return await self._send_wrapper(proc) | 122 | return await self._send_wrapper(proc) |
46 | 123 | ||
47 | async def invoke(self, *args, **kwargs): | 124 | async def invoke(self, *args, skip_mode=False, **kwargs): |
48 | # It's OK if connection errors cause a failure here, because the mode | 125 | # It's OK if connection errors cause a failure here, because the mode |
49 | # is also reset to normal on a new connection | 126 | # is also reset to normal on a new connection |
50 | await self._set_mode(self.MODE_NORMAL) | 127 | if not skip_mode: |
128 | await self._set_mode(self.MODE_NORMAL) | ||
51 | return await super().invoke(*args, **kwargs) | 129 | return await super().invoke(*args, **kwargs) |
52 | 130 | ||
53 | async def _set_mode(self, new_mode): | 131 | async def _set_mode(self, new_mode): |
54 | async def stream_to_normal(): | 132 | async def stream_to_normal(): |
133 | # Check if already in normal mode (e.g. due to a connection reset) | ||
134 | if self.mode == self.MODE_NORMAL: | ||
135 | return "ok" | ||
55 | await self.socket.send("END") | 136 | await self.socket.send("END") |
56 | return await self.socket.recv() | 137 | return await self.socket.recv() |
57 | 138 | ||
58 | async def normal_to_stream(command): | 139 | async def normal_to_stream(command): |
59 | r = await self.invoke({command: None}) | 140 | r = await self.invoke({command: None}, skip_mode=True) |
60 | if r != "ok": | 141 | if r != "ok": |
142 | self.check_invoke_error(r) | ||
61 | raise ConnectionError( | 143 | raise ConnectionError( |
62 | f"Unable to transition to stream mode: Bad response from server {r!r}" | 144 | f"Unable to transition to stream mode: Bad response from server {r!r}" |
63 | ) | 145 | ) |
64 | |||
65 | self.logger.debug("Mode is now %s", command) | 146 | self.logger.debug("Mode is now %s", command) |
66 | 147 | ||
67 | if new_mode == self.mode: | 148 | if new_mode == self.mode: |
@@ -89,10 +170,15 @@ class AsyncClient(bb.asyncrpc.AsyncClient): | |||
89 | self.mode = new_mode | 170 | self.mode = new_mode |
90 | 171 | ||
91 | async def get_unihash(self, method, taskhash): | 172 | async def get_unihash(self, method, taskhash): |
92 | r = await self.send_stream(self.MODE_GET_STREAM, "%s %s" % (method, taskhash)) | 173 | r = await self.get_unihash_batch([(method, taskhash)]) |
93 | if not r: | 174 | return r[0] |
94 | return None | 175 | |
95 | return r | 176 | async def get_unihash_batch(self, args): |
177 | result = await self.send_stream_batch( | ||
178 | self.MODE_GET_STREAM, | ||
179 | (f"{method} {taskhash}" for method, taskhash in args), | ||
180 | ) | ||
181 | return [r if r else None for r in result] | ||
96 | 182 | ||
97 | async def report_unihash(self, taskhash, method, outhash, unihash, extra={}): | 183 | async def report_unihash(self, taskhash, method, outhash, unihash, extra={}): |
98 | m = extra.copy() | 184 | m = extra.copy() |
@@ -115,8 +201,12 @@ class AsyncClient(bb.asyncrpc.AsyncClient): | |||
115 | ) | 201 | ) |
116 | 202 | ||
117 | async def unihash_exists(self, unihash): | 203 | async def unihash_exists(self, unihash): |
118 | r = await self.send_stream(self.MODE_EXIST_STREAM, unihash) | 204 | r = await self.unihash_exists_batch([unihash]) |
119 | return r == "true" | 205 | return r[0] |
206 | |||
207 | async def unihash_exists_batch(self, unihashes): | ||
208 | result = await self.send_stream_batch(self.MODE_EXIST_STREAM, unihashes) | ||
209 | return [r == "true" for r in result] | ||
120 | 210 | ||
121 | async def get_outhash(self, method, outhash, taskhash, with_unihash=True): | 211 | async def get_outhash(self, method, outhash, taskhash, with_unihash=True): |
122 | return await self.invoke( | 212 | return await self.invoke( |
@@ -237,10 +327,12 @@ class Client(bb.asyncrpc.Client): | |||
237 | "connect_tcp", | 327 | "connect_tcp", |
238 | "connect_websocket", | 328 | "connect_websocket", |
239 | "get_unihash", | 329 | "get_unihash", |
330 | "get_unihash_batch", | ||
240 | "report_unihash", | 331 | "report_unihash", |
241 | "report_unihash_equiv", | 332 | "report_unihash_equiv", |
242 | "get_taskhash", | 333 | "get_taskhash", |
243 | "unihash_exists", | 334 | "unihash_exists", |
335 | "unihash_exists_batch", | ||
244 | "get_outhash", | 336 | "get_outhash", |
245 | "get_stats", | 337 | "get_stats", |
246 | "reset_stats", | 338 | "reset_stats", |
@@ -264,83 +356,3 @@ class Client(bb.asyncrpc.Client): | |||
264 | 356 | ||
265 | def _get_async_client(self): | 357 | def _get_async_client(self): |
266 | return AsyncClient(self.username, self.password) | 358 | return AsyncClient(self.username, self.password) |
267 | |||
268 | |||
269 | class ClientPool(bb.asyncrpc.ClientPool): | ||
270 | def __init__( | ||
271 | self, | ||
272 | address, | ||
273 | max_clients, | ||
274 | *, | ||
275 | username=None, | ||
276 | password=None, | ||
277 | become=None, | ||
278 | ): | ||
279 | super().__init__(max_clients) | ||
280 | self.address = address | ||
281 | self.username = username | ||
282 | self.password = password | ||
283 | self.become = become | ||
284 | |||
285 | async def _new_client(self): | ||
286 | client = await create_async_client( | ||
287 | self.address, | ||
288 | username=self.username, | ||
289 | password=self.password, | ||
290 | ) | ||
291 | if self.become: | ||
292 | await client.become_user(self.become) | ||
293 | return client | ||
294 | |||
295 | def _run_key_tasks(self, queries, call): | ||
296 | results = {key: None for key in queries.keys()} | ||
297 | |||
298 | def make_task(key, args): | ||
299 | async def task(client): | ||
300 | nonlocal results | ||
301 | unihash = await call(client, args) | ||
302 | results[key] = unihash | ||
303 | |||
304 | return task | ||
305 | |||
306 | def gen_tasks(): | ||
307 | for key, args in queries.items(): | ||
308 | yield make_task(key, args) | ||
309 | |||
310 | self.run_tasks(gen_tasks()) | ||
311 | return results | ||
312 | |||
313 | def get_unihashes(self, queries): | ||
314 | """ | ||
315 | Query multiple unihashes in parallel. | ||
316 | |||
317 | The queries argument is a dictionary with arbitrary key. The values | ||
318 | must be a tuple of (method, taskhash). | ||
319 | |||
320 | Returns a dictionary with a corresponding key for each input key, and | ||
321 | the value is the queried unihash (which might be none if the query | ||
322 | failed) | ||
323 | """ | ||
324 | |||
325 | async def call(client, args): | ||
326 | method, taskhash = args | ||
327 | return await client.get_unihash(method, taskhash) | ||
328 | |||
329 | return self._run_key_tasks(queries, call) | ||
330 | |||
331 | def unihashes_exist(self, queries): | ||
332 | """ | ||
333 | Query multiple unihash existence checks in parallel. | ||
334 | |||
335 | The queries argument is a dictionary with arbitrary key. The values | ||
336 | must be a unihash. | ||
337 | |||
338 | Returns a dictionary with a corresponding key for each input key, and | ||
339 | the value is True or False if the unihash is known by the server (or | ||
340 | None if there was a failure) | ||
341 | """ | ||
342 | |||
343 | async def call(client, unihash): | ||
344 | return await client.unihash_exists(unihash) | ||
345 | |||
346 | return self._run_key_tasks(queries, call) | ||
diff --git a/bitbake/lib/hashserv/tests.py b/bitbake/lib/hashserv/tests.py index 0809453cf8..13ccb20ebf 100644 --- a/bitbake/lib/hashserv/tests.py +++ b/bitbake/lib/hashserv/tests.py | |||
@@ -8,7 +8,6 @@ | |||
8 | from . import create_server, create_client | 8 | from . import create_server, create_client |
9 | from .server import DEFAULT_ANON_PERMS, ALL_PERMISSIONS | 9 | from .server import DEFAULT_ANON_PERMS, ALL_PERMISSIONS |
10 | from bb.asyncrpc import InvokeError | 10 | from bb.asyncrpc import InvokeError |
11 | from .client import ClientPool | ||
12 | import hashlib | 11 | import hashlib |
13 | import logging | 12 | import logging |
14 | import multiprocessing | 13 | import multiprocessing |
@@ -94,9 +93,6 @@ class HashEquivalenceTestSetup(object): | |||
94 | return self.start_client(self.auth_server_address, user["username"], user["token"]) | 93 | return self.start_client(self.auth_server_address, user["username"], user["token"]) |
95 | 94 | ||
96 | def setUp(self): | 95 | def setUp(self): |
97 | if sys.version_info < (3, 5, 0): | ||
98 | self.skipTest('Python 3.5 or later required') | ||
99 | |||
100 | self.temp_dir = tempfile.TemporaryDirectory(prefix='bb-hashserv') | 96 | self.temp_dir = tempfile.TemporaryDirectory(prefix='bb-hashserv') |
101 | self.addCleanup(self.temp_dir.cleanup) | 97 | self.addCleanup(self.temp_dir.cleanup) |
102 | 98 | ||
@@ -555,8 +551,7 @@ class HashEquivalenceCommonTests(object): | |||
555 | # shares a taskhash with Task 2 | 551 | # shares a taskhash with Task 2 |
556 | self.assertClientGetHash(self.client, taskhash2, unihash2) | 552 | self.assertClientGetHash(self.client, taskhash2, unihash2) |
557 | 553 | ||
558 | 554 | def test_get_unihash_batch(self): | |
559 | def test_client_pool_get_unihashes(self): | ||
560 | TEST_INPUT = ( | 555 | TEST_INPUT = ( |
561 | # taskhash outhash unihash | 556 | # taskhash outhash unihash |
562 | ('8aa96fcffb5831b3c2c0cb75f0431e3f8b20554a', 'afe240a439959ce86f5e322f8c208e1fedefea9e813f2140c81af866cc9edf7e','218e57509998197d570e2c98512d0105985dffc9'), | 557 | ('8aa96fcffb5831b3c2c0cb75f0431e3f8b20554a', 'afe240a439959ce86f5e322f8c208e1fedefea9e813f2140c81af866cc9edf7e','218e57509998197d570e2c98512d0105985dffc9'), |
@@ -573,28 +568,27 @@ class HashEquivalenceCommonTests(object): | |||
573 | "6b6be7a84ab179b4240c4302518dc3f6", | 568 | "6b6be7a84ab179b4240c4302518dc3f6", |
574 | ) | 569 | ) |
575 | 570 | ||
576 | with ClientPool(self.server_address, 10) as client_pool: | 571 | for taskhash, outhash, unihash in TEST_INPUT: |
577 | for taskhash, outhash, unihash in TEST_INPUT: | 572 | self.client.report_unihash(taskhash, self.METHOD, outhash, unihash) |
578 | self.client.report_unihash(taskhash, self.METHOD, outhash, unihash) | ||
579 | |||
580 | query = {idx: (self.METHOD, data[0]) for idx, data in enumerate(TEST_INPUT)} | ||
581 | for idx, taskhash in enumerate(EXTRA_QUERIES): | ||
582 | query[idx + len(TEST_INPUT)] = (self.METHOD, taskhash) | ||
583 | |||
584 | result = client_pool.get_unihashes(query) | ||
585 | |||
586 | self.assertDictEqual(result, { | ||
587 | 0: "218e57509998197d570e2c98512d0105985dffc9", | ||
588 | 1: "218e57509998197d570e2c98512d0105985dffc9", | ||
589 | 2: "218e57509998197d570e2c98512d0105985dffc9", | ||
590 | 3: "3b5d3d83f07f259e9086fcb422c855286e18a57d", | ||
591 | 4: "f46d3fbb439bd9b921095da657a4de906510d2cd", | ||
592 | 5: "f46d3fbb439bd9b921095da657a4de906510d2cd", | ||
593 | 6: "05d2a63c81e32f0a36542ca677e8ad852365c538", | ||
594 | 7: None, | ||
595 | }) | ||
596 | 573 | ||
597 | def test_client_pool_unihash_exists(self): | 574 | |
575 | result = self.client.get_unihash_batch( | ||
576 | [(self.METHOD, data[0]) for data in TEST_INPUT] + | ||
577 | [(self.METHOD, e) for e in EXTRA_QUERIES] | ||
578 | ) | ||
579 | |||
580 | self.assertListEqual(result, [ | ||
581 | "218e57509998197d570e2c98512d0105985dffc9", | ||
582 | "218e57509998197d570e2c98512d0105985dffc9", | ||
583 | "218e57509998197d570e2c98512d0105985dffc9", | ||
584 | "3b5d3d83f07f259e9086fcb422c855286e18a57d", | ||
585 | "f46d3fbb439bd9b921095da657a4de906510d2cd", | ||
586 | "f46d3fbb439bd9b921095da657a4de906510d2cd", | ||
587 | "05d2a63c81e32f0a36542ca677e8ad852365c538", | ||
588 | None, | ||
589 | ]) | ||
590 | |||
591 | def test_unihash_exists_batch(self): | ||
598 | TEST_INPUT = ( | 592 | TEST_INPUT = ( |
599 | # taskhash outhash unihash | 593 | # taskhash outhash unihash |
600 | ('8aa96fcffb5831b3c2c0cb75f0431e3f8b20554a', 'afe240a439959ce86f5e322f8c208e1fedefea9e813f2140c81af866cc9edf7e','218e57509998197d570e2c98512d0105985dffc9'), | 594 | ('8aa96fcffb5831b3c2c0cb75f0431e3f8b20554a', 'afe240a439959ce86f5e322f8c208e1fedefea9e813f2140c81af866cc9edf7e','218e57509998197d570e2c98512d0105985dffc9'), |
@@ -614,28 +608,24 @@ class HashEquivalenceCommonTests(object): | |||
614 | result_unihashes = set() | 608 | result_unihashes = set() |
615 | 609 | ||
616 | 610 | ||
617 | with ClientPool(self.server_address, 10) as client_pool: | 611 | for taskhash, outhash, unihash in TEST_INPUT: |
618 | for taskhash, outhash, unihash in TEST_INPUT: | 612 | result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash) |
619 | result = self.client.report_unihash(taskhash, self.METHOD, outhash, unihash) | 613 | result_unihashes.add(result["unihash"]) |
620 | result_unihashes.add(result["unihash"]) | ||
621 | |||
622 | query = {} | ||
623 | expected = {} | ||
624 | 614 | ||
625 | for _, _, unihash in TEST_INPUT: | 615 | query = [] |
626 | idx = len(query) | 616 | expected = [] |
627 | query[idx] = unihash | ||
628 | expected[idx] = unihash in result_unihashes | ||
629 | 617 | ||
618 | for _, _, unihash in TEST_INPUT: | ||
619 | query.append(unihash) | ||
620 | expected.append(unihash in result_unihashes) | ||
630 | 621 | ||
631 | for unihash in EXTRA_QUERIES: | ||
632 | idx = len(query) | ||
633 | query[idx] = unihash | ||
634 | expected[idx] = False | ||
635 | 622 | ||
636 | result = client_pool.unihashes_exist(query) | 623 | for unihash in EXTRA_QUERIES: |
637 | self.assertDictEqual(result, expected) | 624 | query.append(unihash) |
625 | expected.append(False) | ||
638 | 626 | ||
627 | result = self.client.unihash_exists_batch(query) | ||
628 | self.assertListEqual(result, expected) | ||
639 | 629 | ||
640 | def test_auth_read_perms(self): | 630 | def test_auth_read_perms(self): |
641 | admin_client = self.start_auth_server() | 631 | admin_client = self.start_auth_server() |
diff --git a/bitbake/lib/prserv/__init__.py b/bitbake/lib/prserv/__init__.py index 0e0aa34d0e..a817b03c1e 100644 --- a/bitbake/lib/prserv/__init__.py +++ b/bitbake/lib/prserv/__init__.py | |||
@@ -4,17 +4,92 @@ | |||
4 | # SPDX-License-Identifier: GPL-2.0-only | 4 | # SPDX-License-Identifier: GPL-2.0-only |
5 | # | 5 | # |
6 | 6 | ||
7 | __version__ = "1.0.0" | ||
8 | 7 | ||
9 | import os, time | 8 | __version__ = "2.0.0" |
10 | import sys, logging | ||
11 | 9 | ||
12 | def init_logger(logfile, loglevel): | 10 | import logging |
13 | numeric_level = getattr(logging, loglevel.upper(), None) | 11 | logger = logging.getLogger("BitBake.PRserv") |
14 | if not isinstance(numeric_level, int): | ||
15 | raise ValueError("Invalid log level: %s" % loglevel) | ||
16 | FORMAT = "%(asctime)-15s %(message)s" | ||
17 | logging.basicConfig(level=numeric_level, filename=logfile, format=FORMAT) | ||
18 | 12 | ||
19 | class NotFoundError(Exception): | 13 | from bb.asyncrpc.client import parse_address, ADDR_TYPE_UNIX, ADDR_TYPE_WS |
20 | pass | 14 | |
15 | def create_server(addr, dbpath, upstream=None, read_only=False): | ||
16 | from . import serv | ||
17 | |||
18 | s = serv.PRServer(dbpath, upstream=upstream, read_only=read_only) | ||
19 | host, port = addr.split(":") | ||
20 | s.start_tcp_server(host, int(port)) | ||
21 | |||
22 | return s | ||
23 | |||
24 | def increase_revision(ver): | ||
25 | """Take a revision string such as "1" or "1.2.3" or even a number and increase its last number | ||
26 | This fails if the last number is not an integer""" | ||
27 | |||
28 | fields=str(ver).split('.') | ||
29 | last = fields[-1] | ||
30 | |||
31 | try: | ||
32 | val = int(last) | ||
33 | except Exception as e: | ||
34 | logger.critical("Unable to increase revision value %s: %s" % (ver, e)) | ||
35 | raise e | ||
36 | |||
37 | return ".".join(fields[0:-1] + list(str(val + 1))) | ||
38 | |||
39 | def _revision_greater_or_equal(rev1, rev2): | ||
40 | """Compares x.y.z revision numbers, using integer comparison | ||
41 | Returns True if rev1 is greater or equal to rev2""" | ||
42 | |||
43 | fields1 = rev1.split(".") | ||
44 | fields2 = rev2.split(".") | ||
45 | l1 = len(fields1) | ||
46 | l2 = len(fields2) | ||
47 | |||
48 | for i in range(l1): | ||
49 | val1 = int(fields1[i]) | ||
50 | if i < l2: | ||
51 | val2 = int(fields2[i]) | ||
52 | if val2 < val1: | ||
53 | return True | ||
54 | elif val2 > val1: | ||
55 | return False | ||
56 | else: | ||
57 | return True | ||
58 | return True | ||
59 | |||
60 | def revision_smaller(rev1, rev2): | ||
61 | """Compares x.y.z revision numbers, using integer comparison | ||
62 | Returns True if rev1 is strictly smaller than rev2""" | ||
63 | return not(_revision_greater_or_equal(rev1, rev2)) | ||
64 | |||
65 | def revision_greater(rev1, rev2): | ||
66 | """Compares x.y.z revision numbers, using integer comparison | ||
67 | Returns True if rev1 is strictly greater than rev2""" | ||
68 | return _revision_greater_or_equal(rev1, rev2) and (rev1 != rev2) | ||
69 | |||
70 | def create_client(addr): | ||
71 | from . import client | ||
72 | |||
73 | c = client.PRClient() | ||
74 | |||
75 | try: | ||
76 | (typ, a) = parse_address(addr) | ||
77 | c.connect_tcp(*a) | ||
78 | return c | ||
79 | except Exception as e: | ||
80 | c.close() | ||
81 | raise e | ||
82 | |||
83 | async def create_async_client(addr): | ||
84 | from . import client | ||
85 | |||
86 | c = client.PRAsyncClient() | ||
87 | |||
88 | try: | ||
89 | (typ, a) = parse_address(addr) | ||
90 | await c.connect_tcp(*a) | ||
91 | return c | ||
92 | |||
93 | except Exception as e: | ||
94 | await c.close() | ||
95 | raise e | ||
diff --git a/bitbake/lib/prserv/client.py b/bitbake/lib/prserv/client.py index 8471ee3046..9f5794c433 100644 --- a/bitbake/lib/prserv/client.py +++ b/bitbake/lib/prserv/client.py | |||
@@ -6,6 +6,7 @@ | |||
6 | 6 | ||
7 | import logging | 7 | import logging |
8 | import bb.asyncrpc | 8 | import bb.asyncrpc |
9 | from . import create_async_client | ||
9 | 10 | ||
10 | logger = logging.getLogger("BitBake.PRserv") | 11 | logger = logging.getLogger("BitBake.PRserv") |
11 | 12 | ||
@@ -13,16 +14,16 @@ class PRAsyncClient(bb.asyncrpc.AsyncClient): | |||
13 | def __init__(self): | 14 | def __init__(self): |
14 | super().__init__("PRSERVICE", "1.0", logger) | 15 | super().__init__("PRSERVICE", "1.0", logger) |
15 | 16 | ||
16 | async def getPR(self, version, pkgarch, checksum): | 17 | async def getPR(self, version, pkgarch, checksum, history=False): |
17 | response = await self.invoke( | 18 | response = await self.invoke( |
18 | {"get-pr": {"version": version, "pkgarch": pkgarch, "checksum": checksum}} | 19 | {"get-pr": {"version": version, "pkgarch": pkgarch, "checksum": checksum, "history": history}} |
19 | ) | 20 | ) |
20 | if response: | 21 | if response: |
21 | return response["value"] | 22 | return response["value"] |
22 | 23 | ||
23 | async def test_pr(self, version, pkgarch, checksum): | 24 | async def test_pr(self, version, pkgarch, checksum, history=False): |
24 | response = await self.invoke( | 25 | response = await self.invoke( |
25 | {"test-pr": {"version": version, "pkgarch": pkgarch, "checksum": checksum}} | 26 | {"test-pr": {"version": version, "pkgarch": pkgarch, "checksum": checksum, "history": history}} |
26 | ) | 27 | ) |
27 | if response: | 28 | if response: |
28 | return response["value"] | 29 | return response["value"] |
@@ -48,9 +49,9 @@ class PRAsyncClient(bb.asyncrpc.AsyncClient): | |||
48 | if response: | 49 | if response: |
49 | return response["value"] | 50 | return response["value"] |
50 | 51 | ||
51 | async def export(self, version, pkgarch, checksum, colinfo): | 52 | async def export(self, version, pkgarch, checksum, colinfo, history=False): |
52 | response = await self.invoke( | 53 | response = await self.invoke( |
53 | {"export": {"version": version, "pkgarch": pkgarch, "checksum": checksum, "colinfo": colinfo}} | 54 | {"export": {"version": version, "pkgarch": pkgarch, "checksum": checksum, "colinfo": colinfo, "history": history}} |
54 | ) | 55 | ) |
55 | if response: | 56 | if response: |
56 | return (response["metainfo"], response["datainfo"]) | 57 | return (response["metainfo"], response["datainfo"]) |
@@ -65,7 +66,7 @@ class PRAsyncClient(bb.asyncrpc.AsyncClient): | |||
65 | class PRClient(bb.asyncrpc.Client): | 66 | class PRClient(bb.asyncrpc.Client): |
66 | def __init__(self): | 67 | def __init__(self): |
67 | super().__init__() | 68 | super().__init__() |
68 | self._add_methods("getPR", "test_pr", "test_package", "importone", "export", "is_readonly") | 69 | self._add_methods("getPR", "test_pr", "test_package", "max_package_pr", "importone", "export", "is_readonly") |
69 | 70 | ||
70 | def _get_async_client(self): | 71 | def _get_async_client(self): |
71 | return PRAsyncClient() | 72 | return PRAsyncClient() |
diff --git a/bitbake/lib/prserv/db.py b/bitbake/lib/prserv/db.py index eb41508198..2da493ddf5 100644 --- a/bitbake/lib/prserv/db.py +++ b/bitbake/lib/prserv/db.py | |||
@@ -8,19 +8,13 @@ import logging | |||
8 | import os.path | 8 | import os.path |
9 | import errno | 9 | import errno |
10 | import prserv | 10 | import prserv |
11 | import time | 11 | import sqlite3 |
12 | 12 | ||
13 | try: | 13 | from contextlib import closing |
14 | import sqlite3 | 14 | from . import increase_revision, revision_greater, revision_smaller |
15 | except ImportError: | ||
16 | from pysqlite2 import dbapi2 as sqlite3 | ||
17 | 15 | ||
18 | logger = logging.getLogger("BitBake.PRserv") | 16 | logger = logging.getLogger("BitBake.PRserv") |
19 | 17 | ||
20 | sqlversion = sqlite3.sqlite_version_info | ||
21 | if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3): | ||
22 | raise Exception("sqlite3 version 3.3.0 or later is required.") | ||
23 | |||
24 | # | 18 | # |
25 | # "No History" mode - for a given query tuple (version, pkgarch, checksum), | 19 | # "No History" mode - for a given query tuple (version, pkgarch, checksum), |
26 | # the returned value will be the largest among all the values of the same | 20 | # the returned value will be the largest among all the values of the same |
@@ -29,287 +23,232 @@ if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3): | |||
29 | # "History" mode - Return a new higher value for previously unseen query | 23 | # "History" mode - Return a new higher value for previously unseen query |
30 | # tuple (version, pkgarch, checksum), otherwise return historical value. | 24 | # tuple (version, pkgarch, checksum), otherwise return historical value. |
31 | # Value can decrement if returning to a previous build. | 25 | # Value can decrement if returning to a previous build. |
32 | # | ||
33 | 26 | ||
34 | class PRTable(object): | 27 | class PRTable(object): |
35 | def __init__(self, conn, table, nohist, read_only): | 28 | def __init__(self, conn, table, read_only): |
36 | self.conn = conn | 29 | self.conn = conn |
37 | self.nohist = nohist | ||
38 | self.read_only = read_only | 30 | self.read_only = read_only |
39 | self.dirty = False | 31 | self.table = table |
40 | if nohist: | 32 | |
41 | self.table = "%s_nohist" % table | 33 | # Creating the table even if the server is read-only. |
42 | else: | 34 | # This avoids a race condition if a shared database |
43 | self.table = "%s_hist" % table | 35 | # is accessed by a read-only server first. |
44 | 36 | ||
45 | if self.read_only: | 37 | with closing(self.conn.cursor()) as cursor: |
46 | table_exists = self._execute( | 38 | cursor.execute("CREATE TABLE IF NOT EXISTS %s \ |
47 | "SELECT count(*) FROM sqlite_master \ | ||
48 | WHERE type='table' AND name='%s'" % (self.table)) | ||
49 | if not table_exists: | ||
50 | raise prserv.NotFoundError | ||
51 | else: | ||
52 | self._execute("CREATE TABLE IF NOT EXISTS %s \ | ||
53 | (version TEXT NOT NULL, \ | 39 | (version TEXT NOT NULL, \ |
54 | pkgarch TEXT NOT NULL, \ | 40 | pkgarch TEXT NOT NULL, \ |
55 | checksum TEXT NOT NULL, \ | 41 | checksum TEXT NOT NULL, \ |
56 | value INTEGER, \ | 42 | value TEXT, \ |
57 | PRIMARY KEY (version, pkgarch, checksum));" % self.table) | 43 | PRIMARY KEY (version, pkgarch, checksum, value));" % self.table) |
58 | |||
59 | def _execute(self, *query): | ||
60 | """Execute a query, waiting to acquire a lock if necessary""" | ||
61 | start = time.time() | ||
62 | end = start + 20 | ||
63 | while True: | ||
64 | try: | ||
65 | return self.conn.execute(*query) | ||
66 | except sqlite3.OperationalError as exc: | ||
67 | if "is locked" in str(exc) and end > time.time(): | ||
68 | continue | ||
69 | raise exc | ||
70 | |||
71 | def sync(self): | ||
72 | if not self.read_only: | ||
73 | self.conn.commit() | 44 | self.conn.commit() |
74 | self._execute("BEGIN EXCLUSIVE TRANSACTION") | ||
75 | 45 | ||
76 | def sync_if_dirty(self): | 46 | def _extremum_value(self, rows, is_max): |
77 | if self.dirty: | 47 | value = None |
78 | self.sync() | 48 | |
79 | self.dirty = False | 49 | for row in rows: |
50 | current_value = row[0] | ||
51 | if value is None: | ||
52 | value = current_value | ||
53 | else: | ||
54 | if is_max: | ||
55 | is_new_extremum = revision_greater(current_value, value) | ||
56 | else: | ||
57 | is_new_extremum = revision_smaller(current_value, value) | ||
58 | if is_new_extremum: | ||
59 | value = current_value | ||
60 | return value | ||
61 | |||
62 | def _max_value(self, rows): | ||
63 | return self._extremum_value(rows, True) | ||
64 | |||
65 | def _min_value(self, rows): | ||
66 | return self._extremum_value(rows, False) | ||
80 | 67 | ||
81 | def test_package(self, version, pkgarch): | 68 | def test_package(self, version, pkgarch): |
82 | """Returns whether the specified package version is found in the database for the specified architecture""" | 69 | """Returns whether the specified package version is found in the database for the specified architecture""" |
83 | 70 | ||
84 | # Just returns the value if found or None otherwise | 71 | # Just returns the value if found or None otherwise |
85 | data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=?;" % self.table, | 72 | with closing(self.conn.cursor()) as cursor: |
86 | (version, pkgarch)) | 73 | data=cursor.execute("SELECT value FROM %s WHERE version=? AND pkgarch=?;" % self.table, |
87 | row=data.fetchone() | 74 | (version, pkgarch)) |
88 | if row is not None: | 75 | row=data.fetchone() |
89 | return True | 76 | if row is not None: |
90 | else: | 77 | return True |
91 | return False | 78 | else: |
79 | return False | ||
80 | |||
81 | def test_checksum_value(self, version, pkgarch, checksum, value): | ||
82 | """Returns whether the specified value is found in the database for the specified package, architecture and checksum""" | ||
83 | |||
84 | with closing(self.conn.cursor()) as cursor: | ||
85 | data=cursor.execute("SELECT value FROM %s WHERE version=? AND pkgarch=? and checksum=? and value=?;" % self.table, | ||
86 | (version, pkgarch, checksum, value)) | ||
87 | row=data.fetchone() | ||
88 | if row is not None: | ||
89 | return True | ||
90 | else: | ||
91 | return False | ||
92 | 92 | ||
93 | def test_value(self, version, pkgarch, value): | 93 | def test_value(self, version, pkgarch, value): |
94 | """Returns whether the specified value is found in the database for the specified package and architecture""" | 94 | """Returns whether the specified value is found in the database for the specified package and architecture""" |
95 | 95 | ||
96 | # Just returns the value if found or None otherwise | 96 | # Just returns the value if found or None otherwise |
97 | data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? and value=?;" % self.table, | 97 | with closing(self.conn.cursor()) as cursor: |
98 | (version, pkgarch, value)) | 98 | data=cursor.execute("SELECT value FROM %s WHERE version=? AND pkgarch=? and value=?;" % self.table, |
99 | row=data.fetchone() | 99 | (version, pkgarch, value)) |
100 | if row is not None: | 100 | row=data.fetchone() |
101 | return True | 101 | if row is not None: |
102 | else: | 102 | return True |
103 | return False | 103 | else: |
104 | return False | ||
104 | 105 | ||
105 | def find_value(self, version, pkgarch, checksum): | 106 | |
107 | def find_package_max_value(self, version, pkgarch): | ||
108 | """Returns the greatest value for (version, pkgarch), or None if not found. Doesn't create a new value""" | ||
109 | |||
110 | with closing(self.conn.cursor()) as cursor: | ||
111 | data = cursor.execute("SELECT value FROM %s where version=? AND pkgarch=?;" % (self.table), | ||
112 | (version, pkgarch)) | ||
113 | rows = data.fetchall() | ||
114 | value = self._max_value(rows) | ||
115 | return value | ||
116 | |||
117 | def find_value(self, version, pkgarch, checksum, history=False): | ||
106 | """Returns the value for the specified checksum if found or None otherwise.""" | 118 | """Returns the value for the specified checksum if found or None otherwise.""" |
107 | 119 | ||
108 | data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table, | 120 | if history: |
109 | (version, pkgarch, checksum)) | 121 | return self.find_min_value(version, pkgarch, checksum) |
110 | row=data.fetchone() | ||
111 | if row is not None: | ||
112 | return row[0] | ||
113 | else: | 122 | else: |
114 | return None | 123 | return self.find_max_value(version, pkgarch, checksum) |
115 | 124 | ||
116 | def find_max_value(self, version, pkgarch): | ||
117 | """Returns the greatest value for (version, pkgarch), or None if not found. Doesn't create a new value""" | ||
118 | 125 | ||
119 | data = self._execute("SELECT max(value) FROM %s where version=? AND pkgarch=?;" % (self.table), | 126 | def _find_extremum_value(self, version, pkgarch, checksum, is_max): |
120 | (version, pkgarch)) | 127 | """Returns the maximum (if is_max is True) or minimum (if is_max is False) value |
121 | row = data.fetchone() | 128 | for (version, pkgarch, checksum), or None if not found. Doesn't create a new value""" |
122 | if row is not None: | ||
123 | return row[0] | ||
124 | else: | ||
125 | return None | ||
126 | |||
127 | def _get_value_hist(self, version, pkgarch, checksum): | ||
128 | data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table, | ||
129 | (version, pkgarch, checksum)) | ||
130 | row=data.fetchone() | ||
131 | if row is not None: | ||
132 | return row[0] | ||
133 | else: | ||
134 | #no value found, try to insert | ||
135 | if self.read_only: | ||
136 | data = self._execute("SELECT ifnull(max(value)+1, 0) FROM %s where version=? AND pkgarch=?;" % (self.table), | ||
137 | (version, pkgarch)) | ||
138 | row = data.fetchone() | ||
139 | if row is not None: | ||
140 | return row[0] | ||
141 | else: | ||
142 | return 0 | ||
143 | 129 | ||
144 | try: | 130 | with closing(self.conn.cursor()) as cursor: |
145 | self._execute("INSERT INTO %s VALUES (?, ?, ?, (select ifnull(max(value)+1, 0) from %s where version=? AND pkgarch=?));" | 131 | data = cursor.execute("SELECT value FROM %s where version=? AND pkgarch=? AND checksum=?;" % (self.table), |
146 | % (self.table, self.table), | 132 | (version, pkgarch, checksum)) |
147 | (version, pkgarch, checksum, version, pkgarch)) | 133 | rows = data.fetchall() |
148 | except sqlite3.IntegrityError as exc: | 134 | return self._extremum_value(rows, is_max) |
149 | logger.error(str(exc)) | ||
150 | 135 | ||
151 | self.dirty = True | 136 | def find_max_value(self, version, pkgarch, checksum): |
137 | return self._find_extremum_value(version, pkgarch, checksum, True) | ||
152 | 138 | ||
153 | data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table, | 139 | def find_min_value(self, version, pkgarch, checksum): |
154 | (version, pkgarch, checksum)) | 140 | return self._find_extremum_value(version, pkgarch, checksum, False) |
155 | row=data.fetchone() | 141 | |
156 | if row is not None: | 142 | def find_new_subvalue(self, version, pkgarch, base): |
157 | return row[0] | 143 | """Take and increase the greatest "<base>.y" value for (version, pkgarch), or return "<base>.0" if not found. |
158 | else: | 144 | This doesn't store a new value.""" |
159 | raise prserv.NotFoundError | 145 | |
160 | 146 | with closing(self.conn.cursor()) as cursor: | |
161 | def _get_value_no_hist(self, version, pkgarch, checksum): | 147 | data = cursor.execute("SELECT value FROM %s where version=? AND pkgarch=? AND value LIKE '%s.%%';" % (self.table, base), |
162 | data=self._execute("SELECT value FROM %s \ | 148 | (version, pkgarch)) |
163 | WHERE version=? AND pkgarch=? AND checksum=? AND \ | 149 | rows = data.fetchall() |
164 | value >= (select max(value) from %s where version=? AND pkgarch=?);" | 150 | value = self._max_value(rows) |
165 | % (self.table, self.table), | 151 | |
166 | (version, pkgarch, checksum, version, pkgarch)) | 152 | if value is not None: |
167 | row=data.fetchone() | 153 | return increase_revision(value) |
168 | if row is not None: | ||
169 | return row[0] | ||
170 | else: | ||
171 | #no value found, try to insert | ||
172 | if self.read_only: | ||
173 | data = self._execute("SELECT ifnull(max(value)+1, 0) FROM %s where version=? AND pkgarch=?;" % (self.table), | ||
174 | (version, pkgarch)) | ||
175 | return data.fetchone()[0] | ||
176 | |||
177 | try: | ||
178 | self._execute("INSERT OR REPLACE INTO %s VALUES (?, ?, ?, (select ifnull(max(value)+1, 0) from %s where version=? AND pkgarch=?));" | ||
179 | % (self.table, self.table), | ||
180 | (version, pkgarch, checksum, version, pkgarch)) | ||
181 | except sqlite3.IntegrityError as exc: | ||
182 | logger.error(str(exc)) | ||
183 | self.conn.rollback() | ||
184 | |||
185 | self.dirty = True | ||
186 | |||
187 | data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table, | ||
188 | (version, pkgarch, checksum)) | ||
189 | row=data.fetchone() | ||
190 | if row is not None: | ||
191 | return row[0] | ||
192 | else: | 154 | else: |
193 | raise prserv.NotFoundError | 155 | return base + ".0" |
194 | 156 | ||
195 | def get_value(self, version, pkgarch, checksum): | 157 | def store_value(self, version, pkgarch, checksum, value): |
196 | if self.nohist: | 158 | """Store value in the database""" |
197 | return self._get_value_no_hist(version, pkgarch, checksum) | 159 | |
198 | else: | 160 | if not self.read_only and not self.test_checksum_value(version, pkgarch, checksum, value): |
199 | return self._get_value_hist(version, pkgarch, checksum) | 161 | with closing(self.conn.cursor()) as cursor: |
200 | 162 | cursor.execute("INSERT INTO %s VALUES (?, ?, ?, ?);" % (self.table), | |
201 | def _import_hist(self, version, pkgarch, checksum, value): | ||
202 | if self.read_only: | ||
203 | return None | ||
204 | |||
205 | val = None | ||
206 | data = self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table, | ||
207 | (version, pkgarch, checksum)) | ||
208 | row = data.fetchone() | ||
209 | if row is not None: | ||
210 | val=row[0] | ||
211 | else: | ||
212 | #no value found, try to insert | ||
213 | try: | ||
214 | self._execute("INSERT INTO %s VALUES (?, ?, ?, ?);" % (self.table), | ||
215 | (version, pkgarch, checksum, value)) | 163 | (version, pkgarch, checksum, value)) |
216 | except sqlite3.IntegrityError as exc: | 164 | self.conn.commit() |
217 | logger.error(str(exc)) | ||
218 | 165 | ||
219 | self.dirty = True | 166 | def _get_value(self, version, pkgarch, checksum, history): |
220 | 167 | ||
221 | data = self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table, | 168 | max_value = self.find_package_max_value(version, pkgarch) |
222 | (version, pkgarch, checksum)) | ||
223 | row = data.fetchone() | ||
224 | if row is not None: | ||
225 | val = row[0] | ||
226 | return val | ||
227 | 169 | ||
228 | def _import_no_hist(self, version, pkgarch, checksum, value): | 170 | if max_value is None: |
229 | if self.read_only: | 171 | # version, pkgarch completely unknown. Return initial value. |
230 | return None | 172 | return "0" |
231 | 173 | ||
232 | try: | 174 | value = self.find_value(version, pkgarch, checksum, history) |
233 | #try to insert | 175 | |
234 | self._execute("INSERT INTO %s VALUES (?, ?, ?, ?);" % (self.table), | 176 | if value is None: |
235 | (version, pkgarch, checksum, value)) | 177 | # version, pkgarch found but not checksum. Create a new value from the maximum one |
236 | except sqlite3.IntegrityError as exc: | 178 | return increase_revision(max_value) |
237 | #already have the record, try to update | 179 | |
238 | try: | 180 | if history: |
239 | self._execute("UPDATE %s SET value=? WHERE version=? AND pkgarch=? AND checksum=? AND value<?" | 181 | return value |
240 | % (self.table), | 182 | |
241 | (value, version, pkgarch, checksum, value)) | 183 | # "no history" mode - If the value is not the maximum value for the package, need to increase it. |
242 | except sqlite3.IntegrityError as exc: | 184 | if max_value > value: |
243 | logger.error(str(exc)) | 185 | return increase_revision(max_value) |
244 | |||
245 | self.dirty = True | ||
246 | |||
247 | data = self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=? AND value>=?;" % self.table, | ||
248 | (version, pkgarch, checksum, value)) | ||
249 | row=data.fetchone() | ||
250 | if row is not None: | ||
251 | return row[0] | ||
252 | else: | 186 | else: |
253 | return None | 187 | return value |
188 | |||
189 | def get_value(self, version, pkgarch, checksum, history): | ||
190 | value = self._get_value(version, pkgarch, checksum, history) | ||
191 | if not self.read_only: | ||
192 | self.store_value(version, pkgarch, checksum, value) | ||
193 | return value | ||
254 | 194 | ||
255 | def importone(self, version, pkgarch, checksum, value): | 195 | def importone(self, version, pkgarch, checksum, value): |
256 | if self.nohist: | 196 | self.store_value(version, pkgarch, checksum, value) |
257 | return self._import_no_hist(version, pkgarch, checksum, value) | 197 | return value |
258 | else: | ||
259 | return self._import_hist(version, pkgarch, checksum, value) | ||
260 | 198 | ||
261 | def export(self, version, pkgarch, checksum, colinfo): | 199 | def export(self, version, pkgarch, checksum, colinfo, history=False): |
262 | metainfo = {} | 200 | metainfo = {} |
263 | #column info | 201 | with closing(self.conn.cursor()) as cursor: |
264 | if colinfo: | 202 | #column info |
265 | metainfo["tbl_name"] = self.table | 203 | if colinfo: |
266 | metainfo["core_ver"] = prserv.__version__ | 204 | metainfo["tbl_name"] = self.table |
267 | metainfo["col_info"] = [] | 205 | metainfo["core_ver"] = prserv.__version__ |
268 | data = self._execute("PRAGMA table_info(%s);" % self.table) | 206 | metainfo["col_info"] = [] |
207 | data = cursor.execute("PRAGMA table_info(%s);" % self.table) | ||
208 | for row in data: | ||
209 | col = {} | ||
210 | col["name"] = row["name"] | ||
211 | col["type"] = row["type"] | ||
212 | col["notnull"] = row["notnull"] | ||
213 | col["dflt_value"] = row["dflt_value"] | ||
214 | col["pk"] = row["pk"] | ||
215 | metainfo["col_info"].append(col) | ||
216 | |||
217 | #data info | ||
218 | datainfo = [] | ||
219 | |||
220 | if history: | ||
221 | sqlstmt = "SELECT * FROM %s as T1 WHERE 1=1 " % self.table | ||
222 | else: | ||
223 | sqlstmt = "SELECT T1.version, T1.pkgarch, T1.checksum, T1.value FROM %s as T1, \ | ||
224 | (SELECT version, pkgarch, max(value) as maxvalue FROM %s GROUP BY version, pkgarch) as T2 \ | ||
225 | WHERE T1.version=T2.version AND T1.pkgarch=T2.pkgarch AND T1.value=T2.maxvalue " % (self.table, self.table) | ||
226 | sqlarg = [] | ||
227 | where = "" | ||
228 | if version: | ||
229 | where += "AND T1.version=? " | ||
230 | sqlarg.append(str(version)) | ||
231 | if pkgarch: | ||
232 | where += "AND T1.pkgarch=? " | ||
233 | sqlarg.append(str(pkgarch)) | ||
234 | if checksum: | ||
235 | where += "AND T1.checksum=? " | ||
236 | sqlarg.append(str(checksum)) | ||
237 | |||
238 | sqlstmt += where + ";" | ||
239 | |||
240 | if len(sqlarg): | ||
241 | data = cursor.execute(sqlstmt, tuple(sqlarg)) | ||
242 | else: | ||
243 | data = cursor.execute(sqlstmt) | ||
269 | for row in data: | 244 | for row in data: |
270 | col = {} | 245 | if row["version"]: |
271 | col["name"] = row["name"] | 246 | col = {} |
272 | col["type"] = row["type"] | 247 | col["version"] = row["version"] |
273 | col["notnull"] = row["notnull"] | 248 | col["pkgarch"] = row["pkgarch"] |
274 | col["dflt_value"] = row["dflt_value"] | 249 | col["checksum"] = row["checksum"] |
275 | col["pk"] = row["pk"] | 250 | col["value"] = row["value"] |
276 | metainfo["col_info"].append(col) | 251 | datainfo.append(col) |
277 | |||
278 | #data info | ||
279 | datainfo = [] | ||
280 | |||
281 | if self.nohist: | ||
282 | sqlstmt = "SELECT T1.version, T1.pkgarch, T1.checksum, T1.value FROM %s as T1, \ | ||
283 | (SELECT version, pkgarch, max(value) as maxvalue FROM %s GROUP BY version, pkgarch) as T2 \ | ||
284 | WHERE T1.version=T2.version AND T1.pkgarch=T2.pkgarch AND T1.value=T2.maxvalue " % (self.table, self.table) | ||
285 | else: | ||
286 | sqlstmt = "SELECT * FROM %s as T1 WHERE 1=1 " % self.table | ||
287 | sqlarg = [] | ||
288 | where = "" | ||
289 | if version: | ||
290 | where += "AND T1.version=? " | ||
291 | sqlarg.append(str(version)) | ||
292 | if pkgarch: | ||
293 | where += "AND T1.pkgarch=? " | ||
294 | sqlarg.append(str(pkgarch)) | ||
295 | if checksum: | ||
296 | where += "AND T1.checksum=? " | ||
297 | sqlarg.append(str(checksum)) | ||
298 | |||
299 | sqlstmt += where + ";" | ||
300 | |||
301 | if len(sqlarg): | ||
302 | data = self._execute(sqlstmt, tuple(sqlarg)) | ||
303 | else: | ||
304 | data = self._execute(sqlstmt) | ||
305 | for row in data: | ||
306 | if row["version"]: | ||
307 | col = {} | ||
308 | col["version"] = row["version"] | ||
309 | col["pkgarch"] = row["pkgarch"] | ||
310 | col["checksum"] = row["checksum"] | ||
311 | col["value"] = row["value"] | ||
312 | datainfo.append(col) | ||
313 | return (metainfo, datainfo) | 252 | return (metainfo, datainfo) |
314 | 253 | ||
315 | def dump_db(self, fd): | 254 | def dump_db(self, fd): |
@@ -322,9 +261,8 @@ class PRTable(object): | |||
322 | 261 | ||
323 | class PRData(object): | 262 | class PRData(object): |
324 | """Object representing the PR database""" | 263 | """Object representing the PR database""" |
325 | def __init__(self, filename, nohist=True, read_only=False): | 264 | def __init__(self, filename, read_only=False): |
326 | self.filename=os.path.abspath(filename) | 265 | self.filename=os.path.abspath(filename) |
327 | self.nohist=nohist | ||
328 | self.read_only = read_only | 266 | self.read_only = read_only |
329 | #build directory hierarchy | 267 | #build directory hierarchy |
330 | try: | 268 | try: |
@@ -334,14 +272,15 @@ class PRData(object): | |||
334 | raise e | 272 | raise e |
335 | uri = "file:%s%s" % (self.filename, "?mode=ro" if self.read_only else "") | 273 | uri = "file:%s%s" % (self.filename, "?mode=ro" if self.read_only else "") |
336 | logger.debug("Opening PRServ database '%s'" % (uri)) | 274 | logger.debug("Opening PRServ database '%s'" % (uri)) |
337 | self.connection=sqlite3.connect(uri, uri=True, isolation_level="EXCLUSIVE", check_same_thread = False) | 275 | self.connection=sqlite3.connect(uri, uri=True) |
338 | self.connection.row_factory=sqlite3.Row | 276 | self.connection.row_factory=sqlite3.Row |
339 | if not self.read_only: | 277 | self.connection.execute("PRAGMA synchronous = OFF;") |
340 | self.connection.execute("pragma synchronous = off;") | 278 | self.connection.execute("PRAGMA journal_mode = WAL;") |
341 | self.connection.execute("PRAGMA journal_mode = MEMORY;") | 279 | self.connection.commit() |
342 | self._tables={} | 280 | self._tables={} |
343 | 281 | ||
344 | def disconnect(self): | 282 | def disconnect(self): |
283 | self.connection.commit() | ||
345 | self.connection.close() | 284 | self.connection.close() |
346 | 285 | ||
347 | def __getitem__(self, tblname): | 286 | def __getitem__(self, tblname): |
@@ -351,7 +290,7 @@ class PRData(object): | |||
351 | if tblname in self._tables: | 290 | if tblname in self._tables: |
352 | return self._tables[tblname] | 291 | return self._tables[tblname] |
353 | else: | 292 | else: |
354 | tableobj = self._tables[tblname] = PRTable(self.connection, tblname, self.nohist, self.read_only) | 293 | tableobj = self._tables[tblname] = PRTable(self.connection, tblname, self.read_only) |
355 | return tableobj | 294 | return tableobj |
356 | 295 | ||
357 | def __delitem__(self, tblname): | 296 | def __delitem__(self, tblname): |
@@ -359,3 +298,4 @@ class PRData(object): | |||
359 | del self._tables[tblname] | 298 | del self._tables[tblname] |
360 | logger.info("drop table %s" % (tblname)) | 299 | logger.info("drop table %s" % (tblname)) |
361 | self.connection.execute("DROP TABLE IF EXISTS %s;" % tblname) | 300 | self.connection.execute("DROP TABLE IF EXISTS %s;" % tblname) |
301 | self.connection.commit() | ||
diff --git a/bitbake/lib/prserv/serv.py b/bitbake/lib/prserv/serv.py index dc4be5b620..e175886308 100644 --- a/bitbake/lib/prserv/serv.py +++ b/bitbake/lib/prserv/serv.py | |||
@@ -12,6 +12,7 @@ import sqlite3 | |||
12 | import prserv | 12 | import prserv |
13 | import prserv.db | 13 | import prserv.db |
14 | import errno | 14 | import errno |
15 | from . import create_async_client, revision_smaller, increase_revision | ||
15 | import bb.asyncrpc | 16 | import bb.asyncrpc |
16 | 17 | ||
17 | logger = logging.getLogger("BitBake.PRserv") | 18 | logger = logging.getLogger("BitBake.PRserv") |
@@ -41,18 +42,16 @@ class PRServerClient(bb.asyncrpc.AsyncServerConnection): | |||
41 | try: | 42 | try: |
42 | return await super().dispatch_message(msg) | 43 | return await super().dispatch_message(msg) |
43 | except: | 44 | except: |
44 | self.server.table.sync() | ||
45 | raise | 45 | raise |
46 | else: | ||
47 | self.server.table.sync_if_dirty() | ||
48 | 46 | ||
49 | async def handle_test_pr(self, request): | 47 | async def handle_test_pr(self, request): |
50 | '''Finds the PR value corresponding to the request. If not found, returns None and doesn't insert a new value''' | 48 | '''Finds the PR value corresponding to the request. If not found, returns None and doesn't insert a new value''' |
51 | version = request["version"] | 49 | version = request["version"] |
52 | pkgarch = request["pkgarch"] | 50 | pkgarch = request["pkgarch"] |
53 | checksum = request["checksum"] | 51 | checksum = request["checksum"] |
52 | history = request["history"] | ||
54 | 53 | ||
55 | value = self.server.table.find_value(version, pkgarch, checksum) | 54 | value = self.server.table.find_value(version, pkgarch, checksum, history) |
56 | return {"value": value} | 55 | return {"value": value} |
57 | 56 | ||
58 | async def handle_test_package(self, request): | 57 | async def handle_test_package(self, request): |
@@ -68,22 +67,110 @@ class PRServerClient(bb.asyncrpc.AsyncServerConnection): | |||
68 | version = request["version"] | 67 | version = request["version"] |
69 | pkgarch = request["pkgarch"] | 68 | pkgarch = request["pkgarch"] |
70 | 69 | ||
71 | value = self.server.table.find_max_value(version, pkgarch) | 70 | value = self.server.table.find_package_max_value(version, pkgarch) |
72 | return {"value": value} | 71 | return {"value": value} |
73 | 72 | ||
74 | async def handle_get_pr(self, request): | 73 | async def handle_get_pr(self, request): |
75 | version = request["version"] | 74 | version = request["version"] |
76 | pkgarch = request["pkgarch"] | 75 | pkgarch = request["pkgarch"] |
77 | checksum = request["checksum"] | 76 | checksum = request["checksum"] |
77 | history = request["history"] | ||
78 | 78 | ||
79 | response = None | 79 | if self.upstream_client is None: |
80 | try: | 80 | value = self.server.table.get_value(version, pkgarch, checksum, history) |
81 | value = self.server.table.get_value(version, pkgarch, checksum) | 81 | return {"value": value} |
82 | response = {"value": value} | ||
83 | except prserv.NotFoundError: | ||
84 | self.logger.error("failure storing value in database for (%s, %s)",version, checksum) | ||
85 | 82 | ||
86 | return response | 83 | # We have an upstream server. |
84 | # Check whether the local server already knows the requested configuration. | ||
85 | # If the configuration is a new one, the generated value we will add will | ||
86 | # depend on what's on the upstream server. That's why we're calling find_value() | ||
87 | # instead of get_value() directly. | ||
88 | |||
89 | value = self.server.table.find_value(version, pkgarch, checksum, history) | ||
90 | upstream_max = await self.upstream_client.max_package_pr(version, pkgarch) | ||
91 | |||
92 | if value is not None: | ||
93 | |||
94 | # The configuration is already known locally. | ||
95 | |||
96 | if history: | ||
97 | value = self.server.table.get_value(version, pkgarch, checksum, history) | ||
98 | else: | ||
99 | existing_value = value | ||
100 | # In "no history", we need to make sure the value doesn't decrease | ||
101 | # and is at least greater than the maximum upstream value | ||
102 | # and the maximum local value | ||
103 | |||
104 | local_max = self.server.table.find_package_max_value(version, pkgarch) | ||
105 | if revision_smaller(value, local_max): | ||
106 | value = increase_revision(local_max) | ||
107 | |||
108 | if revision_smaller(value, upstream_max): | ||
109 | # Ask upstream whether it knows the checksum | ||
110 | upstream_value = await self.upstream_client.test_pr(version, pkgarch, checksum) | ||
111 | if upstream_value is None: | ||
112 | # Upstream doesn't have our checksum, let create a new one | ||
113 | value = upstream_max + ".0" | ||
114 | else: | ||
115 | # Fine to take the same value as upstream | ||
116 | value = upstream_max | ||
117 | |||
118 | if not value == existing_value and not self.server.read_only: | ||
119 | self.server.table.store_value(version, pkgarch, checksum, value) | ||
120 | |||
121 | return {"value": value} | ||
122 | |||
123 | # The configuration is a new one for the local server | ||
124 | # Let's ask the upstream server whether it knows it | ||
125 | |||
126 | known_upstream = await self.upstream_client.test_package(version, pkgarch) | ||
127 | |||
128 | if not known_upstream: | ||
129 | |||
130 | # The package is not known upstream, must be a local-only package | ||
131 | # Let's compute the PR number using the local-only method | ||
132 | |||
133 | value = self.server.table.get_value(version, pkgarch, checksum, history) | ||
134 | return {"value": value} | ||
135 | |||
136 | # The package is known upstream, let's ask the upstream server | ||
137 | # whether it knows our new output hash | ||
138 | |||
139 | value = await self.upstream_client.test_pr(version, pkgarch, checksum) | ||
140 | |||
141 | if value is not None: | ||
142 | |||
143 | # Upstream knows this output hash, let's store it and use it too. | ||
144 | |||
145 | if not self.server.read_only: | ||
146 | self.server.table.store_value(version, pkgarch, checksum, value) | ||
147 | # If the local server is read only, won't be able to store the new | ||
148 | # value in the database and will have to keep asking the upstream server | ||
149 | return {"value": value} | ||
150 | |||
151 | # The output hash doesn't exist upstream, get the most recent number from upstream (x) | ||
152 | # Then, we want to have a new PR value for the local server: x.y | ||
153 | |||
154 | upstream_max = await self.upstream_client.max_package_pr(version, pkgarch) | ||
155 | # Here we know that the package is known upstream, so upstream_max can't be None | ||
156 | subvalue = self.server.table.find_new_subvalue(version, pkgarch, upstream_max) | ||
157 | |||
158 | if not self.server.read_only: | ||
159 | self.server.table.store_value(version, pkgarch, checksum, subvalue) | ||
160 | |||
161 | return {"value": subvalue} | ||
162 | |||
163 | async def process_requests(self): | ||
164 | if self.server.upstream is not None: | ||
165 | self.upstream_client = await create_async_client(self.server.upstream) | ||
166 | else: | ||
167 | self.upstream_client = None | ||
168 | |||
169 | try: | ||
170 | await super().process_requests() | ||
171 | finally: | ||
172 | if self.upstream_client is not None: | ||
173 | await self.upstream_client.close() | ||
87 | 174 | ||
88 | async def handle_import_one(self, request): | 175 | async def handle_import_one(self, request): |
89 | response = None | 176 | response = None |
@@ -104,9 +191,10 @@ class PRServerClient(bb.asyncrpc.AsyncServerConnection): | |||
104 | pkgarch = request["pkgarch"] | 191 | pkgarch = request["pkgarch"] |
105 | checksum = request["checksum"] | 192 | checksum = request["checksum"] |
106 | colinfo = request["colinfo"] | 193 | colinfo = request["colinfo"] |
194 | history = request["history"] | ||
107 | 195 | ||
108 | try: | 196 | try: |
109 | (metainfo, datainfo) = self.server.table.export(version, pkgarch, checksum, colinfo) | 197 | (metainfo, datainfo) = self.server.table.export(version, pkgarch, checksum, colinfo, history) |
110 | except sqlite3.Error as exc: | 198 | except sqlite3.Error as exc: |
111 | self.logger.error(str(exc)) | 199 | self.logger.error(str(exc)) |
112 | metainfo = datainfo = None | 200 | metainfo = datainfo = None |
@@ -117,11 +205,12 @@ class PRServerClient(bb.asyncrpc.AsyncServerConnection): | |||
117 | return {"readonly": self.server.read_only} | 205 | return {"readonly": self.server.read_only} |
118 | 206 | ||
119 | class PRServer(bb.asyncrpc.AsyncServer): | 207 | class PRServer(bb.asyncrpc.AsyncServer): |
120 | def __init__(self, dbfile, read_only=False): | 208 | def __init__(self, dbfile, read_only=False, upstream=None): |
121 | super().__init__(logger) | 209 | super().__init__(logger) |
122 | self.dbfile = dbfile | 210 | self.dbfile = dbfile |
123 | self.table = None | 211 | self.table = None |
124 | self.read_only = read_only | 212 | self.read_only = read_only |
213 | self.upstream = upstream | ||
125 | 214 | ||
126 | def accept_client(self, socket): | 215 | def accept_client(self, socket): |
127 | return PRServerClient(socket, self) | 216 | return PRServerClient(socket, self) |
@@ -134,27 +223,25 @@ class PRServer(bb.asyncrpc.AsyncServer): | |||
134 | self.logger.info("Started PRServer with DBfile: %s, Address: %s, PID: %s" % | 223 | self.logger.info("Started PRServer with DBfile: %s, Address: %s, PID: %s" % |
135 | (self.dbfile, self.address, str(os.getpid()))) | 224 | (self.dbfile, self.address, str(os.getpid()))) |
136 | 225 | ||
226 | if self.upstream is not None: | ||
227 | self.logger.info("And upstream PRServer: %s " % (self.upstream)) | ||
228 | |||
137 | return tasks | 229 | return tasks |
138 | 230 | ||
139 | async def stop(self): | 231 | async def stop(self): |
140 | self.table.sync_if_dirty() | ||
141 | self.db.disconnect() | 232 | self.db.disconnect() |
142 | await super().stop() | 233 | await super().stop() |
143 | 234 | ||
144 | def signal_handler(self): | ||
145 | super().signal_handler() | ||
146 | if self.table: | ||
147 | self.table.sync() | ||
148 | |||
149 | class PRServSingleton(object): | 235 | class PRServSingleton(object): |
150 | def __init__(self, dbfile, logfile, host, port): | 236 | def __init__(self, dbfile, logfile, host, port, upstream): |
151 | self.dbfile = dbfile | 237 | self.dbfile = dbfile |
152 | self.logfile = logfile | 238 | self.logfile = logfile |
153 | self.host = host | 239 | self.host = host |
154 | self.port = port | 240 | self.port = port |
241 | self.upstream = upstream | ||
155 | 242 | ||
156 | def start(self): | 243 | def start(self): |
157 | self.prserv = PRServer(self.dbfile) | 244 | self.prserv = PRServer(self.dbfile, upstream=self.upstream) |
158 | self.prserv.start_tcp_server(socket.gethostbyname(self.host), self.port) | 245 | self.prserv.start_tcp_server(socket.gethostbyname(self.host), self.port) |
159 | self.process = self.prserv.serve_as_process(log_level=logging.WARNING) | 246 | self.process = self.prserv.serve_as_process(log_level=logging.WARNING) |
160 | 247 | ||
@@ -233,7 +320,7 @@ def run_as_daemon(func, pidfile, logfile): | |||
233 | os.remove(pidfile) | 320 | os.remove(pidfile) |
234 | os._exit(0) | 321 | os._exit(0) |
235 | 322 | ||
236 | def start_daemon(dbfile, host, port, logfile, read_only=False): | 323 | def start_daemon(dbfile, host, port, logfile, read_only=False, upstream=None): |
237 | ip = socket.gethostbyname(host) | 324 | ip = socket.gethostbyname(host) |
238 | pidfile = PIDPREFIX % (ip, port) | 325 | pidfile = PIDPREFIX % (ip, port) |
239 | try: | 326 | try: |
@@ -249,7 +336,7 @@ def start_daemon(dbfile, host, port, logfile, read_only=False): | |||
249 | 336 | ||
250 | dbfile = os.path.abspath(dbfile) | 337 | dbfile = os.path.abspath(dbfile) |
251 | def daemon_main(): | 338 | def daemon_main(): |
252 | server = PRServer(dbfile, read_only=read_only) | 339 | server = PRServer(dbfile, read_only=read_only, upstream=upstream) |
253 | server.start_tcp_server(ip, port) | 340 | server.start_tcp_server(ip, port) |
254 | server.serve_forever() | 341 | server.serve_forever() |
255 | 342 | ||
@@ -336,6 +423,9 @@ def auto_start(d): | |||
336 | 423 | ||
337 | host = host_params[0].strip().lower() | 424 | host = host_params[0].strip().lower() |
338 | port = int(host_params[1]) | 425 | port = int(host_params[1]) |
426 | |||
427 | upstream = d.getVar("PRSERV_UPSTREAM") or None | ||
428 | |||
339 | if is_local_special(host, port): | 429 | if is_local_special(host, port): |
340 | import bb.utils | 430 | import bb.utils |
341 | cachedir = (d.getVar("PERSISTENT_DIR") or d.getVar("CACHE")) | 431 | cachedir = (d.getVar("PERSISTENT_DIR") or d.getVar("CACHE")) |
@@ -350,7 +440,7 @@ def auto_start(d): | |||
350 | auto_shutdown() | 440 | auto_shutdown() |
351 | if not singleton: | 441 | if not singleton: |
352 | bb.utils.mkdirhier(cachedir) | 442 | bb.utils.mkdirhier(cachedir) |
353 | singleton = PRServSingleton(os.path.abspath(dbfile), os.path.abspath(logfile), host, port) | 443 | singleton = PRServSingleton(os.path.abspath(dbfile), os.path.abspath(logfile), host, port, upstream) |
354 | singleton.start() | 444 | singleton.start() |
355 | if singleton: | 445 | if singleton: |
356 | host = singleton.host | 446 | host = singleton.host |
diff --git a/bitbake/lib/prserv/tests.py b/bitbake/lib/prserv/tests.py new file mode 100644 index 0000000000..8765b129f2 --- /dev/null +++ b/bitbake/lib/prserv/tests.py | |||
@@ -0,0 +1,386 @@ | |||
1 | #! /usr/bin/env python3 | ||
2 | # | ||
3 | # Copyright (C) 2024 BitBake Contributors | ||
4 | # | ||
5 | # SPDX-License-Identifier: GPL-2.0-only | ||
6 | # | ||
7 | |||
8 | from . import create_server, create_client, increase_revision, revision_greater, revision_smaller, _revision_greater_or_equal | ||
9 | import prserv.db as db | ||
10 | from bb.asyncrpc import InvokeError | ||
11 | import logging | ||
12 | import os | ||
13 | import sys | ||
14 | import tempfile | ||
15 | import unittest | ||
16 | import socket | ||
17 | import subprocess | ||
18 | from pathlib import Path | ||
19 | |||
# Location of this test module and of the bitbake "bin" directory,
# used by ScriptTests to invoke the bitbake-prserv script.
THIS_DIR = Path(__file__).parent
BIN_DIR = THIS_DIR.parent.parent / "bin"

# Package identity shared by every test below.
version = "dummy-1.0-r0"
pkgarch = "core2-64"
other_arch = "aarch64"

# Dummy sha256 checksums: all share one prefix and differ only in the last
# byte, which keeps test failure output easy to read.
_CSUM_PREFIX = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4"

checksumX = _CSUM_PREFIX + "f0"
checksum0 = _CSUM_PREFIX + "a0"
checksum1 = _CSUM_PREFIX + "a1"
checksum2 = _CSUM_PREFIX + "a2"
checksum3 = _CSUM_PREFIX + "a3"
checksum4 = _CSUM_PREFIX + "a4"
checksum5 = _CSUM_PREFIX + "a5"
checksum6 = _CSUM_PREFIX + "a6"
checksum7 = _CSUM_PREFIX + "a7"
checksum8 = _CSUM_PREFIX + "a8"
checksum9 = _CSUM_PREFIX + "a9"
checksum10 = _CSUM_PREFIX + "aa"
39 | |||
def server_prefunc(server, name):
    """Pre-fork hook for a PR server process.

    Routes the process logging to a per-server log file and redirects
    stdout/stderr to a second per-server file, so that concurrently
    running test servers do not interleave their output.
    """
    logging.basicConfig(
        level=logging.DEBUG,
        filename=f"prserv-{name}.log",
        filemode="w",
        format="%(levelname)s %(filename)s:%(lineno)d %(message)s",
    )
    server.logger.debug(f"Running server {name}")
    # File is deliberately left open: it lives for the whole server process.
    sys.stdout = open(f"prserv-stdout-{name}.log", "w")
    sys.stderr = sys.stdout
46 | |||
class PRTestSetup(object):
    """Mixin for TestCase classes: spawn PR servers/clients with automatic cleanup."""

    def start_server(self, name, dbfile, upstream=None, read_only=False, prefunc=server_prefunc):
        """Start a PR server process on an ephemeral localhost port.

        The server is terminated via addCleanup when the test ends; a
        server that already exited is left alone.
        """
        server = create_server(
            socket.gethostbyname("localhost") + ":0",
            dbfile,
            upstream=upstream,
            read_only=read_only,
        )
        server.serve_as_process(prefunc=prefunc, args=(name,))

        def terminate(srv):
            # Only reap the process if it is still running.
            if srv.process.exitcode is None:
                srv.process.terminate()
                srv.process.join()

        self.addCleanup(terminate, server)
        return server

    def start_client(self, server_address):
        """Connect a PR client to *server_address*, closed automatically on teardown."""
        client = create_client(server_address)
        self.addCleanup(client.close)
        return client
75 | |||
class FunctionTests(unittest.TestCase):
    """Unit tests for the revision helper functions and the PR database layer."""

    def setUp(self):
        self.temp_dir = tempfile.TemporaryDirectory(prefix='bb-prserv')
        self.addCleanup(self.temp_dir.cleanup)

    def test_increase_revision(self):
        # Incrementing only bumps the final revision component.
        self.assertEqual(increase_revision("1"), "2")
        self.assertEqual(increase_revision("1.0"), "1.1")
        self.assertEqual(increase_revision("1.1.1"), "1.1.2")
        self.assertEqual(increase_revision("1.1.1.3"), "1.1.1.4")
        # Malformed revisions must be rejected.
        for bad in ("1.a", "1.", ""):
            with self.subTest(revision=bad):
                self.assertRaises(ValueError, increase_revision, bad)

    def test_revision_greater_or_equal(self):
        # Comparison is numeric per component, not lexicographic.
        self.assertTrue(_revision_greater_or_equal("2", "2"))
        self.assertTrue(_revision_greater_or_equal("2", "1"))
        self.assertTrue(_revision_greater_or_equal("10", "2"))
        self.assertTrue(_revision_greater_or_equal("1.10", "1.2"))
        self.assertFalse(_revision_greater_or_equal("1.2", "1.10"))
        # A longer revision with the same prefix is greater.
        self.assertTrue(_revision_greater_or_equal("1.10", "1"))
        self.assertTrue(_revision_greater_or_equal("1.10.1", "1.10"))
        self.assertFalse(_revision_greater_or_equal("1.10.1", "1.10.2"))
        self.assertTrue(_revision_greater_or_equal("1.10.1", "1.10.1"))
        self.assertTrue(_revision_greater_or_equal("1.10.1", "1"))
        # The public strict-comparison wrappers.
        self.assertTrue(revision_greater("1.20", "1.3"))
        self.assertTrue(revision_smaller("1.3", "1.20"))

    # DB tests

    def test_db(self):
        dbfile = os.path.join(self.temp_dir.name, "testtable.sqlite3")

        self.db = db.PRData(dbfile)
        # Close the underlying sqlite connection when the test ends so the
        # temporary directory can be removed cleanly.
        self.addCleanup(self.db.disconnect)
        self.table = self.db["PRMAIN"]

        self.table.store_value(version, pkgarch, checksum0, "0")
        self.table.store_value(version, pkgarch, checksum1, "1")
        # "No history" mode supports multiple PRs for the same checksum
        self.table.store_value(version, pkgarch, checksum0, "2")
        self.table.store_value(version, pkgarch, checksum2, "1.0")

        self.assertTrue(self.table.test_package(version, pkgarch))
        self.assertFalse(self.table.test_package(version, other_arch))

        self.assertTrue(self.table.test_value(version, pkgarch, "0"))
        self.assertTrue(self.table.test_value(version, pkgarch, "1"))
        self.assertTrue(self.table.test_value(version, pkgarch, "2"))

        self.assertEqual(self.table.find_package_max_value(version, pkgarch), "2")

        self.assertEqual(self.table.find_min_value(version, pkgarch, checksum0), "0")
        self.assertEqual(self.table.find_max_value(version, pkgarch, checksum0), "2")

        # Test history modes: history returns the first recorded PR for a
        # checksum, no-history the latest one.
        self.assertEqual(self.table.find_value(version, pkgarch, checksum0, True), "0")
        self.assertEqual(self.table.find_value(version, pkgarch, checksum0, False), "2")

        self.assertEqual(self.table.find_new_subvalue(version, pkgarch, "3"), "3.0")
        self.assertEqual(self.table.find_new_subvalue(version, pkgarch, "1"), "1.1")

        # Revision comparison tests: "1.20" must compare greater than "1.3"
        # (numeric, not lexicographic, comparison).
        self.table.store_value(version, pkgarch, checksum1, "1.3")
        self.table.store_value(version, pkgarch, checksum1, "1.20")
        self.assertEqual(self.table.find_min_value(version, pkgarch, checksum1), "1")
        self.assertEqual(self.table.find_max_value(version, pkgarch, checksum1), "1.20")
143 | |||
class PRBasicTests(PRTestSetup, unittest.TestCase):
    """Exercise a single stand-alone PR server through its client API."""

    def setUp(self):
        self.temp_dir = tempfile.TemporaryDirectory(prefix='bb-prserv')
        self.addCleanup(self.temp_dir.cleanup)

        db_path = os.path.join(self.temp_dir.name, "prtest-basic.sqlite3")
        self.server1 = self.start_server("basic", db_path)
        self.client1 = self.start_client(self.server1.address)

    def test_basic(self):
        client = self.client1

        # Queries about a configuration that was never stored

        self.assertIsNone(client.test_pr(version, pkgarch, checksum0),
                          "test_pr should return 'None' for a non existing PR")
        self.assertFalse(client.test_package(version, pkgarch),
                         "test_package should return 'False' for a non existing PR")
        self.assertIsNone(client.max_package_pr(version, pkgarch),
                          "max_package_pr should return 'None' for a non existing PR")

        # First configuration: the PR counter starts at "0"

        self.assertEqual(client.getPR(version, pkgarch, checksum0), "0",
                         "getPR: initial PR of a package should be '0'")
        self.assertEqual(client.test_pr(version, pkgarch, checksum0), "0",
                         "test_pr should return '0' here, matching the result of getPR")
        self.assertTrue(client.test_package(version, pkgarch),
                        "test_package should return 'True' for an existing PR")
        self.assertEqual(client.max_package_pr(version, pkgarch), "0",
                         "max_package_pr should return '0' in the current test series")

        # Repeating the identical request must return the same value

        self.assertEqual(client.getPR(version, pkgarch, checksum0), "0",
                         "getPR: asking for the same PR a second time in a row should return the same value.")

        # Every new checksum bumps the package PR by one

        for csum, expected in ((checksum1, "1"), (checksum2, "2"), (checksum3, "3")):
            self.assertEqual(client.getPR(version, pkgarch, csum), expected,
                             "getPR: second PR of a package should be '%s'" % expected)
            self.assertEqual(client.test_pr(version, pkgarch, csum), expected,
                             "test_pr should return '%s' here, matching the result of getPR" % expected)
            self.assertEqual(client.max_package_pr(version, pkgarch), expected,
                             "max_package_pr should return '%s' in the current test series" % expected)

        # Asking again for the first configuration allocates a fresh PR,
        # both implicitly and with explicit "no history" mode

        self.assertEqual(client.getPR(version, pkgarch, checksum0), "4",
                         "getPR: should return '4' in this configuration")
        self.assertEqual(client.getPR(version, pkgarch, checksum0, False), "4",
                         "getPR: should return '4' in this configuration")

        # Explicit "history" mode returns the first PR ever recorded for checksum0

        self.assertEqual(client.getPR(version, pkgarch, checksum0, True), "0",
                         "getPR: should return '0' in this configuration")

        # A different pkgarch keeps its own independent counters

        self.assertIsNone(client.test_pr(version, other_arch, checksum0),
                          "test_pr should return 'None' for a non existing PR")
        self.assertFalse(client.test_package(version, other_arch),
                         "test_package should return 'False' for a non existing PR")
        self.assertIsNone(client.max_package_pr(version, other_arch),
                          "max_package_pr should return 'None' for a non existing PR")

        # Now populate that pkgarch

        self.assertEqual(client.getPR(version, other_arch, checksum0), "0",
                         "getPR: initial PR of a package should be '0'")
        self.assertEqual(client.test_pr(version, other_arch, checksum0), "0",
                         "test_pr should return '0' here, matching the result of getPR")
        self.assertTrue(client.test_package(version, other_arch),
                        "test_package should return 'True' for an existing PR")
        self.assertEqual(client.max_package_pr(version, other_arch), "0",
                         "max_package_pr should return '0' in the current test series")

        # This server was started writable

        self.assertFalse(client.is_readonly(),
                         "Server should not be described as 'read-only'")
258 | |||
class PRUpstreamTests(PRTestSetup, unittest.TestCase):
    """Tests for a chain of PR servers: server0 -> server1 -> server2.

    server2 is the topmost upstream; server1 uses server2 as upstream and
    server0 uses server1. Later a read-only server sharing server0's
    database is started with server1 as its upstream.
    """

    def setUp(self):

        self.temp_dir = tempfile.TemporaryDirectory(prefix='bb-prserv')
        self.addCleanup(self.temp_dir.cleanup)

        # Topmost server, no upstream of its own
        dbfile2 = os.path.join(self.temp_dir.name, "prtest-upstream2.sqlite3")
        self.server2 = self.start_server("upstream2", dbfile2)
        self.client2 = self.start_client(self.server2.address)

        # Middle server, chained to server2
        dbfile1 = os.path.join(self.temp_dir.name, "prtest-upstream1.sqlite3")
        self.server1 = self.start_server("upstream1", dbfile1, upstream=self.server2.address)
        self.client1 = self.start_client(self.server1.address)

        # Local server, chained to server1; its db is reused later by the
        # read-only server
        dbfile0 = os.path.join(self.temp_dir.name, "prtest-local.sqlite3")
        self.server0 = self.start_server("local", dbfile0, upstream=self.server1.address)
        self.client0 = self.start_client(self.server0.address)
        self.shared_db = dbfile0

    def test_upstream_and_readonly(self):
        # NOTE: the assertions below are order-dependent; each getPR call
        # mutates server state, so do not reorder them.

        # For identical checksums, all servers should return the same PR

        result = self.client2.getPR(version, pkgarch, checksum0)
        self.assertEqual(result, "0", "getPR: initial PR of a package should be '0'")

        result = self.client1.getPR(version, pkgarch, checksum0)
        self.assertEqual(result, "0", "getPR: initial PR of a package should be '0' (same as upstream)")

        result = self.client0.getPR(version, pkgarch, checksum0)
        self.assertEqual(result, "0", "getPR: initial PR of a package should be '0' (same as upstream)")

        # Now introduce new checksums on server1 for the same version: they
        # get sub-revisions of the upstream PR ("0.x")

        result = self.client1.getPR(version, pkgarch, checksum1)
        self.assertEqual(result, "0.0", "getPR: first PR of a package which has a different checksum upstream should be '0.0'")

        result = self.client1.getPR(version, pkgarch, checksum2)
        self.assertEqual(result, "0.1", "getPR: second PR of a package that has a different checksum upstream should be '0.1'")

        # Ask again on server1 for the same checksums: revisions keep increasing

        result = self.client1.getPR(version, pkgarch, checksum1)
        self.assertEqual(result, "0.2", "getPR: can't decrease for known PR")

        result = self.client1.getPR(version, pkgarch, checksum2)
        self.assertEqual(result, "0.3")

        result = self.client1.max_package_pr(version, pkgarch)
        self.assertEqual(result, "0.3")

        # Checksums unknown to server1 get a further sub-revision on server0 ("0.3.x")

        result = self.client0.getPR(version, pkgarch, checksum3)
        self.assertEqual(result, "0.3.0", "getPR: first PR of a package that doesn't exist upstream should be '0.3.0'")

        result = self.client0.getPR(version, pkgarch, checksum4)
        self.assertEqual(result, "0.3.1", "getPR: second PR of a package that doesn't exist upstream should be '0.3.1'")

        result = self.client0.getPR(version, pkgarch, checksum3)
        self.assertEqual(result, "0.3.2")

        # More upstream updates
        # Here, we assume no communication between server2 and server0. server2 only impacts server0
        # after impacting server1

        self.assertEqual(self.client2.getPR(version, pkgarch, checksum5), "1")
        self.assertEqual(self.client1.getPR(version, pkgarch, checksum6), "1.0")
        self.assertEqual(self.client1.getPR(version, pkgarch, checksum7), "1.1")
        self.assertEqual(self.client0.getPR(version, pkgarch, checksum8), "1.1.0")
        self.assertEqual(self.client0.getPR(version, pkgarch, checksum9), "1.1.1")

        # "history" mode tests: the first PR ever recorded for each checksum

        self.assertEqual(self.client2.getPR(version, pkgarch, checksum0, True), "0")
        self.assertEqual(self.client1.getPR(version, pkgarch, checksum2, True), "0.1")
        self.assertEqual(self.client0.getPR(version, pkgarch, checksum3, True), "0.3.0")

        # More "no history" mode tests

        self.assertEqual(self.client2.getPR(version, pkgarch, checksum0), "2")
        self.assertEqual(self.client1.getPR(version, pkgarch, checksum0), "2") # Same as upstream
        self.assertEqual(self.client0.getPR(version, pkgarch, checksum0), "2") # Same as upstream
        self.assertEqual(self.client1.getPR(version, pkgarch, checksum7), "3") # This could be surprising, but since the previous revision was "2", increasing it yields "3".
        # We don't know how many upstream servers we have
        # Start read-only server, sharing server0's database, with server1 as upstream
        self.server_ro = self.start_server("local-ro", self.shared_db, upstream=self.server1.address, read_only=True)
        self.client_ro = self.start_client(self.server_ro.address)

        self.assertTrue(self.client_ro.is_readonly(), "Database should be described as 'read-only'")

        # Checks on non existing configurations
        self.assertIsNone(self.client_ro.test_pr(version, pkgarch, checksumX))
        self.assertFalse(self.client_ro.test_package("unknown", pkgarch))

        # Look up existing configurations
        self.assertEqual(self.client_ro.getPR(version, pkgarch, checksum0), "3") # "no history" mode
        self.assertEqual(self.client_ro.getPR(version, pkgarch, checksum0, True), "0") # "history" mode
        self.assertEqual(self.client_ro.getPR(version, pkgarch, checksum3), "3")
        self.assertEqual(self.client_ro.getPR(version, pkgarch, checksum3, True), "0.3.0")
        self.assertEqual(self.client_ro.max_package_pr(version, pkgarch), "2") # normal as "3" was never saved

        # Try to insert a new value. Here this one is known upstream.
        self.assertEqual(self.client_ro.getPR(version, pkgarch, checksum7), "3")
        # Try to insert a completely new value. As the max upstream value is already "3", it should be "3.0"
        self.assertEqual(self.client_ro.getPR(version, pkgarch, checksum10), "3.0")
        # Same with another value which only exists in the upstream's upstream server
        # This time, as the upstream server doesn't know it, it will ask its upstream server. So that's a known one.
        self.assertEqual(self.client_ro.getPR(version, pkgarch, checksum9), "3")
367 | |||
class ScriptTests(unittest.TestCase):
    """Smoke tests for the bitbake-prserv command line script.

    Test methods are numbered because test_2 stops the daemon that
    test_1 started.
    """

    def setUp(self):

        self.temp_dir = tempfile.TemporaryDirectory(prefix='bb-prserv')
        self.addCleanup(self.temp_dir.cleanup)
        self.dbfile = os.path.join(self.temp_dir.name, "prtest.sqlite3")

    def _invoke(self, verb, args):
        # Run bitbake-prserv with *args*, turning a non-zero exit code
        # into a test failure labelled with *verb*.
        try:
            subprocess.check_call([BIN_DIR / "bitbake-prserv", *args])
        except subprocess.CalledProcessError as exc:
            self.fail("Failed to %s bitbake-prserv: %s" % (verb, exc.returncode))

    def test_1_start_bitbake_prserv(self):
        self._invoke("start", ["--start", "-f", self.dbfile])

    def test_2_stop_bitbake_prserv(self):
        self._invoke("stop", ["--stop"])
386 | self.fail("Failed to stop bitbake-prserv: %s" % e.returncode) | ||
diff --git a/documentation/brief-yoctoprojectqs/index.rst b/documentation/brief-yoctoprojectqs/index.rst index 4301bfd970..c5400e4ac8 100644 --- a/documentation/brief-yoctoprojectqs/index.rst +++ b/documentation/brief-yoctoprojectqs/index.rst | |||
@@ -256,8 +256,11 @@ an entire Linux distribution, including the toolchain, from source. | |||
256 | BB_HASHSERVE = "auto" | 256 | BB_HASHSERVE = "auto" |
257 | BB_SIGNATURE_HANDLER = "OEEquivHash" | 257 | BB_SIGNATURE_HANDLER = "OEEquivHash" |
258 | 258 | ||
259 | The hash equivalence server needs a recent version of python | 259 | The hash equivalence server needs the websockets python module version 9.1 |
260 | websockets installed to work correctly. | 260 | or later. Debian GNU/Linux 12 (Bookworm) and later, Fedora, CentOS Stream |
261 | 9 and later, and Ubuntu 22.04 (LTS) and later, all have a recent enough | ||
262 | package. Other supported distributions need to get the module some other | ||
263 | place than their package feed, e.g. via ``pip``. | ||
261 | 264 | ||
262 | #. **Start the Build:** Continue with the following command to build an OS | 265 | #. **Start the Build:** Continue with the following command to build an OS |
263 | image for the target, which is ``core-image-sato`` in this example: | 266 | image for the target, which is ``core-image-sato`` in this example: |
diff --git a/documentation/dev-manual/vulnerabilities.rst b/documentation/dev-manual/vulnerabilities.rst index 1bc2a85929..983d4ad3c6 100644 --- a/documentation/dev-manual/vulnerabilities.rst +++ b/documentation/dev-manual/vulnerabilities.rst | |||
@@ -57,6 +57,10 @@ applied and that the issue needs to be investigated. ``Ignored`` means that afte | |||
57 | analysis, it has been deemed to ignore the issue as it for example affects | 57 | analysis, it has been deemed to ignore the issue as it for example affects |
58 | the software component on a different operating system platform. | 58 | the software component on a different operating system platform. |
59 | 59 | ||
60 | By default, no NVD API key is used to retrieve data from the CVE database, which | ||
61 | results in larger delays between NVD API requests. See the :term:`NVDCVE_API_KEY` | ||
62 | documentation on how to request and set a NVD API key. | ||
63 | |||
60 | After a build with CVE check enabled, reports for each compiled source recipe will be | 64 | After a build with CVE check enabled, reports for each compiled source recipe will be |
61 | found in ``build/tmp/deploy/cve``. | 65 | found in ``build/tmp/deploy/cve``. |
62 | 66 | ||
diff --git a/documentation/migration-guides/index.rst b/documentation/migration-guides/index.rst index d8edd05b89..e9d7f72809 100644 --- a/documentation/migration-guides/index.rst +++ b/documentation/migration-guides/index.rst | |||
@@ -12,6 +12,7 @@ to move to one release of the Yocto Project from the previous one. | |||
12 | .. toctree:: | 12 | .. toctree:: |
13 | 13 | ||
14 | migration-general | 14 | migration-general |
15 | release-5.1 | ||
15 | release-5.0 | 16 | release-5.0 |
16 | release-4.3 | 17 | release-4.3 |
17 | release-4.2 | 18 | release-4.2 |
diff --git a/documentation/migration-guides/migration-5.1.rst b/documentation/migration-guides/migration-5.1.rst new file mode 100644 index 0000000000..b43fecc2dd --- /dev/null +++ b/documentation/migration-guides/migration-5.1.rst | |||
@@ -0,0 +1,143 @@ | |||
1 | .. SPDX-License-Identifier: CC-BY-SA-2.0-UK | ||
2 | |||
3 | Release 5.1 (styhead) | ||
4 | ===================== | ||
5 | |||
6 | Migration notes for 5.1 (styhead) | ||
7 | ------------------------------------ | ||
8 | |||
9 | This section provides migration information for moving to the Yocto | ||
10 | Project 5.1 Release (codename "styhead") from the prior release. | ||
11 | |||
12 | .. _migration-5.1-supported-kernel-versions: | ||
13 | |||
14 | :term:`WORKDIR` changes | ||
15 | ~~~~~~~~~~~~~~~~~~~~~~~ | ||
16 | |||
17 | S = ${WORKDIR} no longer supported | ||
18 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | ||
19 | |||
20 | If a recipe has :term:`S` set to be :term:`WORKDIR`, this is no longer | ||
21 | supported, and an error will be issued. The recipe should be changed to:: | ||
22 | |||
23 | S = "${WORKDIR}/sources" | ||
24 | UNPACKDIR = "${S}" | ||
25 | |||
26 | Any :term:`WORKDIR` references where files from :term:`SRC_URI` are referenced | ||
27 | should be changed to :term:`S`. These are commonly in :ref:`ref-tasks-configure`, | ||
28 | :ref:`ref-tasks-compile`, :ref:`ref-tasks-install` and :term:`LIC_FILES_CHKSUM`. | ||
29 | |||
30 | :term:`WORKDIR` references in recipes | ||
31 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | ||
32 | |||
33 | :term:`WORKDIR` references in other recipes need auditing. If they reference | ||
34 | files from :term:`SRC_URI`, they likely need changing to :term:`UNPACKDIR`. | ||
35 | These are commonly in :ref:`ref-tasks-compile` and :ref:`ref-tasks-install` | ||
36 | for things like service or configuration files. One unusual case is | ||
37 | ``${WORKDIR}/${BP}`` which should probably be set to ``${S}``. | ||
38 | |||
39 | References to ``../`` in :term:`LIC_FILES_CHKSUM` or elsewhere may need changing | ||
40 | to :term:`UNPACKDIR`. References to :term:`WORKDIR` in ``sed`` commands are | ||
41 | usually left as they are. | ||
42 | |||
43 | General notes | ||
44 | ^^^^^^^^^^^^^ | ||
45 | |||
46 | Files from :ref:`ref-tasks-unpack` now unpack to ``WORKDIR/sources-unpack/`` | ||
47 | rather than ``WORKDIR/``. | ||
48 | |||
49 | If :term:`S` is set to a subdirectory under :term:`WORKDIR` and that | ||
50 | subdirectory exists in ``sources-unpack`` after :ref:`ref-tasks-unpack` runs, | ||
51 | it is moved to :term:`WORKDIR`. This means that ``S = "${WORKDIR}/${BP}"``, | ||
52 | ``S = "${WORKDIR}/git"`` and also deeper paths continue to work as expected | ||
53 | without changes. We cannot use symlinks to do this as it breaks autotools | ||
54 | based recipes. Keeping all sources under ``sources-unpack`` wasn't considered | ||
55 | as it meant more invasive recipes changes. The key objective was separating the | ||
56 | :ref:`ref-tasks-unpack` task output from :term:`WORKDIR`. | ||
57 | |||
58 | Previously, :term:`S` was always created but after the recent changes it is no | ||
59 | longer the case. This means the check in ``do_unpack_qa`` triggers where | ||
60 | :term:`S` is not created by a recipe while it didn't happen before. This can | ||
61 | require to add an :term:`S` definition to a recipe that only uses | ||
62 | ``file://`` :term:`SRC_URI` entries. To be consistent, the following pattern is | ||
63 | recommended:: | ||
64 | |||
65 | S = "${WORKDIR}/sources" | ||
66 | UNPACKDIR = "${S}" | ||
67 | |||
68 | Building C files from :term:`UNPACKDIR` without setting :term:`S` to point at | ||
69 | it does not work as the debug prefix mapping doesn't handle that. | ||
70 | |||
71 | ``devtool`` and ``recipetool`` have been updated to handle this and their | ||
72 | support for ``S = WORKDIR`` and ``oe-local-files`` has been removed. | ||
73 | |||
74 | Supported kernel versions | ||
75 | ~~~~~~~~~~~~~~~~~~~~~~~~~ | ||
76 | |||
77 | The :term:`OLDEST_KERNEL` setting is still "5.15" in this release, meaning that | ||
78 | out of the box, older kernels are not supported. See :ref:`4.3 migration notes | ||
79 | <migration-4.3-supported-kernel-versions>` for details. | ||
80 | |||
81 | .. _migration-5.1-supported-distributions: | ||
82 | |||
83 | Supported distributions | ||
84 | ~~~~~~~~~~~~~~~~~~~~~~~ | ||
85 | |||
86 | Compared to the previous releases, running BitBake is supported on new | ||
87 | GNU/Linux distributions: | ||
88 | |||
89 | On the other hand, some earlier distributions are no longer supported: | ||
90 | |||
91 | See :ref:`all supported distributions <system-requirements-supported-distros>`. | ||
92 | |||
93 | .. _migration-5.1-go-changes: | ||
94 | |||
95 | Go language changes | ||
96 | ~~~~~~~~~~~~~~~~~~~ | ||
97 | |||
98 | .. _migration-5.1-systemd-changes: | ||
99 | |||
100 | systemd changes | ||
101 | ~~~~~~~~~~~~~~~ | ||
102 | |||
103 | .. _migration-5.1-recipe-changes: | ||
104 | |||
105 | Recipe changes | ||
106 | ~~~~~~~~~~~~~~ | ||
107 | |||
108 | .. _migration-5.1-deprecated-variables: | ||
109 | |||
110 | Deprecated variables | ||
111 | ~~~~~~~~~~~~~~~~~~~~ | ||
112 | |||
113 | .. _migration-5.1-removed-variables: | ||
114 | |||
115 | Removed variables | ||
116 | ~~~~~~~~~~~~~~~~~ | ||
117 | |||
118 | The following variables have been removed: | ||
119 | |||
120 | .. _migration-5.1-removed-recipes: | ||
121 | |||
122 | Removed recipes | ||
123 | ~~~~~~~~~~~~~~~ | ||
124 | |||
125 | The following recipes have been removed in this release: | ||
126 | |||
127 | .. _migration-5.1-removed-classes: | ||
128 | |||
129 | Removed classes | ||
130 | ~~~~~~~~~~~~~~~ | ||
131 | |||
132 | No classes have been removed in this release. | ||
133 | |||
134 | .. _migration-5.1-qemu-changes: | ||
135 | |||
136 | QEMU changes | ||
137 | ~~~~~~~~~~~~ | ||
138 | |||
139 | .. _migration-5.1-misc-changes: | ||
140 | |||
141 | Miscellaneous changes | ||
142 | ~~~~~~~~~~~~~~~~~~~~~ | ||
143 | |||
diff --git a/documentation/migration-guides/release-5.0.rst b/documentation/migration-guides/release-5.0.rst index bd19b707f6..44a02ab041 100644 --- a/documentation/migration-guides/release-5.0.rst +++ b/documentation/migration-guides/release-5.0.rst | |||
@@ -7,3 +7,4 @@ Release 5.0 (scarthgap) | |||
7 | 7 | ||
8 | migration-5.0 | 8 | migration-5.0 |
9 | release-notes-5.0 | 9 | release-notes-5.0 |
10 | release-notes-5.0.1 | ||
diff --git a/documentation/migration-guides/release-5.1.rst b/documentation/migration-guides/release-5.1.rst new file mode 100644 index 0000000000..768edfa2b0 --- /dev/null +++ b/documentation/migration-guides/release-5.1.rst | |||
@@ -0,0 +1,9 @@ | |||
1 | .. SPDX-License-Identifier: CC-BY-SA-2.0-UK | ||
2 | |||
3 | Release 5.1 (styhead) | ||
4 | ======================= | ||
5 | |||
6 | .. toctree:: | ||
7 | |||
8 | migration-5.1 | ||
9 | release-notes-5.1 | ||
diff --git a/documentation/migration-guides/release-notes-4.0.18.rst b/documentation/migration-guides/release-notes-4.0.18.rst index fc8cd83c02..d5a2a7f6ce 100644 --- a/documentation/migration-guides/release-notes-4.0.18.rst +++ b/documentation/migration-guides/release-notes-4.0.18.rst | |||
@@ -28,7 +28,7 @@ Fixes in Yocto-4.0.18 | |||
28 | ~~~~~~~~~~~~~~~~~~~~~ | 28 | ~~~~~~~~~~~~~~~~~~~~~ |
29 | 29 | ||
30 | - build-appliance-image: Update to kirkstone head revision | 30 | - build-appliance-image: Update to kirkstone head revision |
31 | - common-licenses: Backport missing license | 31 | - common-licenses: backport LGPL-3.0-with-zeromq-exception license |
32 | - contributor-guide: add notes for tests | 32 | - contributor-guide: add notes for tests |
33 | - contributor-guide: be more specific about meta-* trees | 33 | - contributor-guide: be more specific about meta-* trees |
34 | - cups: fix typo in :cve:`2023-32360` backport patch | 34 | - cups: fix typo in :cve:`2023-32360` backport patch |
diff --git a/documentation/migration-guides/release-notes-5.0.1.rst b/documentation/migration-guides/release-notes-5.0.1.rst new file mode 100644 index 0000000000..a377f92c19 --- /dev/null +++ b/documentation/migration-guides/release-notes-5.0.1.rst | |||
@@ -0,0 +1,134 @@ | |||
1 | .. SPDX-License-Identifier: CC-BY-SA-2.0-UK | ||
2 | |||
3 | Release notes for Yocto-5.0.1 (Scarthgap) | ||
4 | ----------------------------------------- | ||
5 | |||
6 | Security Fixes in Yocto-5.0.1 | ||
7 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | ||
8 | |||
9 | - N/A | ||
10 | |||
11 | |||
12 | Fixes in Yocto-5.0.1 | ||
13 | ~~~~~~~~~~~~~~~~~~~~ | ||
14 | |||
15 | - babeltrace2: upgrade 2.0.5 -> 2.0.6 | ||
16 | - bind: upgrade 9.18.24 -> 9.18.25 | ||
17 | - bitbake: cooker: Use hash client to ping upstream server | ||
18 | - build-appliance-image: Update to scarthgap head revision (b9b47b1a392b...) | ||
19 | - docs: add support for scarthgap 5.0 release | ||
20 | - docs: brief-yoctoprojectqs: explicit version dependency on websockets python module | ||
21 | - docs: brief-yoctoprojectqs: Update to the correct hash equivalence server address | ||
22 | - documentation/poky.yaml.in: drop mesa/sdl from essential host packages | ||
23 | - ell: upgrade 0.63 -> 0.64 | ||
24 | - gcr: upgrade 4.2.0 -> 4.2.1 | ||
25 | - icu: update 74-1 -> 74-2 | ||
26 | - libdnf: upgrade 0.73.0 -> 0.73.1 | ||
27 | - libsdl2: upgrade 2.30.0 -> 2.30.1 | ||
28 | - libx11: upgrade 1.8.7 -> 1.8.9 | ||
29 | - libxcursor: upgrade 1.2.1 -> 1.2.2 | ||
30 | - libxml2: upgrade 2.12.5 -> 2.12.6 | ||
31 | - local.conf.sample: Fix hashequivalence server address | ||
32 | - lttng-tools: upgrade 2.13.11 -> 2.13.13 | ||
33 | - manuals: standards.md: add standard for project names | ||
34 | - mesa: upgrade 24.0.2 -> 24.0.3 | ||
35 | - migration-notes: add release notes for 4.0.18 | ||
36 | - mpg123: upgrade 1.32.5 -> 1.32.6 | ||
37 | - pango: upgrade 1.52.0 -> 1.52.1 | ||
38 | - poky.conf: bump version for 5.0.1 | ||
39 | - python3: skip test_concurrent_futures/test_shutdown | ||
40 | - ref-manual: update releases.svg | ||
41 | - ref-manual: variables: add :term:`USERADD_DEPENDS` | ||
42 | - release-notes-5.0: update Repositories / Downloads section | ||
43 | - release-notes-5.0: update recipes changes | ||
44 | - release-notes-5.0: update new features | ||
45 | - rootfs-postcommands.bbclass: Only set DROPBEAR_RSAKEY_DIR once | ||
46 | - rpm: update 4.19.1 -> 4.19.1.1 | ||
47 | - scripts/oe-setup-build: write a build environment initialization one-liner into the build directory | ||
48 | - sstate.bbclass: Add _SSTATE_EXCLUDEDEPS_SYSROOT to vardepsexclude | ||
49 | - systemd: sed :term:`ROOT_HOME` only if sysusers :term:`PACKAGECONFIG` is set | ||
50 | |||
51 | |||
52 | Known Issues in Yocto-5.0.1 | ||
53 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ | ||
54 | |||
55 | - N/A | ||
56 | |||
57 | |||
58 | Contributors to Yocto-5.0.1 | ||
59 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ | ||
60 | |||
61 | - Alexander Kanavin | ||
62 | - Christian Bräuner Sørensen | ||
63 | - Joshua Watt | ||
64 | - Lee Chee Yang | ||
65 | - Mark Hatle | ||
66 | - Michael Glembotzki | ||
67 | - Michael Halstead | ||
68 | - Michael Opdenacker | ||
69 | - Paul Eggleton | ||
70 | - Quentin Schulz | ||
71 | - Richard Purdie | ||
72 | - Steve Sakoman | ||
73 | - Trevor Gamblin | ||
74 | - Wang Mingyu | ||
75 | |||
76 | |||
77 | Repositories / Downloads for Yocto-5.0.1 | ||
78 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | ||
79 | |||
80 | poky | ||
81 | |||
82 | - Repository Location: :yocto_git:`/poky` | ||
83 | - Branch: :yocto_git:`scarthgap </poky/log/?h=scarthgap>` | ||
84 | - Tag: :yocto_git:`yocto-5.0.1 </poky/log/?h=yocto-5.0.1>` | ||
85 | - Git Revision: :yocto_git:`4b07a5316ed4b858863dfdb7cab63859d46d1810 </poky/commit/?id=4b07a5316ed4b858863dfdb7cab63859d46d1810>` | ||
86 | - Release Artefact: poky-4b07a5316ed4b858863dfdb7cab63859d46d1810 | ||
87 | - sha: 51d0c84da7dbcc8db04a674da39cfc73ea78aac22ee646ede5b6229937d4666a | ||
88 | - Download Locations: | ||
89 | http://downloads.yoctoproject.org/releases/yocto/yocto-5.0.1/poky-4b07a5316ed4b858863dfdb7cab63859d46d1810.tar.bz2 | ||
90 | http://mirrors.kernel.org/yocto/yocto/yocto-5.0.1/poky-4b07a5316ed4b858863dfdb7cab63859d46d1810.tar.bz2 | ||
91 | |||
92 | openembedded-core | ||
93 | |||
94 | - Repository Location: :oe_git:`/openembedded-core` | ||
95 | - Branch: :oe_git:`scarthgap </openembedded-core/log/?h=scarthgap>` | ||
96 | - Tag: :oe_git:`yocto-5.0.1 </openembedded-core/log/?h=yocto-5.0.1>` | ||
97 | - Git Revision: :oe_git:`294a7dbe44f6b7c8d3a1de8c2cc182af37c4f916 </openembedded-core/commit/?id=294a7dbe44f6b7c8d3a1de8c2cc182af37c4f916>` | ||
98 | - Release Artefact: oecore-294a7dbe44f6b7c8d3a1de8c2cc182af37c4f916 | ||
99 | - sha: e9be51a3b1fe8a1f420483b912caf91bc429dcca303d462381876a643b73045e | ||
100 | - Download Locations: | ||
101 | http://downloads.yoctoproject.org/releases/yocto/yocto-5.0.1/oecore-294a7dbe44f6b7c8d3a1de8c2cc182af37c4f916.tar.bz2 | ||
102 | http://mirrors.kernel.org/yocto/yocto/yocto-5.0.1/oecore-294a7dbe44f6b7c8d3a1de8c2cc182af37c4f916.tar.bz2 | ||
103 | |||
104 | meta-mingw | ||
105 | |||
106 | - Repository Location: :yocto_git:`/meta-mingw` | ||
107 | - Branch: :yocto_git:`scarthgap </meta-mingw/log/?h=scarthgap>` | ||
108 | - Tag: :yocto_git:`yocto-5.0.1 </meta-mingw/log/?h=yocto-5.0.1>` | ||
109 | - Git Revision: :yocto_git:`acbba477893ef87388effc4679b7f40ee49fc852 </meta-mingw/commit/?id=acbba477893ef87388effc4679b7f40ee49fc852>` | ||
110 | - Release Artefact: meta-mingw-acbba477893ef87388effc4679b7f40ee49fc852 | ||
111 | - sha: 3b7c2f475dad5130bace652b150367f587d44b391218b1364a8bbc430b48c54c | ||
112 | - Download Locations: | ||
113 | http://downloads.yoctoproject.org/releases/yocto/yocto-5.0.1/meta-mingw-acbba477893ef87388effc4679b7f40ee49fc852.tar.bz2 | ||
114 | http://mirrors.kernel.org/yocto/yocto/yocto-5.0.1/meta-mingw-acbba477893ef87388effc4679b7f40ee49fc852.tar.bz2 | ||
115 | |||
116 | bitbake | ||
117 | |||
118 | - Repository Location: :oe_git:`/bitbake` | ||
119 | - Branch: :oe_git:`2.8 </bitbake/log/?h=2.8>` | ||
120 | - Tag: :oe_git:`yocto-5.0.1 </bitbake/log/?h=yocto-5.0.1>` | ||
121 | - Git Revision: :oe_git:`8f90d10f9efc9a32e13f6bd031992aece79fe7cc </bitbake/commit/?id=8f90d10f9efc9a32e13f6bd031992aece79fe7cc>` | ||
122 | - Release Artefact: bitbake-8f90d10f9efc9a32e13f6bd031992aece79fe7cc | ||
123 | - sha: 519f02d5de7fbfac411532161d521123814dd9cc7d6b55488b5e7a547c1a6977 | ||
124 | - Download Locations: | ||
125 | http://downloads.yoctoproject.org/releases/yocto/yocto-5.0.1/bitbake-8f90d10f9efc9a32e13f6bd031992aece79fe7cc.tar.bz2 | ||
126 | http://mirrors.kernel.org/yocto/yocto/yocto-5.0.1/bitbake-8f90d10f9efc9a32e13f6bd031992aece79fe7cc.tar.bz2 | ||
127 | |||
128 | yocto-docs | ||
129 | |||
130 | - Repository Location: :yocto_git:`/yocto-docs` | ||
131 | - Branch: :yocto_git:`scarthgap </yocto-docs/log/?h=scarthgap>` | ||
132 | - Tag: :yocto_git:`yocto-5.0.1 </yocto-docs/log/?h=yocto-5.0.1>` | ||
133 | - Git Revision: :yocto_git:`875dfe69e93bf8fee3b8c07818a6ac059f228a13 </yocto-docs/commit/?id=875dfe69e93bf8fee3b8c07818a6ac059f228a13>` | ||
134 | |||
diff --git a/documentation/migration-guides/release-notes-5.1.rst b/documentation/migration-guides/release-notes-5.1.rst new file mode 100644 index 0000000000..32cb2de2b4 --- /dev/null +++ b/documentation/migration-guides/release-notes-5.1.rst | |||
@@ -0,0 +1,67 @@ | |||
1 | .. SPDX-License-Identifier: CC-BY-SA-2.0-UK | ||
2 | |||
3 | Release notes for 5.1 (styhead) | ||
4 | --------------------------------- | ||
5 | |||
6 | New Features / Enhancements in 5.1 | ||
7 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | ||
8 | |||
9 | - Linux kernel 6.X, gcc 14.X, glibc 2.X, LLVM 18.X, and over XXX other recipe upgrades | ||
10 | |||
11 | - New variables: | ||
12 | |||
13 | - Architecture-specific enhancements: | ||
14 | |||
15 | - Kernel-related enhancements: | ||
16 | |||
17 | - New core recipes: | ||
18 | |||
19 | - QEMU / ``runqemu`` enhancements: | ||
20 | |||
21 | - Rust improvements: | ||
22 | |||
23 | - wic Image Creator enhancements: | ||
24 | |||
25 | - SDK-related improvements: | ||
26 | |||
27 | - Testing: | ||
28 | |||
29 | - Utility script changes: | ||
30 | |||
31 | - BitBake improvements: | ||
32 | |||
33 | - devtool improvements: | ||
34 | |||
35 | - recipetool improvements: | ||
36 | |||
37 | - Packaging changes: | ||
38 | |||
39 | - Security improvements: | ||
40 | |||
41 | - Toaster Web UI improvements: | ||
42 | |||
43 | - Prominent documentation updates: | ||
44 | |||
45 | - Miscellaneous changes: | ||
46 | |||
47 | Known Issues in 5.1 | ||
48 | ~~~~~~~~~~~~~~~~~~~ | ||
49 | |||
50 | Recipe License changes in 5.1 | ||
51 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | ||
52 | |||
53 | The following corrections have been made to the :term:`LICENSE` values set by recipes: | ||
54 | |||
55 | Security Fixes in 5.1 | ||
56 | ~~~~~~~~~~~~~~~~~~~~~ | ||
57 | |||
58 | Recipe Upgrades in 5.1 | ||
59 | ~~~~~~~~~~~~~~~~~~~~~~ | ||
60 | |||
61 | Contributors to 5.1 | ||
62 | ~~~~~~~~~~~~~~~~~~~ | ||
63 | |||
64 | Thanks to the following people who contributed to this release: | ||
65 | |||
66 | Repositories / Downloads for Yocto-5.1 | ||
67 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | ||
diff --git a/documentation/ref-manual/tasks.rst b/documentation/ref-manual/tasks.rst index 2e4b23408d..df751d75a3 100644 --- a/documentation/ref-manual/tasks.rst +++ b/documentation/ref-manual/tasks.rst | |||
@@ -412,12 +412,11 @@ them. You can learn more by looking at the | |||
412 | ------------- | 412 | ------------- |
413 | 413 | ||
414 | Unpacks the source code into a working directory pointed to by | 414 | Unpacks the source code into a working directory pointed to by |
415 | ``${``\ :term:`WORKDIR`\ ``}``. The :term:`S` | 415 | ``${``\ :term:`UNPACKDIR`\ ``}``. A legacy way to specify |
416 | variable also plays a role in where unpacked source files ultimately | 416 | this directory is through the :term:`S` and :term:`WORKDIR` variables. |
417 | reside. For more information on how source files are unpacked, see the | 417 | For more information on how source files are unpacked, see the |
418 | ":ref:`overview-manual/concepts:source fetching`" | 418 | ":ref:`overview-manual/concepts:source fetching`" |
419 | section in the Yocto Project Overview and Concepts Manual and also see | 419 | section in the Yocto Project Overview and Concepts Manual. |
420 | the :term:`WORKDIR` and :term:`S` variable descriptions. | ||
421 | 420 | ||
422 | Manually Called Tasks | 421 | Manually Called Tasks |
423 | ===================== | 422 | ===================== |
diff --git a/documentation/ref-manual/variables.rst b/documentation/ref-manual/variables.rst index 3f37f42f21..07b5b6f95c 100644 --- a/documentation/ref-manual/variables.rst +++ b/documentation/ref-manual/variables.rst | |||
@@ -5585,6 +5585,21 @@ system and gives an overview of their function and contents. | |||
5585 | 5585 | ||
5586 | NON_MULTILIB_RECIPES = "grub grub-efi make-mod-scripts ovmf u-boot" | 5586 | NON_MULTILIB_RECIPES = "grub grub-efi make-mod-scripts ovmf u-boot" |
5587 | 5587 | ||
5588 | :term:`NVDCVE_API_KEY` | ||
5589 | The NVD API key used to retrieve data from the CVE database when | ||
5590 | using :ref:`ref-classes-cve-check`. | ||
5591 | |||
5592 | By default, no API key is used, which results in larger delays between API | ||
5593 | requests and limits the number of queries to the public rate limits posted | ||
5594 | at the `NVD developer's page <https://nvd.nist.gov/developers/start-here>`__. | ||
5595 | |||
5596 | NVD API keys can be requested through the | ||
5597 | `Request an API Key <https://nvd.nist.gov/developers/request-an-api-key>`__ | ||
5598 | page. You can set this variable to the NVD API key in your ``local.conf`` file. | ||
5599 | Example:: | ||
5600 | |||
5601 | NVDCVE_API_KEY = "fe753&7a2-1427-347d-23ff-b2e2b7ca5f3" | ||
5602 | |||
5588 | :term:`OBJCOPY` | 5603 | :term:`OBJCOPY` |
5589 | The minimal command and arguments to run ``objcopy``. | 5604 | The minimal command and arguments to run ``objcopy``. |
5590 | 5605 | ||
@@ -6829,6 +6844,19 @@ system and gives an overview of their function and contents. | |||
6829 | explicitly if that will not match the package name (e.g. where the | 6844 | explicitly if that will not match the package name (e.g. where the |
6830 | package name has a prefix, underscores, uppercase letters etc.) | 6845 | package name has a prefix, underscores, uppercase letters etc.) |
6831 | 6846 | ||
6847 | :term:`PYPI_PACKAGE_EXT` | ||
6848 | When inheriting the :ref:`ref-classes-pypi` class, specifies the | ||
6849 | file extension to use when fetching a package from `PyPI | ||
6850 | <https://pypi.org/>`__. Default is ``tar.gz``. | ||
6851 | |||
6852 | :term:`PYPI_SRC_URI` | ||
6853 | When inheriting the :ref:`ref-classes-pypi` class, specifies the | ||
6854 | full `pythonhosted <https://files.pythonhosted.org/>`__ URI for | ||
6855 | fetching the package to be built. The default value is constructed | ||
6856 | based upon :term:`PYPI_PACKAGE`, :term:`PYPI_PACKAGE_EXT`, and | ||
6857 | :term:`PV`. Most recipes will not need to set this variable unless | ||
6858 | they are building an unstable (i.e. development) version. | ||
6859 | |||
6832 | :term:`PYTHON_ABI` | 6860 | :term:`PYTHON_ABI` |
6833 | When used by recipes that inherit the :ref:`ref-classes-setuptools3` | 6861 | When used by recipes that inherit the :ref:`ref-classes-setuptools3` |
6834 | class, denotes the Application Binary Interface (ABI) currently in use | 6862 | class, denotes the Application Binary Interface (ABI) currently in use |
@@ -9670,6 +9698,11 @@ system and gives an overview of their function and contents. | |||
9670 | :ref:`ref-classes-insane` class and is only enabled if the | 9698 | :ref:`ref-classes-insane` class and is only enabled if the |
9671 | recipe inherits the :ref:`ref-classes-autotools` class. | 9699 | recipe inherits the :ref:`ref-classes-autotools` class. |
9672 | 9700 | ||
9701 | :term:`UNPACKDIR` | ||
9702 | This variable, used by the :ref:`ref-classes-base` class, | ||
9703 | specifies where fetched sources should be unpacked by the | ||
9704 | :ref:`ref-tasks-unpack` task. | ||
9705 | |||
9673 | :term:`UPDATERCPN` | 9706 | :term:`UPDATERCPN` |
9674 | For recipes inheriting the | 9707 | For recipes inheriting the |
9675 | :ref:`ref-classes-update-rc.d` class, :term:`UPDATERCPN` | 9708 | :ref:`ref-classes-update-rc.d` class, :term:`UPDATERCPN` |
diff --git a/meta-poky/recipes-core/busybox/busybox/poky-tiny/defconfig b/meta-poky/recipes-core/busybox/busybox/poky-tiny/defconfig index 6e9faa4119..16c1907ae8 100644 --- a/meta-poky/recipes-core/busybox/busybox/poky-tiny/defconfig +++ b/meta-poky/recipes-core/busybox/busybox/poky-tiny/defconfig | |||
@@ -937,8 +937,8 @@ CONFIG_FEATURE_FANCY_PING=y | |||
937 | CONFIG_ROUTE=y | 937 | CONFIG_ROUTE=y |
938 | # CONFIG_SLATTACH is not set | 938 | # CONFIG_SLATTACH is not set |
939 | CONFIG_SSL_CLIENT=y | 939 | CONFIG_SSL_CLIENT=y |
940 | CONFIG_TC=y | 940 | # CONFIG_TC is not set |
941 | CONFIG_FEATURE_TC_INGRESS=y | 941 | # CONFIG_FEATURE_TC_INGRESS is not set |
942 | # CONFIG_TCPSVD is not set | 942 | # CONFIG_TCPSVD is not set |
943 | # CONFIG_UDPSVD is not set | 943 | # CONFIG_UDPSVD is not set |
944 | CONFIG_TELNET=y | 944 | CONFIG_TELNET=y |
diff --git a/meta-poky/recipes-core/tiny-init/tiny-init.bb b/meta-poky/recipes-core/tiny-init/tiny-init.bb index 1de4f5e715..586596259b 100644 --- a/meta-poky/recipes-core/tiny-init/tiny-init.bb +++ b/meta-poky/recipes-core/tiny-init/tiny-init.bb | |||
@@ -11,7 +11,8 @@ SRC_URI = "file://init \ | |||
11 | file://rc.local.sample \ | 11 | file://rc.local.sample \ |
12 | " | 12 | " |
13 | 13 | ||
14 | S = "${WORKDIR}" | 14 | S = "${WORKDIR}/sources" |
15 | UNPACKDIR = "${S}" | ||
15 | 16 | ||
16 | do_configure() { | 17 | do_configure() { |
17 | : | 18 | : |
@@ -23,8 +24,8 @@ do_compile() { | |||
23 | 24 | ||
24 | do_install() { | 25 | do_install() { |
25 | install -d ${D}${sysconfdir} | 26 | install -d ${D}${sysconfdir} |
26 | install -m 0755 ${WORKDIR}/init ${D} | 27 | install -m 0755 ${S}/init ${D} |
27 | install -m 0755 ${WORKDIR}/rc.local.sample ${D}${sysconfdir} | 28 | install -m 0755 ${S}/rc.local.sample ${D}${sysconfdir} |
28 | } | 29 | } |
29 | 30 | ||
30 | FILES:${PN} = "/init ${sysconfdir}/rc.local.sample" | 31 | FILES:${PN} = "/init ${sysconfdir}/rc.local.sample" |
diff --git a/meta-selftest/classes/localpkgfeed.bbclass b/meta-selftest/classes/localpkgfeed.bbclass new file mode 100644 index 0000000000..b796375e55 --- /dev/null +++ b/meta-selftest/classes/localpkgfeed.bbclass | |||
@@ -0,0 +1,27 @@ | |||
1 | # Create a subset of the package feed that just contains the | ||
2 | # packages depended on by this recipe. | ||
3 | |||
4 | LOCALPKGFEED_DIR = "${WORKDIR}/localpkgfeed" | ||
5 | |||
6 | addtask localpkgfeed after do_build | ||
7 | do_localpkgfeed[cleandirs] = "${LOCALPKGFEED_DIR}" | ||
8 | do_localpkgfeed[nostamp] = "1" | ||
9 | |||
10 | def get_packaging_class(d): | ||
11 | package_class = d.getVar("PACKAGE_CLASSES").split()[0] | ||
12 | return package_class.replace("package_", "") | ||
13 | |||
14 | python () { | ||
15 | packaging = get_packaging_class(d) | ||
16 | d.setVarFlag("do_localpkgfeed", "rdeptask", "do_package_write_" + packaging) | ||
17 | } | ||
18 | |||
19 | python do_localpkgfeed() { | ||
20 | import oe.package_manager | ||
21 | |||
22 | packaging = get_packaging_class(d) | ||
23 | deploydir = d.getVar("DEPLOY_DIR_" + packaging.upper()) | ||
24 | task = "package_write_" + packaging | ||
25 | |||
26 | oe.package_manager.create_packages_dir(d, d.getVar("LOCALPKGFEED_DIR"), deploydir, task, True, True) | ||
27 | } | ||
diff --git a/meta-selftest/recipes-test/cpp/cpp-example.inc b/meta-selftest/recipes-test/cpp/cpp-example.inc index ad374be9d0..41aaa7219a 100644 --- a/meta-selftest/recipes-test/cpp/cpp-example.inc +++ b/meta-selftest/recipes-test/cpp/cpp-example.inc | |||
@@ -19,6 +19,7 @@ SRC_URI = "\ | |||
19 | file://run-ptest \ | 19 | file://run-ptest \ |
20 | " | 20 | " |
21 | 21 | ||
22 | S = "${WORKDIR}" | 22 | S = "${WORKDIR}/sources" |
23 | UNPACKDIR = "${S}" | ||
23 | 24 | ||
24 | inherit ptest | 25 | inherit ptest |
diff --git a/meta-selftest/recipes-test/devtool/devtool-test-localonly.bb b/meta-selftest/recipes-test/devtool/devtool-test-localonly.bb index e767619879..446c51f09b 100644 --- a/meta-selftest/recipes-test/devtool/devtool-test-localonly.bb +++ b/meta-selftest/recipes-test/devtool/devtool-test-localonly.bb | |||
@@ -6,5 +6,8 @@ SRC_URI = "file://file1 \ | |||
6 | 6 | ||
7 | SRC_URI:append:class-native = " file://file3" | 7 | SRC_URI:append:class-native = " file://file3" |
8 | 8 | ||
9 | S = "${WORKDIR}/sources" | ||
10 | UNPACKDIR = "${S}" | ||
11 | |||
9 | EXCLUDE_FROM_WORLD = "1" | 12 | EXCLUDE_FROM_WORLD = "1" |
10 | BBCLASSEXTEND = "native" | 13 | BBCLASSEXTEND = "native" |
diff --git a/meta-selftest/recipes-test/recipeutils/recipeutils-test_1.2.bb b/meta-selftest/recipes-test/recipeutils/recipeutils-test_1.2.bb index ad9f475d15..8b314d396e 100644 --- a/meta-selftest/recipes-test/recipeutils/recipeutils-test_1.2.bb +++ b/meta-selftest/recipes-test/recipeutils/recipeutils-test_1.2.bb | |||
@@ -3,9 +3,12 @@ SUMMARY = "Test recipe for recipeutils.patch_recipe()" | |||
3 | require recipeutils-test.inc | 3 | require recipeutils-test.inc |
4 | 4 | ||
5 | LICENSE = "HPND" | 5 | LICENSE = "HPND" |
6 | LIC_FILES_CHKSUM = "file://${WORKDIR}/somefile;md5=d41d8cd98f00b204e9800998ecf8427e" | 6 | LIC_FILES_CHKSUM = "file://${UNPACKDIR}/somefile;md5=d41d8cd98f00b204e9800998ecf8427e" |
7 | DEPENDS += "zlib" | 7 | DEPENDS += "zlib" |
8 | 8 | ||
9 | S = "${WORKDIR}/sources" | ||
10 | UNPACKDIR = "${S}" | ||
11 | |||
9 | BBCLASSEXTEND = "native nativesdk" | 12 | BBCLASSEXTEND = "native nativesdk" |
10 | 13 | ||
11 | SRC_URI += "file://somefile" | 14 | SRC_URI += "file://somefile" |
diff --git a/meta-selftest/recipes-test/selftest-chown/selftest-chown.bb b/meta-selftest/recipes-test/selftest-chown/selftest-chown.bb index aa6ce0c2a0..a0eeb4f223 100644 --- a/meta-selftest/recipes-test/selftest-chown/selftest-chown.bb +++ b/meta-selftest/recipes-test/selftest-chown/selftest-chown.bb | |||
@@ -5,7 +5,8 @@ LICENSE = "MIT" | |||
5 | 5 | ||
6 | DEPENDS += "coreutils-native" | 6 | DEPENDS += "coreutils-native" |
7 | 7 | ||
8 | S = "${WORKDIR}" | 8 | S = "${WORKDIR}/sources" |
9 | UNPACKDIR = "${S}" | ||
9 | 10 | ||
10 | inherit useradd allarch | 11 | inherit useradd allarch |
11 | 12 | ||
diff --git a/meta-selftest/recipes-test/selftest-hardlink/selftest-hardlink.bb b/meta-selftest/recipes-test/selftest-hardlink/selftest-hardlink.bb index 49c3fe827a..5632bdac7a 100644 --- a/meta-selftest/recipes-test/selftest-hardlink/selftest-hardlink.bb +++ b/meta-selftest/recipes-test/selftest-hardlink/selftest-hardlink.bb | |||
@@ -6,7 +6,8 @@ SRC_URI = "file://hello.c \ | |||
6 | file://gdb.sh \ | 6 | file://gdb.sh \ |
7 | " | 7 | " |
8 | 8 | ||
9 | S = "${WORKDIR}" | 9 | S = "${WORKDIR}/sources" |
10 | UNPACKDIR = "${S}" | ||
10 | 11 | ||
11 | do_compile () { | 12 | do_compile () { |
12 | ${CC} hello.c -o hello1 ${CFLAGS} ${LDFLAGS} | 13 | ${CC} hello.c -o hello1 ${CFLAGS} ${LDFLAGS} |
diff --git a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb index 547587bef4..2dc352d479 100644 --- a/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb +++ b/meta-selftest/recipes-test/selftest-hello/selftest-hello_1.0.bb | |||
@@ -5,7 +5,8 @@ LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda | |||
5 | 5 | ||
6 | SRC_URI = "file://helloworld.c" | 6 | SRC_URI = "file://helloworld.c" |
7 | 7 | ||
8 | S = "${WORKDIR}" | 8 | S = "${WORKDIR}/sources" |
9 | UNPACKDIR = "${S}" | ||
9 | 10 | ||
10 | do_compile() { | 11 | do_compile() { |
11 | ${CC} ${CFLAGS} ${LDFLAGS} helloworld.c -o helloworld | 12 | ${CC} ${CFLAGS} ${LDFLAGS} helloworld.c -o helloworld |
@@ -16,4 +17,4 @@ do_install() { | |||
16 | install -m 0755 helloworld ${D}${bindir} | 17 | install -m 0755 helloworld ${D}${bindir} |
17 | } | 18 | } |
18 | 19 | ||
19 | BBCLASSEXTEND = "native nativesdk" \ No newline at end of file | 20 | BBCLASSEXTEND = "native nativesdk" |
diff --git a/meta-selftest/recipes-test/selftest-users/acreategroup.bb b/meta-selftest/recipes-test/selftest-users/acreategroup.bb index 66ed5695a2..7805182d48 100644 --- a/meta-selftest/recipes-test/selftest-users/acreategroup.bb +++ b/meta-selftest/recipes-test/selftest-users/acreategroup.bb | |||
@@ -3,7 +3,8 @@ LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda | |||
3 | 3 | ||
4 | LICENSE = "MIT" | 4 | LICENSE = "MIT" |
5 | 5 | ||
6 | S = "${WORKDIR}" | 6 | S = "${WORKDIR}/sources" |
7 | UNPACKDIR = "${S}" | ||
7 | 8 | ||
8 | EXCLUDE_FROM_WORLD="1" | 9 | EXCLUDE_FROM_WORLD="1" |
9 | 10 | ||
diff --git a/meta-selftest/recipes-test/selftest-users/bcreategroup.bb b/meta-selftest/recipes-test/selftest-users/bcreategroup.bb index c4844dd0da..b15c07d7b2 100644 --- a/meta-selftest/recipes-test/selftest-users/bcreategroup.bb +++ b/meta-selftest/recipes-test/selftest-users/bcreategroup.bb | |||
@@ -7,7 +7,8 @@ LICENSE = "MIT" | |||
7 | 7 | ||
8 | USERADD_DEPENDS = "acreategroup ccreategroup" | 8 | USERADD_DEPENDS = "acreategroup ccreategroup" |
9 | 9 | ||
10 | S = "${WORKDIR}" | 10 | S = "${WORKDIR}/sources" |
11 | UNPACKDIR = "${S}" | ||
11 | 12 | ||
12 | EXCLUDE_FROM_WORLD="1" | 13 | EXCLUDE_FROM_WORLD="1" |
13 | 14 | ||
diff --git a/meta-selftest/recipes-test/selftest-users/ccreategroup.bb b/meta-selftest/recipes-test/selftest-users/ccreategroup.bb index 021b1ebbf7..ff2da1c039 100644 --- a/meta-selftest/recipes-test/selftest-users/ccreategroup.bb +++ b/meta-selftest/recipes-test/selftest-users/ccreategroup.bb | |||
@@ -5,7 +5,8 @@ LICENSE = "MIT" | |||
5 | 5 | ||
6 | USERADD_DEPENDS = "acreategroup" | 6 | USERADD_DEPENDS = "acreategroup" |
7 | 7 | ||
8 | S = "${WORKDIR}" | 8 | S = "${WORKDIR}/sources" |
9 | UNPACKDIR = "${S}" | ||
9 | 10 | ||
10 | EXCLUDE_FROM_WORLD="1" | 11 | EXCLUDE_FROM_WORLD="1" |
11 | 12 | ||
diff --git a/meta-selftest/recipes-test/selftest-users/creategroup1.bb b/meta-selftest/recipes-test/selftest-users/creategroup1.bb index afd23ed1ee..4ab278f589 100644 --- a/meta-selftest/recipes-test/selftest-users/creategroup1.bb +++ b/meta-selftest/recipes-test/selftest-users/creategroup1.bb | |||
@@ -3,7 +3,8 @@ LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda | |||
3 | 3 | ||
4 | LICENSE = "MIT" | 4 | LICENSE = "MIT" |
5 | 5 | ||
6 | S = "${WORKDIR}" | 6 | S = "${WORKDIR}/sources" |
7 | UNPACKDIR = "${S}" | ||
7 | 8 | ||
8 | inherit useradd allarch | 9 | inherit useradd allarch |
9 | 10 | ||
diff --git a/meta-selftest/recipes-test/selftest-users/creategroup2.bb b/meta-selftest/recipes-test/selftest-users/creategroup2.bb index f776f43aed..179aba9bfc 100644 --- a/meta-selftest/recipes-test/selftest-users/creategroup2.bb +++ b/meta-selftest/recipes-test/selftest-users/creategroup2.bb | |||
@@ -5,7 +5,8 @@ LICENSE = "MIT" | |||
5 | 5 | ||
6 | USERADD_DEPENDS = "creategroup1" | 6 | USERADD_DEPENDS = "creategroup1" |
7 | 7 | ||
8 | S = "${WORKDIR}" | 8 | S = "${WORKDIR}/sources" |
9 | UNPACKDIR = "${S}" | ||
9 | 10 | ||
10 | inherit useradd allarch | 11 | inherit useradd allarch |
11 | 12 | ||
diff --git a/meta-selftest/recipes-test/selftest-users/dcreategroup.bb b/meta-selftest/recipes-test/selftest-users/dcreategroup.bb index b96ca92a16..ab0a529669 100644 --- a/meta-selftest/recipes-test/selftest-users/dcreategroup.bb +++ b/meta-selftest/recipes-test/selftest-users/dcreategroup.bb | |||
@@ -5,7 +5,8 @@ LICENSE = "MIT" | |||
5 | 5 | ||
6 | USERADD_DEPENDS = "bcreategroup" | 6 | USERADD_DEPENDS = "bcreategroup" |
7 | 7 | ||
8 | S = "${WORKDIR}" | 8 | S = "${WORKDIR}/sources" |
9 | UNPACKDIR = "${S}" | ||
9 | 10 | ||
10 | EXCLUDE_FROM_WORLD="1" | 11 | EXCLUDE_FROM_WORLD="1" |
11 | 12 | ||
diff --git a/meta-selftest/recipes-test/selftest-users/useraddbadtask.bb b/meta-selftest/recipes-test/selftest-users/useraddbadtask.bb index 99e04a80b3..2863541010 100644 --- a/meta-selftest/recipes-test/selftest-users/useraddbadtask.bb +++ b/meta-selftest/recipes-test/selftest-users/useraddbadtask.bb | |||
@@ -5,7 +5,8 @@ LICENSE = "MIT" | |||
5 | 5 | ||
6 | DEPENDS:append = "coreutils-native" | 6 | DEPENDS:append = "coreutils-native" |
7 | 7 | ||
8 | S = "${WORKDIR}" | 8 | S = "${WORKDIR}/sources" |
9 | UNPACKDIR = "${S}" | ||
9 | 10 | ||
10 | inherit useradd allarch | 11 | inherit useradd allarch |
11 | 12 | ||
diff --git a/meta-selftest/recipes-test/testrpm/testrpm_0.0.1.bb b/meta-selftest/recipes-test/testrpm/testrpm_0.0.1.bb index 5e8761ab55..db674d0efc 100644 --- a/meta-selftest/recipes-test/testrpm/testrpm_0.0.1.bb +++ b/meta-selftest/recipes-test/testrpm/testrpm_0.0.1.bb | |||
@@ -6,6 +6,9 @@ LICENSE = "MIT" | |||
6 | SRC_URI = "file://testfile.txt" | 6 | SRC_URI = "file://testfile.txt" |
7 | INHIBIT_DEFAULT_DEPS = "1" | 7 | INHIBIT_DEFAULT_DEPS = "1" |
8 | 8 | ||
9 | S = "${WORKDIR}/sources" | ||
10 | UNPACKDIR = "${S}" | ||
11 | |||
9 | do_compile(){ | 12 | do_compile(){ |
10 | echo "testdata" > ${B}/"file with [brackets].txt" | 13 | echo "testdata" > ${B}/"file with [brackets].txt" |
11 | echo "testdata" > ${B}/"file with (parentheses).txt" | 14 | echo "testdata" > ${B}/"file with (parentheses).txt" |
diff --git a/meta-skeleton/recipes-kernel/hello-mod/hello-mod_0.1.bb b/meta-skeleton/recipes-kernel/hello-mod/hello-mod_0.1.bb index a2fb212a68..79f2e8a092 100644 --- a/meta-skeleton/recipes-kernel/hello-mod/hello-mod_0.1.bb +++ b/meta-skeleton/recipes-kernel/hello-mod/hello-mod_0.1.bb | |||
@@ -10,7 +10,8 @@ SRC_URI = "file://Makefile \ | |||
10 | file://COPYING \ | 10 | file://COPYING \ |
11 | " | 11 | " |
12 | 12 | ||
13 | S = "${WORKDIR}" | 13 | S = "${WORKDIR}/sources" |
14 | UNPACKDIR = "${S}" | ||
14 | 15 | ||
15 | # The inherit of module.bbclass will automatically name module packages with | 16 | # The inherit of module.bbclass will automatically name module packages with |
16 | # "kernel-module-" prefix as required by the oe-core build environment. | 17 | # "kernel-module-" prefix as required by the oe-core build environment. |
diff --git a/meta-skeleton/recipes-skeleton/hello-single/hello_1.0.bb b/meta-skeleton/recipes-skeleton/hello-single/hello_1.0.bb index 90d3aefd86..8be7980919 100644 --- a/meta-skeleton/recipes-skeleton/hello-single/hello_1.0.bb +++ b/meta-skeleton/recipes-skeleton/hello-single/hello_1.0.bb | |||
@@ -5,7 +5,8 @@ LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda | |||
5 | 5 | ||
6 | SRC_URI = "file://helloworld.c" | 6 | SRC_URI = "file://helloworld.c" |
7 | 7 | ||
8 | S = "${WORKDIR}" | 8 | S = "${WORKDIR}/sources" |
9 | UNPACKDIR = "${S}" | ||
9 | 10 | ||
10 | do_compile() { | 11 | do_compile() { |
11 | ${CC} ${LDFLAGS} helloworld.c -o helloworld | 12 | ${CC} ${LDFLAGS} helloworld.c -o helloworld |
diff --git a/meta-skeleton/recipes-skeleton/service/service_0.1.bb b/meta-skeleton/recipes-skeleton/service/service_0.1.bb index 912f6b0f61..54b834d45f 100644 --- a/meta-skeleton/recipes-skeleton/service/service_0.1.bb +++ b/meta-skeleton/recipes-skeleton/service/service_0.1.bb | |||
@@ -2,22 +2,23 @@ SUMMARY = "The canonical example of init scripts" | |||
2 | SECTION = "base" | 2 | SECTION = "base" |
3 | DESCRIPTION = "This recipe is a canonical example of init scripts" | 3 | DESCRIPTION = "This recipe is a canonical example of init scripts" |
4 | LICENSE = "GPL-2.0-only" | 4 | LICENSE = "GPL-2.0-only" |
5 | LIC_FILES_CHKSUM = "file://${WORKDIR}/COPYRIGHT;md5=349c872e0066155e1818b786938876a4" | 5 | LIC_FILES_CHKSUM = "file://COPYRIGHT;md5=349c872e0066155e1818b786938876a4" |
6 | 6 | ||
7 | SRC_URI = "file://skeleton \ | 7 | SRC_URI = "file://skeleton \ |
8 | file://skeleton_test.c \ | 8 | file://skeleton_test.c \ |
9 | file://COPYRIGHT \ | 9 | file://COPYRIGHT \ |
10 | " | 10 | " |
11 | 11 | ||
12 | S = "${WORKDIR}" | 12 | S = "${WORKDIR}/sources" |
13 | UNPACKDIR = "${S}" | ||
13 | 14 | ||
14 | do_compile () { | 15 | do_compile () { |
15 | ${CC} ${CFLAGS} ${LDFLAGS} ${WORKDIR}/skeleton_test.c -o ${WORKDIR}/skeleton-test | 16 | ${CC} ${CFLAGS} ${LDFLAGS} ${S}/skeleton_test.c -o ${B}/skeleton-test |
16 | } | 17 | } |
17 | 18 | ||
18 | do_install () { | 19 | do_install () { |
19 | install -d ${D}${sysconfdir}/init.d | 20 | install -d ${D}${sysconfdir}/init.d |
20 | cat ${WORKDIR}/skeleton | \ | 21 | cat ${S}/skeleton | \ |
21 | sed -e 's,/etc,${sysconfdir},g' \ | 22 | sed -e 's,/etc,${sysconfdir},g' \ |
22 | -e 's,/usr/sbin,${sbindir},g' \ | 23 | -e 's,/usr/sbin,${sbindir},g' \ |
23 | -e 's,/var,${localstatedir},g' \ | 24 | -e 's,/var,${localstatedir},g' \ |
@@ -26,7 +27,7 @@ do_install () { | |||
26 | chmod a+x ${D}${sysconfdir}/init.d/skeleton | 27 | chmod a+x ${D}${sysconfdir}/init.d/skeleton |
27 | 28 | ||
28 | install -d ${D}${sbindir} | 29 | install -d ${D}${sbindir} |
29 | install -m 0755 ${WORKDIR}/skeleton-test ${D}${sbindir}/ | 30 | install -m 0755 ${S}/skeleton-test ${D}${sbindir}/ |
30 | } | 31 | } |
31 | 32 | ||
32 | RDEPENDS:${PN} = "initscripts" | 33 | RDEPENDS:${PN} = "initscripts" |
diff --git a/meta-skeleton/recipes-skeleton/useradd/useradd-example.bb b/meta-skeleton/recipes-skeleton/useradd/useradd-example.bb index 8437a5a774..b0d96e7f71 100644 --- a/meta-skeleton/recipes-skeleton/useradd/useradd-example.bb +++ b/meta-skeleton/recipes-skeleton/useradd/useradd-example.bb | |||
@@ -9,7 +9,8 @@ SRC_URI = "file://file1 \ | |||
9 | file://file3 \ | 9 | file://file3 \ |
10 | file://file4" | 10 | file://file4" |
11 | 11 | ||
12 | S = "${WORKDIR}" | 12 | S = "${WORKDIR}/sources" |
13 | UNPACKDIR = "${S}" | ||
13 | 14 | ||
14 | PACKAGES =+ "${PN}-user3" | 15 | PACKAGES =+ "${PN}-user3" |
15 | 16 | ||
diff --git a/meta-yocto-bsp/conf/machine/genericarm64.conf b/meta-yocto-bsp/conf/machine/genericarm64.conf index 4fa9395b31..1cb5e46dcf 100644 --- a/meta-yocto-bsp/conf/machine/genericarm64.conf +++ b/meta-yocto-bsp/conf/machine/genericarm64.conf | |||
@@ -58,3 +58,6 @@ QB_SERIAL_OPT = "-device virtio-serial-pci -chardev null,id=virtcon -device virt | |||
58 | QB_TCPSERIAL_OPT = "-device virtio-serial-pci -chardev socket,id=virtcon,port=@PORT@,host=127.0.0.1,nodelay=on -device virtconsole,chardev=virtcon" | 58 | QB_TCPSERIAL_OPT = "-device virtio-serial-pci -chardev socket,id=virtcon,port=@PORT@,host=127.0.0.1,nodelay=on -device virtconsole,chardev=virtcon" |
59 | # Virtio networking | 59 | # Virtio networking |
60 | QB_TAP_OPT = "-netdev tap,id=net0,ifname=@TAP@,script=no,downscript=no" | 60 | QB_TAP_OPT = "-netdev tap,id=net0,ifname=@TAP@,script=no,downscript=no" |
61 | |||
62 | # If we're running testimage then we're in a qemu, so ensure u-boot is build | ||
63 | TESTIMAGEDEPENDS:append = " u-boot:do_deploy" | ||
diff --git a/meta-yocto-bsp/lib/oeqa/selftest/cases/systemd_boot.py b/meta-yocto-bsp/lib/oeqa/selftest/cases/systemd_boot.py index 6ce9a3b3f8..6fc6925f69 100644 --- a/meta-yocto-bsp/lib/oeqa/selftest/cases/systemd_boot.py +++ b/meta-yocto-bsp/lib/oeqa/selftest/cases/systemd_boot.py | |||
@@ -18,7 +18,8 @@ class Systemdboot(OESelftestTestCase): | |||
18 | 18 | ||
19 | # Set EFI_PROVIDER = "systemdboot" and MACHINE = "genericx86-64" in conf/local.conf | 19 | # Set EFI_PROVIDER = "systemdboot" and MACHINE = "genericx86-64" in conf/local.conf |
20 | features = 'EFI_PROVIDER = "systemd-boot"\n' | 20 | features = 'EFI_PROVIDER = "systemd-boot"\n' |
21 | features += 'MACHINE = "genericx86-64"' | 21 | features += 'MACHINE = "genericx86-64"\n' |
22 | features += 'COMPATIBLE_MACHINE:pn-ssh-pregen-hostkeys:genericx86-64 = "genericx86-64"\n' | ||
22 | self.append_config(features) | 23 | self.append_config(features) |
23 | 24 | ||
24 | image = 'core-image-minimal' | 25 | image = 'core-image-minimal' |
diff --git a/meta/classes-global/base.bbclass b/meta/classes-global/base.bbclass index 066f3848f7..b6940bbb6f 100644 --- a/meta/classes-global/base.bbclass +++ b/meta/classes-global/base.bbclass | |||
@@ -153,20 +153,38 @@ python base_do_fetch() { | |||
153 | } | 153 | } |
154 | 154 | ||
155 | addtask unpack after do_fetch | 155 | addtask unpack after do_fetch |
156 | do_unpack[dirs] = "${UNPACKDIR}" | 156 | do_unpack[cleandirs] = "${UNPACKDIR}" |
157 | |||
158 | do_unpack[cleandirs] = "${@d.getVar('S') if os.path.normpath(d.getVar('S')) != os.path.normpath(d.getVar('WORKDIR')) else os.path.join('${S}', 'patches')}" | ||
159 | 157 | ||
160 | python base_do_unpack() { | 158 | python base_do_unpack() { |
159 | import shutil | ||
160 | |||
161 | sourcedir = d.getVar('S') | ||
162 | # Intentionally keep SOURCE_BASEDIR internal to the task just for SDE | ||
163 | d.setVar("SOURCE_BASEDIR", sourcedir) | ||
164 | |||
161 | src_uri = (d.getVar('SRC_URI') or "").split() | 165 | src_uri = (d.getVar('SRC_URI') or "").split() |
162 | if not src_uri: | 166 | if not src_uri: |
163 | return | 167 | return |
164 | 168 | ||
169 | basedir = None | ||
170 | unpackdir = d.getVar('UNPACKDIR') | ||
171 | workdir = d.getVar('WORKDIR') | ||
172 | if sourcedir.startswith(workdir) and not sourcedir.startswith(unpackdir): | ||
173 | basedir = sourcedir.replace(workdir, '').strip("/").split('/')[0] | ||
174 | if basedir: | ||
175 | bb.utils.remove(workdir + '/' + basedir, True) | ||
176 | d.setVar("SOURCE_BASEDIR", workdir + '/' + basedir) | ||
177 | |||
165 | try: | 178 | try: |
166 | fetcher = bb.fetch2.Fetch(src_uri, d) | 179 | fetcher = bb.fetch2.Fetch(src_uri, d) |
167 | fetcher.unpack(d.getVar('UNPACKDIR')) | 180 | fetcher.unpack(d.getVar('UNPACKDIR')) |
168 | except bb.fetch2.BBFetchException as e: | 181 | except bb.fetch2.BBFetchException as e: |
169 | bb.fatal("Bitbake Fetcher Error: " + repr(e)) | 182 | bb.fatal("Bitbake Fetcher Error: " + repr(e)) |
183 | |||
184 | if basedir and os.path.exists(unpackdir + '/' + basedir): | ||
185 | # Compatibility magic to ensure ${WORKDIR}/git and ${WORKDIR}/${BP} | ||
186 | # as often used in S work as expected. | ||
187 | shutil.move(unpackdir + '/' + basedir, workdir + '/' + basedir) | ||
170 | } | 188 | } |
171 | 189 | ||
172 | SSTATETASKS += "do_deploy_source_date_epoch" | 190 | SSTATETASKS += "do_deploy_source_date_epoch" |
@@ -199,8 +217,8 @@ addtask do_deploy_source_date_epoch_setscene | |||
199 | addtask do_deploy_source_date_epoch before do_configure after do_patch | 217 | addtask do_deploy_source_date_epoch before do_configure after do_patch |
200 | 218 | ||
201 | python create_source_date_epoch_stamp() { | 219 | python create_source_date_epoch_stamp() { |
202 | # Version: 1 | 220 | # Version: 2 |
203 | source_date_epoch = oe.reproducible.get_source_date_epoch(d, d.getVar('S')) | 221 | source_date_epoch = oe.reproducible.get_source_date_epoch(d, d.getVar('SOURCE_BASEDIR') or d.getVar('S')) |
204 | oe.reproducible.epochfile_write(source_date_epoch, d.getVar('SDE_FILE'), d) | 222 | oe.reproducible.epochfile_write(source_date_epoch, d.getVar('SDE_FILE'), d) |
205 | } | 223 | } |
206 | do_unpack[postfuncs] += "create_source_date_epoch_stamp" | 224 | do_unpack[postfuncs] += "create_source_date_epoch_stamp" |
@@ -410,16 +428,6 @@ python () { | |||
410 | oe.utils.features_backfill("DISTRO_FEATURES", d) | 428 | oe.utils.features_backfill("DISTRO_FEATURES", d) |
411 | oe.utils.features_backfill("MACHINE_FEATURES", d) | 429 | oe.utils.features_backfill("MACHINE_FEATURES", d) |
412 | 430 | ||
413 | if d.getVar("S")[-1] == '/': | ||
414 | bb.warn("Recipe %s sets S variable with trailing slash '%s', remove it" % (d.getVar("PN"), d.getVar("S"))) | ||
415 | if d.getVar("B")[-1] == '/': | ||
416 | bb.warn("Recipe %s sets B variable with trailing slash '%s', remove it" % (d.getVar("PN"), d.getVar("B"))) | ||
417 | |||
418 | if os.path.normpath(d.getVar("WORKDIR")) != os.path.normpath(d.getVar("S")): | ||
419 | d.appendVar("PSEUDO_IGNORE_PATHS", ",${S}") | ||
420 | if os.path.normpath(d.getVar("WORKDIR")) != os.path.normpath(d.getVar("B")): | ||
421 | d.appendVar("PSEUDO_IGNORE_PATHS", ",${B}") | ||
422 | |||
423 | # To add a recipe to the skip list , set: | 431 | # To add a recipe to the skip list , set: |
424 | # SKIP_RECIPE[pn] = "message" | 432 | # SKIP_RECIPE[pn] = "message" |
425 | pn = d.getVar('PN') | 433 | pn = d.getVar('PN') |
diff --git a/meta/classes-global/insane.bbclass b/meta/classes-global/insane.bbclass index c32dfffd83..99736830b9 100644 --- a/meta/classes-global/insane.bbclass +++ b/meta/classes-global/insane.bbclass | |||
@@ -1399,7 +1399,7 @@ python do_qa_patch() { | |||
1399 | oe.qa.handle_error("unimplemented-ptest", "%s: autotools-based tests detected" % d.getVar('PN'), d) | 1399 | oe.qa.handle_error("unimplemented-ptest", "%s: autotools-based tests detected" % d.getVar('PN'), d) |
1400 | 1400 | ||
1401 | # Last resort, detect a test directory in sources | 1401 | # Last resort, detect a test directory in sources |
1402 | elif any(filename.lower() in ["test", "tests"] for filename in os.listdir(srcdir)): | 1402 | elif os.path.exists(srcdir) and any(filename.lower() in ["test", "tests"] for filename in os.listdir(srcdir)): |
1403 | oe.qa.handle_error("unimplemented-ptest", "%s: test subdirectory detected" % d.getVar('PN'), d) | 1403 | oe.qa.handle_error("unimplemented-ptest", "%s: test subdirectory detected" % d.getVar('PN'), d) |
1404 | 1404 | ||
1405 | oe.qa.exit_if_errors(d) | 1405 | oe.qa.exit_if_errors(d) |
@@ -1602,6 +1602,18 @@ python () { | |||
1602 | if prog.search(pn): | 1602 | if prog.search(pn): |
1603 | oe.qa.handle_error("uppercase-pn", 'PN: %s is upper case, this can result in unexpected behavior.' % pn, d) | 1603 | oe.qa.handle_error("uppercase-pn", 'PN: %s is upper case, this can result in unexpected behavior.' % pn, d) |
1604 | 1604 | ||
1605 | sourcedir = d.getVar("S") | ||
1606 | builddir = d.getVar("B") | ||
1607 | workdir = d.getVar("WORKDIR") | ||
1608 | if sourcedir == workdir: | ||
1609 | bb.fatal("Using S = ${WORKDIR} is no longer supported") | ||
1610 | if builddir == workdir: | ||
1611 | bb.fatal("Using B = ${WORKDIR} is no longer supported") | ||
1612 | if sourcedir[-1] == '/': | ||
1613 | bb.warn("Recipe %s sets S variable with trailing slash '%s', remove it" % (d.getVar("PN"), d.getVar("S"))) | ||
1614 | if builddir[-1] == '/': | ||
1615 | bb.warn("Recipe %s sets B variable with trailing slash '%s', remove it" % (d.getVar("PN"), d.getVar("B"))) | ||
1616 | |||
1605 | # Some people mistakenly use DEPENDS:${PN} instead of DEPENDS and wonder | 1617 | # Some people mistakenly use DEPENDS:${PN} instead of DEPENDS and wonder |
1606 | # why it doesn't work. | 1618 | # why it doesn't work. |
1607 | if (d.getVar(d.expand('DEPENDS:${PN}'))): | 1619 | if (d.getVar(d.expand('DEPENDS:${PN}'))): |
diff --git a/meta/classes-global/sanity.bbclass b/meta/classes-global/sanity.bbclass index 180c6b77d8..1d242f0f0a 100644 --- a/meta/classes-global/sanity.bbclass +++ b/meta/classes-global/sanity.bbclass | |||
@@ -495,12 +495,15 @@ def check_gcc_version(sanity_data): | |||
495 | # Tar version 1.24 and onwards handle overwriting symlinks correctly | 495 | # Tar version 1.24 and onwards handle overwriting symlinks correctly |
496 | # but earlier versions do not; this needs to work properly for sstate | 496 | # but earlier versions do not; this needs to work properly for sstate |
497 | # Version 1.28 is needed so opkg-build works correctly when reproducible builds are enabled | 497 | # Version 1.28 is needed so opkg-build works correctly when reproducible builds are enabled |
498 | # Gtar is assumed at to be used as tar in poky | ||
498 | def check_tar_version(sanity_data): | 499 | def check_tar_version(sanity_data): |
499 | import subprocess | 500 | import subprocess |
500 | try: | 501 | try: |
501 | result = subprocess.check_output(["tar", "--version"], stderr=subprocess.STDOUT).decode('utf-8') | 502 | result = subprocess.check_output(["tar", "--version"], stderr=subprocess.STDOUT).decode('utf-8') |
502 | except subprocess.CalledProcessError as e: | 503 | except subprocess.CalledProcessError as e: |
503 | return "Unable to execute tar --version, exit code %d\n%s\n" % (e.returncode, e.output) | 504 | return "Unable to execute tar --version, exit code %d\n%s\n" % (e.returncode, e.output) |
505 | if not "GNU" in result: | ||
506 | return "Your version of tar is not gtar. Please install gtar (you could use the project's buildtools-tarball from our last release or use scripts/install-buildtools).\n" | ||
504 | version = result.split()[3] | 507 | version = result.split()[3] |
505 | if bb.utils.vercmp_string_op(version, "1.28", "<"): | 508 | if bb.utils.vercmp_string_op(version, "1.28", "<"): |
506 | return "Your version of tar is older than 1.28 and does not have the support needed to enable reproducible builds. Please install a newer version of tar (you could use the project's buildtools-tarball from our last release or use scripts/install-buildtools).\n" | 509 | return "Your version of tar is older than 1.28 and does not have the support needed to enable reproducible builds. Please install a newer version of tar (you could use the project's buildtools-tarball from our last release or use scripts/install-buildtools).\n" |
diff --git a/meta/classes-global/sstate.bbclass b/meta/classes-global/sstate.bbclass index 76a7b59636..beb22f424e 100644 --- a/meta/classes-global/sstate.bbclass +++ b/meta/classes-global/sstate.bbclass | |||
@@ -189,7 +189,6 @@ def sstate_state_fromvars(d, task = None): | |||
189 | plaindirs = (d.getVarFlag("do_" + task, 'sstate-plaindirs') or "").split() | 189 | plaindirs = (d.getVarFlag("do_" + task, 'sstate-plaindirs') or "").split() |
190 | lockfiles = (d.getVarFlag("do_" + task, 'sstate-lockfile') or "").split() | 190 | lockfiles = (d.getVarFlag("do_" + task, 'sstate-lockfile') or "").split() |
191 | lockfilesshared = (d.getVarFlag("do_" + task, 'sstate-lockfile-shared') or "").split() | 191 | lockfilesshared = (d.getVarFlag("do_" + task, 'sstate-lockfile-shared') or "").split() |
192 | interceptfuncs = (d.getVarFlag("do_" + task, 'sstate-interceptfuncs') or "").split() | ||
193 | fixmedir = d.getVarFlag("do_" + task, 'sstate-fixmedir') or "" | 192 | fixmedir = d.getVarFlag("do_" + task, 'sstate-fixmedir') or "" |
194 | if not task or len(inputs) != len(outputs): | 193 | if not task or len(inputs) != len(outputs): |
195 | bb.fatal("sstate variables not setup correctly?!") | 194 | bb.fatal("sstate variables not setup correctly?!") |
@@ -205,7 +204,6 @@ def sstate_state_fromvars(d, task = None): | |||
205 | ss['lockfiles'] = lockfiles | 204 | ss['lockfiles'] = lockfiles |
206 | ss['lockfiles-shared'] = lockfilesshared | 205 | ss['lockfiles-shared'] = lockfilesshared |
207 | ss['plaindirs'] = plaindirs | 206 | ss['plaindirs'] = plaindirs |
208 | ss['interceptfuncs'] = interceptfuncs | ||
209 | ss['fixmedir'] = fixmedir | 207 | ss['fixmedir'] = fixmedir |
210 | return ss | 208 | return ss |
211 | 209 | ||
@@ -225,12 +223,23 @@ def sstate_install(ss, d): | |||
225 | import oe.sstatesig | 223 | import oe.sstatesig |
226 | import subprocess | 224 | import subprocess |
227 | 225 | ||
226 | def prepdir(dir): | ||
227 | # remove dir if it exists, ensure any parent directories do exist | ||
228 | if os.path.exists(dir): | ||
229 | oe.path.remove(dir) | ||
230 | bb.utils.mkdirhier(dir) | ||
231 | oe.path.remove(dir) | ||
232 | |||
233 | sstateinst = d.getVar("SSTATE_INSTDIR") | ||
234 | |||
235 | for state in ss['dirs']: | ||
236 | prepdir(state[1]) | ||
237 | bb.utils.rename(sstateinst + state[0], state[1]) | ||
238 | |||
228 | sharedfiles = [] | 239 | sharedfiles = [] |
229 | shareddirs = [] | 240 | shareddirs = [] |
230 | bb.utils.mkdirhier(d.expand("${SSTATE_MANIFESTS}")) | 241 | bb.utils.mkdirhier(d.expand("${SSTATE_MANIFESTS}")) |
231 | 242 | ||
232 | sstateinst = d.expand("${WORKDIR}/sstate-install-%s/" % ss['task']) | ||
233 | |||
234 | manifest, d2 = oe.sstatesig.sstate_get_manifest_filename(ss['task'], d) | 243 | manifest, d2 = oe.sstatesig.sstate_get_manifest_filename(ss['task'], d) |
235 | 244 | ||
236 | if os.access(manifest, os.R_OK): | 245 | if os.access(manifest, os.R_OK): |
@@ -329,6 +338,17 @@ def sstate_install(ss, d): | |||
329 | if os.path.exists(state[1]): | 338 | if os.path.exists(state[1]): |
330 | oe.path.copyhardlinktree(state[1], state[2]) | 339 | oe.path.copyhardlinktree(state[1], state[2]) |
331 | 340 | ||
341 | for plain in ss['plaindirs']: | ||
342 | workdir = d.getVar('WORKDIR') | ||
343 | sharedworkdir = os.path.join(d.getVar('TMPDIR'), "work-shared") | ||
344 | src = sstateinst + "/" + plain.replace(workdir, '') | ||
345 | if sharedworkdir in plain: | ||
346 | src = sstateinst + "/" + plain.replace(sharedworkdir, '') | ||
347 | dest = plain | ||
348 | bb.utils.mkdirhier(src) | ||
349 | prepdir(dest) | ||
350 | bb.utils.rename(src, dest) | ||
351 | |||
332 | for postinst in (d.getVar('SSTATEPOSTINSTFUNCS') or '').split(): | 352 | for postinst in (d.getVar('SSTATEPOSTINSTFUNCS') or '').split(): |
333 | # All hooks should run in the SSTATE_INSTDIR | 353 | # All hooks should run in the SSTATE_INSTDIR |
334 | bb.build.exec_func(postinst, d, (sstateinst,)) | 354 | bb.build.exec_func(postinst, d, (sstateinst,)) |
@@ -393,29 +413,8 @@ def sstate_installpkgdir(ss, d): | |||
393 | # All hooks should run in the SSTATE_INSTDIR | 413 | # All hooks should run in the SSTATE_INSTDIR |
394 | bb.build.exec_func(f, d, (sstateinst,)) | 414 | bb.build.exec_func(f, d, (sstateinst,)) |
395 | 415 | ||
396 | def prepdir(dir): | ||
397 | # remove dir if it exists, ensure any parent directories do exist | ||
398 | if os.path.exists(dir): | ||
399 | oe.path.remove(dir) | ||
400 | bb.utils.mkdirhier(dir) | ||
401 | oe.path.remove(dir) | ||
402 | |||
403 | for state in ss['dirs']: | ||
404 | prepdir(state[1]) | ||
405 | bb.utils.rename(sstateinst + state[0], state[1]) | ||
406 | sstate_install(ss, d) | 416 | sstate_install(ss, d) |
407 | 417 | ||
408 | for plain in ss['plaindirs']: | ||
409 | workdir = d.getVar('WORKDIR') | ||
410 | sharedworkdir = os.path.join(d.getVar('TMPDIR'), "work-shared") | ||
411 | src = sstateinst + "/" + plain.replace(workdir, '') | ||
412 | if sharedworkdir in plain: | ||
413 | src = sstateinst + "/" + plain.replace(sharedworkdir, '') | ||
414 | dest = plain | ||
415 | bb.utils.mkdirhier(src) | ||
416 | prepdir(dest) | ||
417 | bb.utils.rename(src, dest) | ||
418 | |||
419 | return True | 418 | return True |
420 | 419 | ||
421 | python sstate_hardcode_path_unpack () { | 420 | python sstate_hardcode_path_unpack () { |
@@ -790,9 +789,6 @@ sstate_task_prefunc[dirs] = "${WORKDIR}" | |||
790 | python sstate_task_postfunc () { | 789 | python sstate_task_postfunc () { |
791 | shared_state = sstate_state_fromvars(d) | 790 | shared_state = sstate_state_fromvars(d) |
792 | 791 | ||
793 | for intercept in shared_state['interceptfuncs']: | ||
794 | bb.build.exec_func(intercept, d, (d.getVar("WORKDIR"),)) | ||
795 | |||
796 | omask = os.umask(0o002) | 792 | omask = os.umask(0o002) |
797 | if omask != 0o002: | 793 | if omask != 0o002: |
798 | bb.note("Using umask 0o002 (not %0o) for sstate packaging" % omask) | 794 | bb.note("Using umask 0o002 (not %0o) for sstate packaging" % omask) |
diff --git a/meta/classes-recipe/autotools.bbclass b/meta/classes-recipe/autotools.bbclass index 9359c9b4e1..7ee1b0d9c7 100644 --- a/meta/classes-recipe/autotools.bbclass +++ b/meta/classes-recipe/autotools.bbclass | |||
@@ -254,8 +254,6 @@ autotools_do_install() { | |||
254 | fi | 254 | fi |
255 | } | 255 | } |
256 | 256 | ||
257 | inherit siteconfig | ||
258 | |||
259 | EXPORT_FUNCTIONS do_configure do_compile do_install | 257 | EXPORT_FUNCTIONS do_configure do_compile do_install |
260 | 258 | ||
261 | B = "${WORKDIR}/build" | 259 | B = "${WORKDIR}/build" |
diff --git a/meta/classes-recipe/baremetal-image.bbclass b/meta/classes-recipe/baremetal-image.bbclass index b9a584351a..4e7d413626 100644 --- a/meta/classes-recipe/baremetal-image.bbclass +++ b/meta/classes-recipe/baremetal-image.bbclass | |||
@@ -103,7 +103,7 @@ QB_OPT_APPEND:append:qemuriscv32 = " -bios none" | |||
103 | # since medlow can only access addresses below 0x80000000 and RAM | 103 | # since medlow can only access addresses below 0x80000000 and RAM |
104 | # starts at 0x80000000 on RISC-V 64 | 104 | # starts at 0x80000000 on RISC-V 64 |
105 | # Keep RISC-V 32 using -mcmodel=medlow (symbols lie between -2GB:2GB) | 105 | # Keep RISC-V 32 using -mcmodel=medlow (symbols lie between -2GB:2GB) |
106 | CFLAGS:append:qemuriscv64 = " -mcmodel=medany" | 106 | TARGET_CFLAGS:append:qemuriscv64 = " -mcmodel=medany" |
107 | 107 | ||
108 | 108 | ||
109 | ## Emulate image.bbclass | 109 | ## Emulate image.bbclass |
diff --git a/meta/classes-recipe/cargo_common.bbclass b/meta/classes-recipe/cargo_common.bbclass index 0fb443edbd..78440c810b 100644 --- a/meta/classes-recipe/cargo_common.bbclass +++ b/meta/classes-recipe/cargo_common.bbclass | |||
@@ -18,7 +18,7 @@ | |||
18 | inherit rust-common | 18 | inherit rust-common |
19 | 19 | ||
20 | # Where we download our registry and dependencies to | 20 | # Where we download our registry and dependencies to |
21 | export CARGO_HOME = "${WORKDIR}/cargo_home" | 21 | export CARGO_HOME = "${UNPACKDIR}/cargo_home" |
22 | 22 | ||
23 | # The pkg-config-rs library used by cargo build scripts disables itself when | 23 | # The pkg-config-rs library used by cargo build scripts disables itself when |
24 | # cross compiling unless this is defined. We set up pkg-config appropriately | 24 | # cross compiling unless this is defined. We set up pkg-config appropriately |
@@ -138,7 +138,7 @@ python cargo_common_do_patch_paths() { | |||
138 | return | 138 | return |
139 | 139 | ||
140 | patches = dict() | 140 | patches = dict() |
141 | workdir = d.getVar('WORKDIR') | 141 | workdir = d.getVar('UNPACKDIR') |
142 | fetcher = bb.fetch2.Fetch(src_uri, d) | 142 | fetcher = bb.fetch2.Fetch(src_uri, d) |
143 | for url in fetcher.urls: | 143 | for url in fetcher.urls: |
144 | ud = fetcher.ud[url] | 144 | ud = fetcher.ud[url] |
diff --git a/meta/classes-recipe/go.bbclass b/meta/classes-recipe/go.bbclass index d32509aa6d..9146dd611e 100644 --- a/meta/classes-recipe/go.bbclass +++ b/meta/classes-recipe/go.bbclass | |||
@@ -80,19 +80,7 @@ export GOPROXY ??= "https://proxy.golang.org,direct" | |||
80 | export GOTMPDIR ?= "${WORKDIR}/build-tmp" | 80 | export GOTMPDIR ?= "${WORKDIR}/build-tmp" |
81 | GOTMPDIR[vardepvalue] = "" | 81 | GOTMPDIR[vardepvalue] = "" |
82 | 82 | ||
83 | python go_do_unpack() { | 83 | GO_SRCURI_DESTSUFFIX = "${@os.path.join(os.path.basename(d.getVar('S')), 'src', d.getVar('GO_IMPORT')) + '/'}" |
84 | src_uri = (d.getVar('SRC_URI') or "").split() | ||
85 | if len(src_uri) == 0: | ||
86 | return | ||
87 | |||
88 | fetcher = bb.fetch2.Fetch(src_uri, d) | ||
89 | for url in fetcher.urls: | ||
90 | if fetcher.ud[url].type == 'git': | ||
91 | if fetcher.ud[url].parm.get('destsuffix') is None: | ||
92 | s_dirname = os.path.basename(d.getVar('S')) | ||
93 | fetcher.ud[url].parm['destsuffix'] = os.path.join(s_dirname, 'src', d.getVar('GO_IMPORT')) + '/' | ||
94 | fetcher.unpack(d.getVar('WORKDIR')) | ||
95 | } | ||
96 | 84 | ||
97 | go_list_packages() { | 85 | go_list_packages() { |
98 | ${GO} list -f '{{.ImportPath}}' ${GOBUILDFLAGS} ${GO_INSTALL} | \ | 86 | ${GO} list -f '{{.ImportPath}}' ${GOBUILDFLAGS} ${GO_INSTALL} | \ |
@@ -151,7 +139,7 @@ go_stage_testdata() { | |||
151 | cd "$oldwd" | 139 | cd "$oldwd" |
152 | } | 140 | } |
153 | 141 | ||
154 | EXPORT_FUNCTIONS do_unpack do_configure do_compile do_install | 142 | EXPORT_FUNCTIONS do_configure do_compile do_install |
155 | 143 | ||
156 | FILES:${PN}-dev = "${libdir}/go/src" | 144 | FILES:${PN}-dev = "${libdir}/go/src" |
157 | FILES:${PN}-staticdev = "${libdir}/go/pkg" | 145 | FILES:${PN}-staticdev = "${libdir}/go/pkg" |
diff --git a/meta/classes-recipe/image_types.bbclass b/meta/classes-recipe/image_types.bbclass index b4a83ae284..2f948ecbf8 100644 --- a/meta/classes-recipe/image_types.bbclass +++ b/meta/classes-recipe/image_types.bbclass | |||
@@ -113,7 +113,7 @@ IMAGE_CMD:btrfs () { | |||
113 | 113 | ||
114 | oe_mksquashfs () { | 114 | oe_mksquashfs () { |
115 | local comp=$1; shift | 115 | local comp=$1; shift |
116 | local extra_imagecmd=$@ | 116 | local extra_imagecmd="$@" |
117 | 117 | ||
118 | if [ "$comp" = "zstd" ]; then | 118 | if [ "$comp" = "zstd" ]; then |
119 | suffix="zst" | 119 | suffix="zst" |
diff --git a/meta/classes-recipe/kernel-yocto.bbclass b/meta/classes-recipe/kernel-yocto.bbclass index 6468e8aa90..f741a342d4 100644 --- a/meta/classes-recipe/kernel-yocto.bbclass +++ b/meta/classes-recipe/kernel-yocto.bbclass | |||
@@ -234,8 +234,6 @@ do_kernel_metadata() { | |||
234 | for f in ${feat_dirs}; do | 234 | for f in ${feat_dirs}; do |
235 | if [ -d "${UNPACKDIR}/$f/kernel-meta" ]; then | 235 | if [ -d "${UNPACKDIR}/$f/kernel-meta" ]; then |
236 | includes="$includes -I${UNPACKDIR}/$f/kernel-meta" | 236 | includes="$includes -I${UNPACKDIR}/$f/kernel-meta" |
237 | elif [ -d "${UNPACKDIR}/../oe-local-files/$f" ]; then | ||
238 | includes="$includes -I${UNPACKDIR}/../oe-local-files/$f" | ||
239 | elif [ -d "${UNPACKDIR}/$f" ]; then | 237 | elif [ -d "${UNPACKDIR}/$f" ]; then |
240 | includes="$includes -I${UNPACKDIR}/$f" | 238 | includes="$includes -I${UNPACKDIR}/$f" |
241 | fi | 239 | fi |
@@ -379,19 +377,19 @@ do_kernel_checkout() { | |||
379 | set +e | 377 | set +e |
380 | 378 | ||
381 | source_dir=`echo ${S} | sed 's%/$%%'` | 379 | source_dir=`echo ${S} | sed 's%/$%%'` |
382 | source_workdir="${WORKDIR}/git" | 380 | source_workdir="${UNPACKDIR}/git" |
383 | if [ -d "${WORKDIR}/git/" ]; then | 381 | if [ -d "${UNPACKDIR}/git/" ]; then |
384 | # case: git repository | 382 | # case: git repository |
385 | # if S is WORKDIR/git, then we shouldn't be moving or deleting the tree. | 383 | # if S is WORKDIR/git, then we shouldn't be moving or deleting the tree. |
386 | if [ "${source_dir}" != "${source_workdir}" ]; then | 384 | if [ "${source_dir}" != "${source_workdir}" ]; then |
387 | if [ -d "${source_workdir}/.git" ]; then | 385 | if [ -d "${source_workdir}/.git" ]; then |
388 | # regular git repository with .git | 386 | # regular git repository with .git |
389 | rm -rf ${S} | 387 | rm -rf ${S} |
390 | mv ${WORKDIR}/git ${S} | 388 | mv ${UNPACKDIR}/git ${S} |
391 | else | 389 | else |
392 | # create source for bare cloned git repository | 390 | # create source for bare cloned git repository |
393 | git clone ${WORKDIR}/git ${S} | 391 | git clone ${WORKDIR}/git ${S} |
394 | rm -rf ${WORKDIR}/git | 392 | rm -rf ${UNPACKDIR}/git |
395 | fi | 393 | fi |
396 | fi | 394 | fi |
397 | cd ${S} | 395 | cd ${S} |
@@ -434,7 +432,7 @@ do_kernel_checkout() { | |||
434 | 432 | ||
435 | set -e | 433 | set -e |
436 | } | 434 | } |
437 | do_kernel_checkout[dirs] = "${S} ${WORKDIR}" | 435 | do_kernel_checkout[dirs] = "${S} ${UNPACKDIR}" |
438 | 436 | ||
439 | addtask kernel_checkout before do_kernel_metadata after do_symlink_kernsrc | 437 | addtask kernel_checkout before do_kernel_metadata after do_symlink_kernsrc |
440 | addtask kernel_metadata after do_validate_branches do_unpack before do_patch | 438 | addtask kernel_metadata after do_validate_branches do_unpack before do_patch |
@@ -442,6 +440,11 @@ do_kernel_metadata[depends] = "kern-tools-native:do_populate_sysroot" | |||
442 | do_kernel_metadata[file-checksums] = " ${@get_dirs_with_fragments(d)}" | 440 | do_kernel_metadata[file-checksums] = " ${@get_dirs_with_fragments(d)}" |
443 | do_validate_branches[depends] = "kern-tools-native:do_populate_sysroot" | 441 | do_validate_branches[depends] = "kern-tools-native:do_populate_sysroot" |
444 | 442 | ||
443 | # ${S} doesn't exist for us at unpack | ||
444 | do_qa_unpack() { | ||
445 | return | ||
446 | } | ||
447 | |||
445 | do_kernel_configme[depends] += "virtual/${TARGET_PREFIX}binutils:do_populate_sysroot" | 448 | do_kernel_configme[depends] += "virtual/${TARGET_PREFIX}binutils:do_populate_sysroot" |
446 | do_kernel_configme[depends] += "virtual/${TARGET_PREFIX}gcc:do_populate_sysroot" | 449 | do_kernel_configme[depends] += "virtual/${TARGET_PREFIX}gcc:do_populate_sysroot" |
447 | do_kernel_configme[depends] += "bc-native:do_populate_sysroot bison-native:do_populate_sysroot" | 450 | do_kernel_configme[depends] += "bc-native:do_populate_sysroot bison-native:do_populate_sysroot" |
diff --git a/meta/classes-recipe/populate_sdk_ext.bbclass b/meta/classes-recipe/populate_sdk_ext.bbclass index f5687e5899..09d5e2aeb6 100644 --- a/meta/classes-recipe/populate_sdk_ext.bbclass +++ b/meta/classes-recipe/populate_sdk_ext.bbclass | |||
@@ -276,6 +276,8 @@ def write_bblayers_conf(d, baseoutpath, sdkbblayers): | |||
276 | def copy_uninative(d, baseoutpath): | 276 | def copy_uninative(d, baseoutpath): |
277 | import shutil | 277 | import shutil |
278 | 278 | ||
279 | uninative_checksum = None | ||
280 | |||
279 | # Copy uninative tarball | 281 | # Copy uninative tarball |
280 | # For now this is where uninative.bbclass expects the tarball | 282 | # For now this is where uninative.bbclass expects the tarball |
281 | if bb.data.inherits_class('uninative', d): | 283 | if bb.data.inherits_class('uninative', d): |
diff --git a/meta/classes-recipe/ptest-perl.bbclass b/meta/classes-recipe/ptest-perl.bbclass index c283fdd1fc..a4a9d40d52 100644 --- a/meta/classes-recipe/ptest-perl.bbclass +++ b/meta/classes-recipe/ptest-perl.bbclass | |||
@@ -13,7 +13,7 @@ SRC_URI += "file://ptest-perl/run-ptest" | |||
13 | do_install_ptest_perl() { | 13 | do_install_ptest_perl() { |
14 | install -d ${D}${PTEST_PATH} | 14 | install -d ${D}${PTEST_PATH} |
15 | if [ ! -f ${D}${PTEST_PATH}/run-ptest ]; then | 15 | if [ ! -f ${D}${PTEST_PATH}/run-ptest ]; then |
16 | install -m 0755 ${WORKDIR}/ptest-perl/run-ptest ${D}${PTEST_PATH} | 16 | install -m 0755 ${UNPACKDIR}/ptest-perl/run-ptest ${D}${PTEST_PATH} |
17 | fi | 17 | fi |
18 | cp -r ${B}/t ${D}${PTEST_PATH} | 18 | cp -r ${B}/t ${D}${PTEST_PATH} |
19 | chown -R root:root ${D}${PTEST_PATH} | 19 | chown -R root:root ${D}${PTEST_PATH} |
diff --git a/meta/classes-recipe/siteinfo.bbclass b/meta/classes-recipe/siteinfo.bbclass index 68aefb8eda..25b53d929a 100644 --- a/meta/classes-recipe/siteinfo.bbclass +++ b/meta/classes-recipe/siteinfo.bbclass | |||
@@ -221,20 +221,6 @@ def siteinfo_get_files(d, sysrootcache=False): | |||
221 | # This would show up as breaking sstatetests.SStateTests.test_sstate_32_64_same_hash for example | 221 | # This would show up as breaking sstatetests.SStateTests.test_sstate_32_64_same_hash for example |
222 | searched = [] | 222 | searched = [] |
223 | 223 | ||
224 | if not sysrootcache: | ||
225 | return sitefiles, searched | ||
226 | |||
227 | # Now check for siteconfig cache files in sysroots | ||
228 | path_siteconfig = d.getVar('SITECONFIG_SYSROOTCACHE') | ||
229 | if path_siteconfig and os.path.isdir(path_siteconfig): | ||
230 | for i in os.listdir(path_siteconfig): | ||
231 | if not i.endswith("_config"): | ||
232 | continue | ||
233 | filename = os.path.join(path_siteconfig, i) | ||
234 | sitefiles.append(filename) | ||
235 | return sitefiles, searched | 224 | return sitefiles, searched |
236 | 225 | ||
237 | # | 226 | |
238 | # Make some information available via variables | ||
239 | # | ||
240 | SITECONFIG_SYSROOTCACHE = "${STAGING_DATADIR}/${TARGET_SYS}_config_site.d" | ||
diff --git a/meta/classes-recipe/systemd.bbclass b/meta/classes-recipe/systemd.bbclass index 48b364c1d4..0f7e3b5a08 100644 --- a/meta/classes-recipe/systemd.bbclass +++ b/meta/classes-recipe/systemd.bbclass | |||
@@ -85,7 +85,7 @@ python systemd_populate_packages() { | |||
85 | def systemd_check_package(pkg_systemd): | 85 | def systemd_check_package(pkg_systemd): |
86 | packages = d.getVar('PACKAGES') | 86 | packages = d.getVar('PACKAGES') |
87 | if not pkg_systemd in packages.split(): | 87 | if not pkg_systemd in packages.split(): |
88 | bb.error('%s does not appear in package list, please add it' % pkg_systemd) | 88 | bb.error('%s is marked for packaging systemd scripts, but it does not appear in package list, please add it to PACKAGES or adjust SYSTEMD_PACKAGES accordingly' % pkg_systemd) |
89 | 89 | ||
90 | 90 | ||
91 | def systemd_generate_package_scripts(pkg): | 91 | def systemd_generate_package_scripts(pkg): |
diff --git a/meta/classes-recipe/toolchain-scripts.bbclass b/meta/classes-recipe/toolchain-scripts.bbclass index 6bfe0b6de0..b59a295abc 100644 --- a/meta/classes-recipe/toolchain-scripts.bbclass +++ b/meta/classes-recipe/toolchain-scripts.bbclass | |||
@@ -16,6 +16,13 @@ DEBUG_PREFIX_MAP = "" | |||
16 | 16 | ||
17 | EXPORT_SDK_PS1 = "${@ 'export PS1=\\"%s\\"' % d.getVar('SDK_PS1') if d.getVar('SDK_PS1') else ''}" | 17 | EXPORT_SDK_PS1 = "${@ 'export PS1=\\"%s\\"' % d.getVar('SDK_PS1') if d.getVar('SDK_PS1') else ''}" |
18 | 18 | ||
19 | def siteinfo_with_prefix(d, prefix): | ||
20 | # Return a prefixed value from siteinfo | ||
21 | for item in siteinfo_data_for_machine(d.getVar("TARGET_ARCH"), d.getVar("TARGET_OS"), d): | ||
22 | if item.startswith(prefix): | ||
23 | return item.replace(prefix, "") | ||
24 | raise KeyError | ||
25 | |||
19 | # This function creates an environment-setup-script for use in a deployable SDK | 26 | # This function creates an environment-setup-script for use in a deployable SDK |
20 | toolchain_create_sdk_env_script () { | 27 | toolchain_create_sdk_env_script () { |
21 | # Create environment setup script. Remember that $SDKTARGETSYSROOT should | 28 | # Create environment setup script. Remember that $SDKTARGETSYSROOT should |
@@ -63,6 +70,8 @@ toolchain_create_sdk_env_script () { | |||
63 | echo 'export OECORE_BASELIB="${baselib}"' >> $script | 70 | echo 'export OECORE_BASELIB="${baselib}"' >> $script |
64 | echo 'export OECORE_TARGET_ARCH="${TARGET_ARCH}"' >>$script | 71 | echo 'export OECORE_TARGET_ARCH="${TARGET_ARCH}"' >>$script |
65 | echo 'export OECORE_TARGET_OS="${TARGET_OS}"' >>$script | 72 | echo 'export OECORE_TARGET_OS="${TARGET_OS}"' >>$script |
73 | echo 'export OECORE_TARGET_BITS="${@siteinfo_with_prefix(d, 'bit-')}"' >>$script | ||
74 | echo 'export OECORE_TARGET_ENDIAN="${@siteinfo_with_prefix(d, 'endian-')}"' >>$script | ||
66 | 75 | ||
67 | echo 'unset command_not_found_handle' >> $script | 76 | echo 'unset command_not_found_handle' >> $script |
68 | 77 | ||
@@ -192,7 +201,6 @@ EOF | |||
192 | 201 | ||
193 | #we get the cached site config in the runtime | 202 | #we get the cached site config in the runtime |
194 | TOOLCHAIN_CONFIGSITE_NOCACHE = "${@' '.join(siteinfo_get_files(d)[0])}" | 203 | TOOLCHAIN_CONFIGSITE_NOCACHE = "${@' '.join(siteinfo_get_files(d)[0])}" |
195 | TOOLCHAIN_CONFIGSITE_SYSROOTCACHE = "${STAGING_DIR}/${MLPREFIX}${MACHINE}/${target_datadir}/${TARGET_SYS}_config_site.d" | ||
196 | TOOLCHAIN_NEED_CONFIGSITE_CACHE ??= "virtual/${MLPREFIX}libc ncurses" | 204 | TOOLCHAIN_NEED_CONFIGSITE_CACHE ??= "virtual/${MLPREFIX}libc ncurses" |
197 | DEPENDS += "${TOOLCHAIN_NEED_CONFIGSITE_CACHE}" | 205 | DEPENDS += "${TOOLCHAIN_NEED_CONFIGSITE_CACHE}" |
198 | 206 | ||
@@ -214,14 +222,8 @@ toolchain_create_sdk_siteconfig () { | |||
214 | sitefile=`echo $sitefile | tr / _` | 222 | sitefile=`echo $sitefile | tr / _` |
215 | sitefile=`cat ${STAGING_DIR_TARGET}/sysroot-providers/$sitefile` | 223 | sitefile=`cat ${STAGING_DIR_TARGET}/sysroot-providers/$sitefile` |
216 | esac | 224 | esac |
217 | |||
218 | if [ -r ${TOOLCHAIN_CONFIGSITE_SYSROOTCACHE}/${sitefile}_config ]; then | ||
219 | cat ${TOOLCHAIN_CONFIGSITE_SYSROOTCACHE}/${sitefile}_config >> $siteconfig | ||
220 | fi | ||
221 | done | 225 | done |
222 | } | 226 | } |
223 | # The immediate expansion above can result in unwanted path dependencies here | ||
224 | toolchain_create_sdk_siteconfig[vardepsexclude] = "TOOLCHAIN_CONFIGSITE_SYSROOTCACHE" | ||
225 | 227 | ||
226 | python __anonymous () { | 228 | python __anonymous () { |
227 | import oe.classextend | 229 | import oe.classextend |
diff --git a/meta/classes/devtool-source.bbclass b/meta/classes/devtool-source.bbclass index 4158c20c7e..3e24800dcb 100644 --- a/meta/classes/devtool-source.bbclass +++ b/meta/classes/devtool-source.bbclass | |||
@@ -26,8 +26,6 @@ | |||
26 | 26 | ||
27 | 27 | ||
28 | DEVTOOL_TEMPDIR ?= "" | 28 | DEVTOOL_TEMPDIR ?= "" |
29 | DEVTOOL_PATCH_SRCDIR = "${DEVTOOL_TEMPDIR}/patchworkdir" | ||
30 | |||
31 | 29 | ||
32 | python() { | 30 | python() { |
33 | tempdir = d.getVar('DEVTOOL_TEMPDIR') | 31 | tempdir = d.getVar('DEVTOOL_TEMPDIR') |
@@ -60,7 +58,6 @@ python() { | |||
60 | else: | 58 | else: |
61 | unpacktask = 'do_unpack' | 59 | unpacktask = 'do_unpack' |
62 | d.appendVarFlag(unpacktask, 'postfuncs', ' devtool_post_unpack') | 60 | d.appendVarFlag(unpacktask, 'postfuncs', ' devtool_post_unpack') |
63 | d.prependVarFlag('do_patch', 'prefuncs', ' devtool_pre_patch') | ||
64 | d.appendVarFlag('do_patch', 'postfuncs', ' devtool_post_patch') | 61 | d.appendVarFlag('do_patch', 'postfuncs', ' devtool_post_patch') |
65 | 62 | ||
66 | # NOTE: in order for the patch stuff to be fully functional, | 63 | # NOTE: in order for the patch stuff to be fully functional, |
@@ -79,67 +76,23 @@ python devtool_post_unpack() { | |||
79 | 76 | ||
80 | tempdir = d.getVar('DEVTOOL_TEMPDIR') | 77 | tempdir = d.getVar('DEVTOOL_TEMPDIR') |
81 | workdir = d.getVar('WORKDIR') | 78 | workdir = d.getVar('WORKDIR') |
79 | unpackdir = d.getVar('UNPACKDIR') | ||
82 | srcsubdir = d.getVar('S') | 80 | srcsubdir = d.getVar('S') |
83 | 81 | ||
84 | def _move_file(src, dst): | 82 | # Add locally copied files to gitignore as we add back to the metadata directly |
85 | """Move a file. Creates all the directory components of destination path.""" | ||
86 | dst_d = os.path.dirname(dst) | ||
87 | if dst_d: | ||
88 | bb.utils.mkdirhier(dst_d) | ||
89 | shutil.move(src, dst) | ||
90 | |||
91 | def _ls_tree(directory): | ||
92 | """Recursive listing of files in a directory""" | ||
93 | ret = [] | ||
94 | for root, dirs, files in os.walk(directory): | ||
95 | ret.extend([os.path.relpath(os.path.join(root, fname), directory) for | ||
96 | fname in files]) | ||
97 | return ret | ||
98 | |||
99 | is_kernel_yocto = bb.data.inherits_class('kernel-yocto', d) | ||
100 | # Move local source files into separate subdir | ||
101 | recipe_patches = [os.path.basename(patch) for patch in | ||
102 | oe.recipeutils.get_recipe_patches(d)] | ||
103 | local_files = oe.recipeutils.get_recipe_local_files(d) | 83 | local_files = oe.recipeutils.get_recipe_local_files(d) |
104 | |||
105 | if is_kernel_yocto: | ||
106 | for key in [f for f in local_files if f.endswith('scc')]: | ||
107 | with open(local_files[key], 'r') as sccfile: | ||
108 | for l in sccfile: | ||
109 | line = l.split() | ||
110 | if line and line[0] in ('kconf', 'patch'): | ||
111 | cfg = os.path.join(os.path.dirname(local_files[key]), line[-1]) | ||
112 | if cfg not in local_files.values(): | ||
113 | local_files[line[-1]] = cfg | ||
114 | shutil.copy2(cfg, workdir) | ||
115 | |||
116 | # Ignore local files with subdir={BP} | ||
117 | srcabspath = os.path.abspath(srcsubdir) | 84 | srcabspath = os.path.abspath(srcsubdir) |
118 | local_files = [fname for fname in local_files if | 85 | local_files = [fname for fname in local_files if |
119 | os.path.exists(os.path.join(workdir, fname)) and | 86 | os.path.exists(os.path.join(unpackdir, fname)) and |
120 | (srcabspath == workdir or not | 87 | srcabspath == unpackdir] |
121 | os.path.join(workdir, fname).startswith(srcabspath + | ||
122 | os.sep))] | ||
123 | if local_files: | 88 | if local_files: |
124 | for fname in local_files: | 89 | with open(os.path.join(tempdir, '.gitignore'), 'a+') as f: |
125 | _move_file(os.path.join(workdir, fname), | 90 | f.write('# Ignore local files, by default. Remove following lines' |
126 | os.path.join(tempdir, 'oe-local-files', fname)) | 91 | 'if you want to commit the directory to Git\n') |
127 | with open(os.path.join(tempdir, 'oe-local-files', '.gitignore'), | 92 | for fname in local_files: |
128 | 'w') as f: | 93 | f.write('%s\n' % fname) |
129 | f.write('# Ignore local files, by default. Remove this file ' | 94 | |
130 | 'if you want to commit the directory to Git\n*\n') | 95 | if os.path.dirname(srcsubdir) != workdir: |
131 | |||
132 | if srcsubdir == workdir: | ||
133 | # Find non-patch non-local sources that were "unpacked" to srctree | ||
134 | # directory | ||
135 | src_files = [fname for fname in _ls_tree(workdir) if | ||
136 | os.path.basename(fname) not in recipe_patches] | ||
137 | srcsubdir = d.getVar('DEVTOOL_PATCH_SRCDIR') | ||
138 | # Move source files to S | ||
139 | for path in src_files: | ||
140 | _move_file(os.path.join(workdir, path), | ||
141 | os.path.join(srcsubdir, path)) | ||
142 | elif os.path.dirname(srcsubdir) != workdir: | ||
143 | # Handle if S is set to a subdirectory of the source | 96 | # Handle if S is set to a subdirectory of the source |
144 | srcsubdir = os.path.join(workdir, os.path.relpath(srcsubdir, workdir).split(os.sep)[0]) | 97 | srcsubdir = os.path.join(workdir, os.path.relpath(srcsubdir, workdir).split(os.sep)[0]) |
145 | 98 | ||
@@ -164,11 +117,6 @@ python devtool_post_unpack() { | |||
164 | f.write(srcsubdir) | 117 | f.write(srcsubdir) |
165 | } | 118 | } |
166 | 119 | ||
167 | python devtool_pre_patch() { | ||
168 | if d.getVar('S') == d.getVar('WORKDIR'): | ||
169 | d.setVar('S', '${DEVTOOL_PATCH_SRCDIR}') | ||
170 | } | ||
171 | |||
172 | python devtool_post_patch() { | 120 | python devtool_post_patch() { |
173 | import shutil | 121 | import shutil |
174 | tempdir = d.getVar('DEVTOOL_TEMPDIR') | 122 | tempdir = d.getVar('DEVTOOL_TEMPDIR') |
diff --git a/meta/classes/siteconfig.bbclass b/meta/classes/siteconfig.bbclass deleted file mode 100644 index 953cafd285..0000000000 --- a/meta/classes/siteconfig.bbclass +++ /dev/null | |||
@@ -1,39 +0,0 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: MIT | ||
5 | # | ||
6 | |||
7 | python siteconfig_do_siteconfig () { | ||
8 | shared_state = sstate_state_fromvars(d) | ||
9 | if shared_state['task'] != 'populate_sysroot': | ||
10 | return | ||
11 | if not os.path.isdir(os.path.join(d.getVar('FILE_DIRNAME'), 'site_config')): | ||
12 | bb.debug(1, "No site_config directory, skipping do_siteconfig") | ||
13 | return | ||
14 | sstate_install(shared_state, d) | ||
15 | bb.build.exec_func('do_siteconfig_gencache', d) | ||
16 | sstate_clean(shared_state, d) | ||
17 | } | ||
18 | |||
19 | EXTRASITECONFIG ?= "" | ||
20 | |||
21 | siteconfig_do_siteconfig_gencache () { | ||
22 | mkdir -p ${WORKDIR}/site_config_${MACHINE} | ||
23 | gen-site-config ${FILE_DIRNAME}/site_config \ | ||
24 | >${WORKDIR}/site_config_${MACHINE}/configure.ac | ||
25 | cd ${WORKDIR}/site_config_${MACHINE} | ||
26 | autoconf | ||
27 | rm -f ${BPN}_cache | ||
28 | CONFIG_SITE="" ${EXTRASITECONFIG} ./configure ${CONFIGUREOPTS} --cache-file ${BPN}_cache | ||
29 | sed -n -e "/ac_cv_c_bigendian/p" -e "/ac_cv_sizeof_/p" \ | ||
30 | -e "/ac_cv_type_/p" -e "/ac_cv_header_/p" -e "/ac_cv_func_/p" \ | ||
31 | < ${BPN}_cache > ${BPN}_config | ||
32 | mkdir -p ${SYSROOT_DESTDIR}${datadir}/${TARGET_SYS}_config_site.d | ||
33 | cp ${BPN}_config ${SYSROOT_DESTDIR}${datadir}/${TARGET_SYS}_config_site.d | ||
34 | |||
35 | } | ||
36 | |||
37 | do_populate_sysroot[sstate-interceptfuncs] += "do_siteconfig " | ||
38 | |||
39 | EXPORT_FUNCTIONS do_siteconfig do_siteconfig_gencache | ||
diff --git a/meta/conf/bitbake.conf b/meta/conf/bitbake.conf index b2c500d873..3ef2deb088 100644 --- a/meta/conf/bitbake.conf +++ b/meta/conf/bitbake.conf | |||
@@ -405,7 +405,7 @@ STAMP = "${STAMPS_DIR}/${MULTIMACH_TARGET_SYS}/${PN}/${PV}" | |||
405 | STAMPCLEAN = "${STAMPS_DIR}/${MULTIMACH_TARGET_SYS}/${PN}/*-*" | 405 | STAMPCLEAN = "${STAMPS_DIR}/${MULTIMACH_TARGET_SYS}/${PN}/*-*" |
406 | BASE_WORKDIR ?= "${TMPDIR}/work" | 406 | BASE_WORKDIR ?= "${TMPDIR}/work" |
407 | WORKDIR = "${BASE_WORKDIR}/${MULTIMACH_TARGET_SYS}/${PN}/${PV}" | 407 | WORKDIR = "${BASE_WORKDIR}/${MULTIMACH_TARGET_SYS}/${PN}/${PV}" |
408 | UNPACKDIR ??= "${WORKDIR}" | 408 | UNPACKDIR ??= "${WORKDIR}/sources-unpack" |
409 | T = "${WORKDIR}/temp" | 409 | T = "${WORKDIR}/temp" |
410 | D = "${WORKDIR}/image" | 410 | D = "${WORKDIR}/image" |
411 | S = "${WORKDIR}/${BP}" | 411 | S = "${WORKDIR}/${BP}" |
@@ -752,7 +752,7 @@ PSEUDO_SYSROOT = "${COMPONENTS_DIR}/${BUILD_ARCH}/pseudo-native" | |||
752 | PSEUDO_IGNORE_PATHS = "/usr/,/etc/,/lib,/dev/,/run/,${T},${WORKDIR}/recipe-sysroot,${SSTATE_DIR},${STAMPS_DIR}" | 752 | PSEUDO_IGNORE_PATHS = "/usr/,/etc/,/lib,/dev/,/run/,${T},${WORKDIR}/recipe-sysroot,${SSTATE_DIR},${STAMPS_DIR}" |
753 | PSEUDO_IGNORE_PATHS .= ",${TMPDIR}/sstate-control,${TMPDIR}/buildstats,${TMPDIR}/sysroots-components,${TMPDIR}/pkgdata" | 753 | PSEUDO_IGNORE_PATHS .= ",${TMPDIR}/sstate-control,${TMPDIR}/buildstats,${TMPDIR}/sysroots-components,${TMPDIR}/pkgdata" |
754 | PSEUDO_IGNORE_PATHS .= ",${WORKDIR}/deploy-,${WORKDIR}/sstate-build-package_,${WORKDIR}/sstate-install-package_,${WORKDIR}/pkgdata-sysroot" | 754 | PSEUDO_IGNORE_PATHS .= ",${WORKDIR}/deploy-,${WORKDIR}/sstate-build-package_,${WORKDIR}/sstate-install-package_,${WORKDIR}/pkgdata-sysroot" |
755 | PSEUDO_IGNORE_PATHS .= ",${DEPLOY_DIR},${BUILDHISTORY_DIR},${TOPDIR}/cache,${COREBASE}/scripts,${CCACHE_DIR}" | 755 | PSEUDO_IGNORE_PATHS .= ",${DEPLOY_DIR},${BUILDHISTORY_DIR},${TOPDIR}/cache,${COREBASE}/scripts,${CCACHE_DIR},${S},${B}" |
756 | 756 | ||
757 | export PSEUDO_DISABLED = "1" | 757 | export PSEUDO_DISABLED = "1" |
758 | #export PSEUDO_PREFIX = "${STAGING_DIR_NATIVE}${prefix_native}" | 758 | #export PSEUDO_PREFIX = "${STAGING_DIR_NATIVE}${prefix_native}" |
diff --git a/meta/conf/distro/include/distro_alias.inc b/meta/conf/distro/include/distro_alias.inc index 2b579339b2..ccbc1d1969 100644 --- a/meta/conf/distro/include/distro_alias.inc +++ b/meta/conf/distro/include/distro_alias.inc | |||
@@ -151,7 +151,6 @@ DISTRO_PN_ALIAS:pn-libnewt = "Debian=libnewt0.52 Fedora=newt" | |||
151 | DISTRO_PN_ALIAS:pn-libnewt-python = "Ubuntu=python-newt Fedora=newt-python" | 151 | DISTRO_PN_ALIAS:pn-libnewt-python = "Ubuntu=python-newt Fedora=newt-python" |
152 | DISTRO_PN_ALIAS:pn-libnl = "Mandriva=libnl Fedora=libnl" | 152 | DISTRO_PN_ALIAS:pn-libnl = "Mandriva=libnl Fedora=libnl" |
153 | DISTRO_PN_ALIAS:pn-libnss-mdns = "Meego=nss-mdns OpenSuSE=nss-mdns Ubuntu=nss-mdns Mandriva=nss_mdns Debian=nss-mdns" | 153 | DISTRO_PN_ALIAS:pn-libnss-mdns = "Meego=nss-mdns OpenSuSE=nss-mdns Ubuntu=nss-mdns Mandriva=nss_mdns Debian=nss-mdns" |
154 | DISTRO_PN_ALIAS:pn-libomxil = "OSPDT upstream=http://omxil.sourceforge.net/" | ||
155 | DISTRO_PN_ALIAS:pn-libowl = "Debian=owl OpenedHand" | 154 | DISTRO_PN_ALIAS:pn-libowl = "Debian=owl OpenedHand" |
156 | DISTRO_PN_ALIAS:pn-libpam = "Meego=pam Fedora=pam OpenSuSE=pam Ubuntu=pam Mandriva=pam Debian=pam" | 155 | DISTRO_PN_ALIAS:pn-libpam = "Meego=pam Fedora=pam OpenSuSE=pam Ubuntu=pam Mandriva=pam Debian=pam" |
157 | DISTRO_PN_ALIAS:pn-libpcre = "Mandriva=libpcre0 Fedora=pcre" | 156 | DISTRO_PN_ALIAS:pn-libpcre = "Mandriva=libpcre0 Fedora=pcre" |
diff --git a/meta/conf/distro/include/maintainers.inc b/meta/conf/distro/include/maintainers.inc index a11859890e..5028a507eb 100644 --- a/meta/conf/distro/include/maintainers.inc +++ b/meta/conf/distro/include/maintainers.inc | |||
@@ -190,7 +190,7 @@ RECIPE_MAINTAINER:pn-gcc-cross-canadian-${TRANSLATED_TARGET_ARCH} = "Khem Raj <r | |||
190 | RECIPE_MAINTAINER:pn-gcc-crosssdk-${SDK_SYS} = "Khem Raj <raj.khem@gmail.com>" | 190 | RECIPE_MAINTAINER:pn-gcc-crosssdk-${SDK_SYS} = "Khem Raj <raj.khem@gmail.com>" |
191 | RECIPE_MAINTAINER:pn-gcc-runtime = "Khem Raj <raj.khem@gmail.com>" | 191 | RECIPE_MAINTAINER:pn-gcc-runtime = "Khem Raj <raj.khem@gmail.com>" |
192 | RECIPE_MAINTAINER:pn-gcc-sanitizers = "Khem Raj <raj.khem@gmail.com>" | 192 | RECIPE_MAINTAINER:pn-gcc-sanitizers = "Khem Raj <raj.khem@gmail.com>" |
193 | RECIPE_MAINTAINER:pn-gcc-source-13.2.0 = "Khem Raj <raj.khem@gmail.com>" | 193 | RECIPE_MAINTAINER:pn-gcc-source-14.1.0 = "Khem Raj <raj.khem@gmail.com>" |
194 | RECIPE_MAINTAINER:pn-gconf = "Ross Burton <ross.burton@arm.com>" | 194 | RECIPE_MAINTAINER:pn-gconf = "Ross Burton <ross.burton@arm.com>" |
195 | RECIPE_MAINTAINER:pn-gcr = "Alexander Kanavin <alex.kanavin@gmail.com>" | 195 | RECIPE_MAINTAINER:pn-gcr = "Alexander Kanavin <alex.kanavin@gmail.com>" |
196 | RECIPE_MAINTAINER:pn-gdb = "Khem Raj <raj.khem@gmail.com>" | 196 | RECIPE_MAINTAINER:pn-gdb = "Khem Raj <raj.khem@gmail.com>" |
@@ -205,6 +205,7 @@ RECIPE_MAINTAINER:pn-gi-docgen = "Alexander Kanavin <alex.kanavin@gmail.com>" | |||
205 | RECIPE_MAINTAINER:pn-git = "Robert Yang <liezhi.yang@windriver.com>" | 205 | RECIPE_MAINTAINER:pn-git = "Robert Yang <liezhi.yang@windriver.com>" |
206 | RECIPE_MAINTAINER:pn-glew = "Anuj Mittal <anuj.mittal@intel.com>" | 206 | RECIPE_MAINTAINER:pn-glew = "Anuj Mittal <anuj.mittal@intel.com>" |
207 | RECIPE_MAINTAINER:pn-glib-2.0 = "Anuj Mittal <anuj.mittal@intel.com>" | 207 | RECIPE_MAINTAINER:pn-glib-2.0 = "Anuj Mittal <anuj.mittal@intel.com>" |
208 | RECIPE_MAINTAINER:pn-glib-2.0-initial = "Anuj Mittal <anuj.mittal@intel.com>" | ||
208 | RECIPE_MAINTAINER:pn-glib-networking = "Anuj Mittal <anuj.mittal@intel.com>" | 209 | RECIPE_MAINTAINER:pn-glib-networking = "Anuj Mittal <anuj.mittal@intel.com>" |
209 | RECIPE_MAINTAINER:pn-glibc = "Khem Raj <raj.khem@gmail.com>" | 210 | RECIPE_MAINTAINER:pn-glibc = "Khem Raj <raj.khem@gmail.com>" |
210 | RECIPE_MAINTAINER:pn-glibc-locale = "Khem Raj <raj.khem@gmail.com>" | 211 | RECIPE_MAINTAINER:pn-glibc-locale = "Khem Raj <raj.khem@gmail.com>" |
@@ -243,7 +244,6 @@ RECIPE_MAINTAINER:pn-gst-devtools = "Anuj Mittal <anuj.mittal@intel.com>" | |||
243 | RECIPE_MAINTAINER:pn-gst-examples = "Anuj Mittal <anuj.mittal@intel.com>" | 244 | RECIPE_MAINTAINER:pn-gst-examples = "Anuj Mittal <anuj.mittal@intel.com>" |
244 | RECIPE_MAINTAINER:pn-gstreamer1.0 = "Anuj Mittal <anuj.mittal@intel.com>" | 245 | RECIPE_MAINTAINER:pn-gstreamer1.0 = "Anuj Mittal <anuj.mittal@intel.com>" |
245 | RECIPE_MAINTAINER:pn-gstreamer1.0-libav = "Anuj Mittal <anuj.mittal@intel.com>" | 246 | RECIPE_MAINTAINER:pn-gstreamer1.0-libav = "Anuj Mittal <anuj.mittal@intel.com>" |
246 | RECIPE_MAINTAINER:pn-gstreamer1.0-omx = "Anuj Mittal <anuj.mittal@intel.com>" | ||
247 | RECIPE_MAINTAINER:pn-gstreamer1.0-meta-base = "Anuj Mittal <anuj.mittal@intel.com>" | 247 | RECIPE_MAINTAINER:pn-gstreamer1.0-meta-base = "Anuj Mittal <anuj.mittal@intel.com>" |
248 | RECIPE_MAINTAINER:pn-gstreamer1.0-plugins-bad = "Anuj Mittal <anuj.mittal@intel.com>" | 248 | RECIPE_MAINTAINER:pn-gstreamer1.0-plugins-bad = "Anuj Mittal <anuj.mittal@intel.com>" |
249 | RECIPE_MAINTAINER:pn-gstreamer1.0-plugins-base = "Anuj Mittal <anuj.mittal@intel.com>" | 249 | RECIPE_MAINTAINER:pn-gstreamer1.0-plugins-base = "Anuj Mittal <anuj.mittal@intel.com>" |
@@ -365,7 +365,6 @@ RECIPE_MAINTAINER:pn-libnsl2 = "Khem Raj <raj.khem@gmail.com>" | |||
365 | RECIPE_MAINTAINER:pn-libnss-mdns = "Alexander Kanavin <alex.kanavin@gmail.com>" | 365 | RECIPE_MAINTAINER:pn-libnss-mdns = "Alexander Kanavin <alex.kanavin@gmail.com>" |
366 | RECIPE_MAINTAINER:pn-libnss-nis = "Khem Raj <raj.khem@gmail.com>" | 366 | RECIPE_MAINTAINER:pn-libnss-nis = "Khem Raj <raj.khem@gmail.com>" |
367 | RECIPE_MAINTAINER:pn-libogg = "Anuj Mittal <anuj.mittal@intel.com>" | 367 | RECIPE_MAINTAINER:pn-libogg = "Anuj Mittal <anuj.mittal@intel.com>" |
368 | RECIPE_MAINTAINER:pn-libomxil = "Anuj Mittal <anuj.mittal@intel.com>" | ||
369 | RECIPE_MAINTAINER:pn-libpam = "Anuj Mittal <anuj.mittal@intel.com>" | 368 | RECIPE_MAINTAINER:pn-libpam = "Anuj Mittal <anuj.mittal@intel.com>" |
370 | RECIPE_MAINTAINER:pn-libpcap = "Anuj Mittal <anuj.mittal@intel.com>" | 369 | RECIPE_MAINTAINER:pn-libpcap = "Anuj Mittal <anuj.mittal@intel.com>" |
371 | RECIPE_MAINTAINER:pn-libpciaccess = "Wang Mingyu <wangmy@fujitsu.com>" | 370 | RECIPE_MAINTAINER:pn-libpciaccess = "Wang Mingyu <wangmy@fujitsu.com>" |
@@ -555,7 +554,7 @@ RECIPE_MAINTAINER:pn-npth = "Alexander Kanavin <alex.kanavin@gmail.com>" | |||
555 | RECIPE_MAINTAINER:pn-nss-myhostname = "Anuj Mittal <anuj.mittal@intel.com>" | 554 | RECIPE_MAINTAINER:pn-nss-myhostname = "Anuj Mittal <anuj.mittal@intel.com>" |
556 | RECIPE_MAINTAINER:pn-numactl = "Richard Purdie <richard.purdie@linuxfoundation.org>" | 555 | RECIPE_MAINTAINER:pn-numactl = "Richard Purdie <richard.purdie@linuxfoundation.org>" |
557 | RECIPE_MAINTAINER:pn-ofono = "Ross Burton <ross.burton@arm.com>" | 556 | RECIPE_MAINTAINER:pn-ofono = "Ross Burton <ross.burton@arm.com>" |
558 | RECIPE_MAINTAINER:pn-opensbi = "Unassigned <unassigned@yoctoproject.org>" | 557 | RECIPE_MAINTAINER:pn-opensbi = "Thomas Perrot <thomas.perrot@bootlin.com>" |
559 | RECIPE_MAINTAINER:pn-openssh = "Unassigned <unassigned@yoctoproject.org>" | 558 | RECIPE_MAINTAINER:pn-openssh = "Unassigned <unassigned@yoctoproject.org>" |
560 | RECIPE_MAINTAINER:pn-openssl = "Alexander Kanavin <alex.kanavin@gmail.com>" | 559 | RECIPE_MAINTAINER:pn-openssl = "Alexander Kanavin <alex.kanavin@gmail.com>" |
561 | RECIPE_MAINTAINER:pn-opkg = "Alex Stewart <alex.stewart@ni.com>" | 560 | RECIPE_MAINTAINER:pn-opkg = "Alex Stewart <alex.stewart@ni.com>" |
@@ -626,7 +625,7 @@ RECIPE_MAINTAINER:pn-python3-dtschema-wrapper = "Bruce Ashfield <bruce.ashfield@ | |||
626 | RECIPE_MAINTAINER:pn-python3-editables = "Ross Burton <ross.burton@arm.com>" | 625 | RECIPE_MAINTAINER:pn-python3-editables = "Ross Burton <ross.burton@arm.com>" |
627 | RECIPE_MAINTAINER:pn-python3-pycryptodome = "Joshua Watt <JPEWhacker@gmail.com>" | 626 | RECIPE_MAINTAINER:pn-python3-pycryptodome = "Joshua Watt <JPEWhacker@gmail.com>" |
628 | RECIPE_MAINTAINER:pn-python3-pycryptodomex = "Joshua Watt <JPEWhacker@gmail.com>" | 627 | RECIPE_MAINTAINER:pn-python3-pycryptodomex = "Joshua Watt <JPEWhacker@gmail.com>" |
629 | RECIPE_MAINTAINER:pn-python3-pyproject-metadata = "Tim Orling <tim.orling@konsulko.com>" | 628 | RECIPE_MAINTAINER:pn-python3-pyproject-metadata = "Trevor Gamblin <tgamblin@baylibre.com>" |
630 | RECIPE_MAINTAINER:pn-python3-pyrsistent = "Bruce Ashfield <bruce.ashfield@gmail.com>" | 629 | RECIPE_MAINTAINER:pn-python3-pyrsistent = "Bruce Ashfield <bruce.ashfield@gmail.com>" |
631 | RECIPE_MAINTAINER:pn-python3-extras = "Trevor Gamblin <tgamblin@baylibre.com>" | 630 | RECIPE_MAINTAINER:pn-python3-extras = "Trevor Gamblin <tgamblin@baylibre.com>" |
632 | RECIPE_MAINTAINER:pn-python3-flit-core = "Tim Orling <tim.orling@konsulko.com>" | 631 | RECIPE_MAINTAINER:pn-python3-flit-core = "Tim Orling <tim.orling@konsulko.com>" |
@@ -635,7 +634,7 @@ RECIPE_MAINTAINER:pn-python3-gitdb = "Trevor Gamblin <tgamblin@baylibre.com>" | |||
635 | RECIPE_MAINTAINER:pn-python3-hatchling = "Ross Burton <ross.burton@arm.com>" | 634 | RECIPE_MAINTAINER:pn-python3-hatchling = "Ross Burton <ross.burton@arm.com>" |
636 | RECIPE_MAINTAINER:pn-python3-hatch-fancy-pypi-readme = "Ross Burton <ross.burton@arm.com>" | 635 | RECIPE_MAINTAINER:pn-python3-hatch-fancy-pypi-readme = "Ross Burton <ross.burton@arm.com>" |
637 | RECIPE_MAINTAINER:pn-python3-hatch-vcs = "Ross Burton <ross.burton@arm.com>" | 636 | RECIPE_MAINTAINER:pn-python3-hatch-vcs = "Ross Burton <ross.burton@arm.com>" |
638 | RECIPE_MAINTAINER:pn-python3-hypothesis = "Tim Orling <tim.orling@konsulko.com>" | 637 | RECIPE_MAINTAINER:pn-python3-hypothesis = "Trevor Gamblin <tgamblin@baylibre.com>" |
639 | RECIPE_MAINTAINER:pn-python3-idna = "Bruce Ashfield <bruce.ashfield@gmail.com>" | 638 | RECIPE_MAINTAINER:pn-python3-idna = "Bruce Ashfield <bruce.ashfield@gmail.com>" |
640 | RECIPE_MAINTAINER:pn-python3-imagesize = "Tim Orling <tim.orling@konsulko.com>" | 639 | RECIPE_MAINTAINER:pn-python3-imagesize = "Tim Orling <tim.orling@konsulko.com>" |
641 | RECIPE_MAINTAINER:pn-python3-importlib-metadata = "Tim Orling <tim.orling@konsulko.com>" | 640 | RECIPE_MAINTAINER:pn-python3-importlib-metadata = "Tim Orling <tim.orling@konsulko.com>" |
@@ -675,19 +674,19 @@ RECIPE_MAINTAINER:pn-python3-pyasn1 = "Tim Orling <tim.orling@konsulko.com>" | |||
675 | RECIPE_MAINTAINER:pn-python3-pycairo = "Zang Ruochen <zangruochen@loongson.cn>" | 674 | RECIPE_MAINTAINER:pn-python3-pycairo = "Zang Ruochen <zangruochen@loongson.cn>" |
676 | RECIPE_MAINTAINER:pn-python3-pycparser = "Tim Orling <tim.orling@konsulko.com>" | 675 | RECIPE_MAINTAINER:pn-python3-pycparser = "Tim Orling <tim.orling@konsulko.com>" |
677 | RECIPE_MAINTAINER:pn-python3-pyelftools = "Joshua Watt <JPEWhacker@gmail.com>" | 676 | RECIPE_MAINTAINER:pn-python3-pyelftools = "Joshua Watt <JPEWhacker@gmail.com>" |
678 | RECIPE_MAINTAINER:pn-python3-pygments = "Tim Orling <tim.orling@konsulko.com>" | 677 | RECIPE_MAINTAINER:pn-python3-pygments = "Trevor Gamblin <tgamblin@baylibre.com>" |
679 | RECIPE_MAINTAINER:pn-python3-pygobject = "Zang Ruochen <zangruochen@loongson.cn>" | 678 | RECIPE_MAINTAINER:pn-python3-pygobject = "Zang Ruochen <zangruochen@loongson.cn>" |
680 | RECIPE_MAINTAINER:pn-python3-pyopenssl = "Tim Orling <tim.orling@konsulko.com>" | 679 | RECIPE_MAINTAINER:pn-python3-pyopenssl = "Tim Orling <tim.orling@konsulko.com>" |
681 | RECIPE_MAINTAINER:pn-python3-pyparsing = "Trevor Gamblin <tgamblin@baylibre.com>" | 680 | RECIPE_MAINTAINER:pn-python3-pyparsing = "Trevor Gamblin <tgamblin@baylibre.com>" |
682 | RECIPE_MAINTAINER:pn-python3-pyproject-hooks = "Ross Burton <ross.burton@arm.com>" | 681 | RECIPE_MAINTAINER:pn-python3-pyproject-hooks = "Ross Burton <ross.burton@arm.com>" |
683 | RECIPE_MAINTAINER:pn-python3-pysocks = "Tim Orling <tim.orling@konsulko.com>" | 682 | RECIPE_MAINTAINER:pn-python3-pysocks = "Tim Orling <tim.orling@konsulko.com>" |
684 | RECIPE_MAINTAINER:pn-python3-pytest = "Tim Orling <tim.orling@konsulko.com>" | 683 | RECIPE_MAINTAINER:pn-python3-pytest = "Trevor Gamblin <tgamblin@baylibre.com>" |
685 | RECIPE_MAINTAINER:pn-python3-pytest-runner = "Tim Orling <tim.orling@konsulko.com>" | 684 | RECIPE_MAINTAINER:pn-python3-pytest-runner = "Tim Orling <tim.orling@konsulko.com>" |
686 | RECIPE_MAINTAINER:pn-python3-pytest-subtests = "Tim Orling <tim.orling@konsulko.com>" | 685 | RECIPE_MAINTAINER:pn-python3-pytest-subtests = "Tim Orling <tim.orling@konsulko.com>" |
687 | RECIPE_MAINTAINER:pn-python3-pytz = "Tim Orling <tim.orling@konsulko.com>" | 686 | RECIPE_MAINTAINER:pn-python3-pytz = "Tim Orling <tim.orling@konsulko.com>" |
688 | RECIPE_MAINTAINER:pn-python3-pyyaml = "Tim Orling <tim.orling@konsulko.com>" | 687 | RECIPE_MAINTAINER:pn-python3-pyyaml = "Tim Orling <tim.orling@konsulko.com>" |
689 | RECIPE_MAINTAINER:pn-python3-rdflib = "Wang Mingyu <wangmy@fujitsu.com>" | 688 | RECIPE_MAINTAINER:pn-python3-rdflib = "Wang Mingyu <wangmy@fujitsu.com>" |
690 | RECIPE_MAINTAINER:pn-python3-referencing = "Tim Orling <tim.orling@konsulko.com>" | 689 | RECIPE_MAINTAINER:pn-python3-referencing = "Trevor Gamblin <tgamblin@baylibre.com>" |
691 | RECIPE_MAINTAINER:pn-python3-requests = "Tim Orling <tim.orling@konsulko.com>" | 690 | RECIPE_MAINTAINER:pn-python3-requests = "Tim Orling <tim.orling@konsulko.com>" |
692 | RECIPE_MAINTAINER:pn-python3-rfc3339-validator = "Bruce Ashfield <bruce.ashfield@gmail.com>" | 691 | RECIPE_MAINTAINER:pn-python3-rfc3339-validator = "Bruce Ashfield <bruce.ashfield@gmail.com>" |
693 | RECIPE_MAINTAINER:pn-python3-rfc3986-validator = "Bruce Ashfield <bruce.ashfield@gmail.com>" | 692 | RECIPE_MAINTAINER:pn-python3-rfc3986-validator = "Bruce Ashfield <bruce.ashfield@gmail.com>" |
@@ -698,14 +697,14 @@ RECIPE_MAINTAINER:pn-python3-scons = "Tim Orling <tim.orling@konsulko.com>" | |||
698 | RECIPE_MAINTAINER:pn-python3-semantic-version = "Tim Orling <tim.orling@konsulko.com>" | 697 | RECIPE_MAINTAINER:pn-python3-semantic-version = "Tim Orling <tim.orling@konsulko.com>" |
699 | RECIPE_MAINTAINER:pn-python3-setuptools = "Unassigned <unassigned@yoctoproject.org>" | 698 | RECIPE_MAINTAINER:pn-python3-setuptools = "Unassigned <unassigned@yoctoproject.org>" |
700 | RECIPE_MAINTAINER:pn-python3-setuptools-rust = "Tim Orling <tim.orling@konsulko.com>" | 699 | RECIPE_MAINTAINER:pn-python3-setuptools-rust = "Tim Orling <tim.orling@konsulko.com>" |
701 | RECIPE_MAINTAINER:pn-python3-setuptools-scm = "Tim Orling <tim.orling@konsulko.com>" | 700 | RECIPE_MAINTAINER:pn-python3-setuptools-scm = "Trevor Gamblin <tgamblin@baylibre.com>" |
702 | RECIPE_MAINTAINER:pn-python3-six = "Zang Ruochen <zangruochen@loongson.cn>" | 701 | RECIPE_MAINTAINER:pn-python3-six = "Zang Ruochen <zangruochen@loongson.cn>" |
703 | RECIPE_MAINTAINER:pn-python3-smartypants = "Alexander Kanavin <alex.kanavin@gmail.com>" | 702 | RECIPE_MAINTAINER:pn-python3-smartypants = "Alexander Kanavin <alex.kanavin@gmail.com>" |
704 | RECIPE_MAINTAINER:pn-python3-smmap = "Unassigned <unassigned@yoctoproject.org>" | 703 | RECIPE_MAINTAINER:pn-python3-smmap = "Unassigned <unassigned@yoctoproject.org>" |
705 | RECIPE_MAINTAINER:pn-python3-snowballstemmer = "Tim Orling <tim.orling@konsulko.com>" | 704 | RECIPE_MAINTAINER:pn-python3-snowballstemmer = "Tim Orling <tim.orling@konsulko.com>" |
706 | RECIPE_MAINTAINER:pn-python3-sortedcontainers = "Tim Orling <tim.orling@konsulko.com>" | 705 | RECIPE_MAINTAINER:pn-python3-sortedcontainers = "Tim Orling <tim.orling@konsulko.com>" |
707 | RECIPE_MAINTAINER:pn-python3-spdx-tools = "Marta Rybczynska <mrybczynska@syslinbit.com>" | 706 | RECIPE_MAINTAINER:pn-python3-spdx-tools = "Marta Rybczynska <mrybczynska@syslinbit.com>" |
708 | RECIPE_MAINTAINER:pn-python3-sphinx = "Tim Orling <tim.orling@konsulko.com>" | 707 | RECIPE_MAINTAINER:pn-python3-sphinx = "Trevor Gamblin <tgamblin@baylibre.com>" |
709 | RECIPE_MAINTAINER:pn-python3-sphinxcontrib-applehelp = "Tim Orling <tim.orling@konsulko.com>" | 708 | RECIPE_MAINTAINER:pn-python3-sphinxcontrib-applehelp = "Tim Orling <tim.orling@konsulko.com>" |
710 | RECIPE_MAINTAINER:pn-python3-sphinxcontrib-devhelp = "Tim Orling <tim.orling@konsulko.com>" | 709 | RECIPE_MAINTAINER:pn-python3-sphinxcontrib-devhelp = "Tim Orling <tim.orling@konsulko.com>" |
711 | RECIPE_MAINTAINER:pn-python3-sphinxcontrib-htmlhelp = "Tim Orling <tim.orling@konsulko.com>" | 710 | RECIPE_MAINTAINER:pn-python3-sphinxcontrib-htmlhelp = "Tim Orling <tim.orling@konsulko.com>" |
@@ -829,7 +828,6 @@ RECIPE_MAINTAINER:pn-unifdef = "Ross Burton <ross.burton@arm.com>" | |||
829 | RECIPE_MAINTAINER:pn-uninative-tarball = "Richard Purdie <richard.purdie@linuxfoundation.org>" | 828 | RECIPE_MAINTAINER:pn-uninative-tarball = "Richard Purdie <richard.purdie@linuxfoundation.org>" |
830 | RECIPE_MAINTAINER:pn-unzip = "Denys Dmytriyenko <denis@denix.org>" | 829 | RECIPE_MAINTAINER:pn-unzip = "Denys Dmytriyenko <denis@denix.org>" |
831 | RECIPE_MAINTAINER:pn-update-rc.d = "Ross Burton <ross.burton@arm.com>" | 830 | RECIPE_MAINTAINER:pn-update-rc.d = "Ross Burton <ross.burton@arm.com>" |
832 | RECIPE_MAINTAINER:pn-usbinit = "Alexander Kanavin <alex.kanavin@gmail.com>" | ||
833 | RECIPE_MAINTAINER:pn-usbutils = "Alexander Kanavin <alex.kanavin@gmail.com>" | 831 | RECIPE_MAINTAINER:pn-usbutils = "Alexander Kanavin <alex.kanavin@gmail.com>" |
834 | RECIPE_MAINTAINER:pn-util-linux = "Chen Qi <Qi.Chen@windriver.com>" | 832 | RECIPE_MAINTAINER:pn-util-linux = "Chen Qi <Qi.Chen@windriver.com>" |
835 | RECIPE_MAINTAINER:pn-util-linux-libuuid = "Chen Qi <Qi.Chen@windriver.com>" | 833 | RECIPE_MAINTAINER:pn-util-linux-libuuid = "Chen Qi <Qi.Chen@windriver.com>" |
diff --git a/meta/conf/distro/include/ptest-packagelists.inc b/meta/conf/distro/include/ptest-packagelists.inc index 5975db25cc..da6fa6ee97 100644 --- a/meta/conf/distro/include/ptest-packagelists.inc +++ b/meta/conf/distro/include/ptest-packagelists.inc | |||
@@ -69,6 +69,7 @@ PTESTS_FAST = "\ | |||
69 | python3-pyasn1 \ | 69 | python3-pyasn1 \ |
70 | python3-pytz \ | 70 | python3-pytz \ |
71 | python3-pyyaml \ | 71 | python3-pyyaml \ |
72 | python3-rpds-py \ | ||
72 | python3-trove-classifiers \ | 73 | python3-trove-classifiers \ |
73 | python3-wcwidth \ | 74 | python3-wcwidth \ |
74 | python3-webcolors \ | 75 | python3-webcolors \ |
diff --git a/meta/conf/distro/include/tclibc-newlib.inc b/meta/conf/distro/include/tclibc-newlib.inc index 238b430e49..34318b2454 100644 --- a/meta/conf/distro/include/tclibc-newlib.inc +++ b/meta/conf/distro/include/tclibc-newlib.inc | |||
@@ -42,6 +42,6 @@ TOOLCHAIN_HOST_TASK ?= "packagegroup-cross-canadian-${MACHINE} nativesdk-qemu na | |||
42 | TOOLCHAIN_TARGET_TASK ?= "${LIBC_DEPENDENCIES}" | 42 | TOOLCHAIN_TARGET_TASK ?= "${LIBC_DEPENDENCIES}" |
43 | TOOLCHAIN_NEED_CONFIGSITE_CACHE:remove = "zlib ncurses" | 43 | TOOLCHAIN_NEED_CONFIGSITE_CACHE:remove = "zlib ncurses" |
44 | 44 | ||
45 | # disable pie security flags by default | 45 | # disable pie security flags by default since RISCV linker doesn't support them |
46 | SECURITY_CFLAGS:libc-newlib = "${SECURITY_NOPIE_CFLAGS}" | 46 | SECURITY_CFLAGS:libc-newlib:qemuriscv32 = "${SECURITY_NOPIE_CFLAGS}" |
47 | SECURITY_LDFLAGS:libc-newlib = "" | 47 | SECURITY_CFLAGS:libc-newlib:qemuriscv64 = "${SECURITY_NOPIE_CFLAGS}" |
diff --git a/meta/conf/distro/include/tcmode-default.inc b/meta/conf/distro/include/tcmode-default.inc index 643394f3eb..950f29134d 100644 --- a/meta/conf/distro/include/tcmode-default.inc +++ b/meta/conf/distro/include/tcmode-default.inc | |||
@@ -16,12 +16,12 @@ PREFERRED_PROVIDER_virtual/${SDK_PREFIX}compilerlibs = "nativesdk-gcc-runtime" | |||
16 | # Default libc config | 16 | # Default libc config |
17 | PREFERRED_PROVIDER_virtual/gettext ??= "gettext" | 17 | PREFERRED_PROVIDER_virtual/gettext ??= "gettext" |
18 | 18 | ||
19 | GCCVERSION ?= "13.%" | 19 | GCCVERSION ?= "14.%" |
20 | SDKGCCVERSION ?= "${GCCVERSION}" | 20 | SDKGCCVERSION ?= "${GCCVERSION}" |
21 | BINUVERSION ?= "2.42%" | 21 | BINUVERSION ?= "2.42%" |
22 | GDBVERSION ?= "14.%" | 22 | GDBVERSION ?= "14.%" |
23 | GLIBCVERSION ?= "2.39%" | 23 | GLIBCVERSION ?= "2.39%" |
24 | LINUXLIBCVERSION ?= "6.6%" | 24 | LINUXLIBCVERSION ?= "6.9%" |
25 | QEMUVERSION ?= "8.2%" | 25 | QEMUVERSION ?= "8.2%" |
26 | GOVERSION ?= "1.22%" | 26 | GOVERSION ?= "1.22%" |
27 | RUSTVERSION ?= "1.75%" | 27 | RUSTVERSION ?= "1.75%" |
diff --git a/meta/conf/distro/include/yocto-uninative.inc b/meta/conf/distro/include/yocto-uninative.inc index 4ac66fd506..657c1032f9 100644 --- a/meta/conf/distro/include/yocto-uninative.inc +++ b/meta/conf/distro/include/yocto-uninative.inc | |||
@@ -7,9 +7,9 @@ | |||
7 | # | 7 | # |
8 | 8 | ||
9 | UNINATIVE_MAXGLIBCVERSION = "2.39" | 9 | UNINATIVE_MAXGLIBCVERSION = "2.39" |
10 | UNINATIVE_VERSION = "4.4" | 10 | UNINATIVE_VERSION = "4.5" |
11 | 11 | ||
12 | UNINATIVE_URL ?= "http://downloads.yoctoproject.org/releases/uninative/${UNINATIVE_VERSION}/" | 12 | UNINATIVE_URL ?= "http://downloads.yoctoproject.org/releases/uninative/${UNINATIVE_VERSION}/" |
13 | UNINATIVE_CHECKSUM[aarch64] ?= "b61876130f494f75092f21086b4a64ea5fb064045769bf1d32e9cb6af17ea8ec" | 13 | UNINATIVE_CHECKSUM[aarch64] ?= "df2e29e2e6feb187a3499abf3b1322a3b251da819c77a7b19d4fe952351365ab" |
14 | UNINATIVE_CHECKSUM[i686] ?= "9f28627828f0082cc0344eede4d9a861a9a064bfa8f36e072e46212f0fe45fcc" | 14 | UNINATIVE_CHECKSUM[i686] ?= "8ef3eda53428b484c20157f6ec3c130b03080b3d4b3889067e0e184e05102d35" |
15 | UNINATIVE_CHECKSUM[x86_64] ?= "d81c54284be2bb886931fc87281d58177a2cd381cf99d1981f8923039a72a302" | 15 | UNINATIVE_CHECKSUM[x86_64] ?= "43ee6a25bcf5fce16ea87076d6a96e79ead6ced90690a058d07432f902773473" |
diff --git a/meta/conf/layer.conf b/meta/conf/layer.conf index f2bca0aa5b..65eb657fd9 100644 --- a/meta/conf/layer.conf +++ b/meta/conf/layer.conf | |||
@@ -45,6 +45,7 @@ SIGGEN_EXCLUDERECIPES_ABISAFE += " \ | |||
45 | ca-certificates \ | 45 | ca-certificates \ |
46 | shared-mime-info \ | 46 | shared-mime-info \ |
47 | desktop-file-utils \ | 47 | desktop-file-utils \ |
48 | os-release \ | ||
48 | " | 49 | " |
49 | 50 | ||
50 | SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS += " \ | 51 | SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS += " \ |
diff --git a/meta/files/toolchain-shar-extract.sh b/meta/files/toolchain-shar-extract.sh index 4386b985bb..89d30005fd 100644 --- a/meta/files/toolchain-shar-extract.sh +++ b/meta/files/toolchain-shar-extract.sh | |||
@@ -164,7 +164,9 @@ else | |||
164 | fi | 164 | fi |
165 | 165 | ||
166 | # limit the length for target_sdk_dir, ensure the relocation behaviour in relocate_sdk.py has right result. | 166 | # limit the length for target_sdk_dir, ensure the relocation behaviour in relocate_sdk.py has right result. |
167 | if [ ${#target_sdk_dir} -gt 2048 ]; then | 167 | # This is due to ELF interpreter being set to 'a'*1024 in |
168 | # meta/recipes-core/meta/uninative-tarball.bb | ||
169 | if [ ${#target_sdk_dir} -gt 1024 ]; then | ||
168 | echo "Error: The target directory path is too long!!!" | 170 | echo "Error: The target directory path is too long!!!" |
169 | exit 1 | 171 | exit 1 |
170 | fi | 172 | fi |
diff --git a/meta/lib/bblayers/makesetup.py b/meta/lib/bblayers/makesetup.py index 99d5973760..4199b5f069 100644 --- a/meta/lib/bblayers/makesetup.py +++ b/meta/lib/bblayers/makesetup.py | |||
@@ -48,8 +48,9 @@ class MakeSetupPlugin(LayerPlugin): | |||
48 | if l_name == 'workspace': | 48 | if l_name == 'workspace': |
49 | continue | 49 | continue |
50 | if l_ismodified: | 50 | if l_ismodified: |
51 | logger.error("Layer {name} in {path} has uncommitted modifications or is not in a git repository.".format(name=l_name,path=l_path)) | 51 | e = "Layer {name} in {path} has uncommitted modifications or is not in a git repository.".format(name=l_name,path=l_path) |
52 | return | 52 | logger.error(e) |
53 | raise Exception(e) | ||
53 | repo_path = oe.buildcfg.get_metadata_git_toplevel(l_path) | 54 | repo_path = oe.buildcfg.get_metadata_git_toplevel(l_path) |
54 | 55 | ||
55 | if self._is_submodule(repo_path): | 56 | if self._is_submodule(repo_path): |
@@ -62,9 +63,6 @@ class MakeSetupPlugin(LayerPlugin): | |||
62 | 'describe':oe.buildcfg.get_metadata_git_describe(repo_path)}} | 63 | 'describe':oe.buildcfg.get_metadata_git_describe(repo_path)}} |
63 | if repo_path == destdir_repo: | 64 | if repo_path == destdir_repo: |
64 | repos[repo_path]['contains_this_file'] = True | 65 | repos[repo_path]['contains_this_file'] = True |
65 | if not repos[repo_path]['git-remote']['remotes'] and not repos[repo_path]['contains_this_file']: | ||
66 | logger.error("Layer repository in {path} does not have any remotes configured. Please add at least one with 'git remote add'.".format(path=repo_path)) | ||
67 | return | ||
68 | 66 | ||
69 | top_path = os.path.commonpath([os.path.dirname(r) for r in repos.keys()]) | 67 | top_path = os.path.commonpath([os.path.dirname(r) for r in repos.keys()]) |
70 | 68 | ||
@@ -74,6 +72,7 @@ class MakeSetupPlugin(LayerPlugin): | |||
74 | repos_nopaths[r_nopath] = repos[r] | 72 | repos_nopaths[r_nopath] = repos[r] |
75 | r_relpath = os.path.relpath(r, top_path) | 73 | r_relpath = os.path.relpath(r, top_path) |
76 | repos_nopaths[r_nopath]['path'] = r_relpath | 74 | repos_nopaths[r_nopath]['path'] = r_relpath |
75 | repos_nopaths[r_nopath]['originpath'] = r | ||
77 | return repos_nopaths | 76 | return repos_nopaths |
78 | 77 | ||
79 | def do_make_setup(self, args): | 78 | def do_make_setup(self, args): |
diff --git a/meta/lib/bblayers/setupwriters/oe-setup-layers.py b/meta/lib/bblayers/setupwriters/oe-setup-layers.py index 59ca968ff3..8faeabfabc 100644 --- a/meta/lib/bblayers/setupwriters/oe-setup-layers.py +++ b/meta/lib/bblayers/setupwriters/oe-setup-layers.py | |||
@@ -85,6 +85,11 @@ class OeSetupLayersWriter(): | |||
85 | if not os.path.exists(args.destdir): | 85 | if not os.path.exists(args.destdir): |
86 | os.makedirs(args.destdir) | 86 | os.makedirs(args.destdir) |
87 | repos = parent.make_repo_config(args.destdir) | 87 | repos = parent.make_repo_config(args.destdir) |
88 | for r in repos.values(): | ||
89 | if not r['git-remote']['remotes'] and not r.get('contains_this_file', False): | ||
90 | e = "Layer repository in {path} does not have any remotes configured. Please add at least one with 'git remote add'.".format(path=r['originpath']) | ||
91 | raise Exception(e) | ||
92 | del r['originpath'] | ||
88 | json = {"version":"1.0","sources":repos} | 93 | json = {"version":"1.0","sources":repos} |
89 | if not repos: | 94 | if not repos: |
90 | err = "Could not determine layer sources" | 95 | err = "Could not determine layer sources" |
diff --git a/meta/lib/oe/package_manager/__init__.py b/meta/lib/oe/package_manager/__init__.py index 6774cdb794..d3b2317894 100644 --- a/meta/lib/oe/package_manager/__init__.py +++ b/meta/lib/oe/package_manager/__init__.py | |||
@@ -449,7 +449,7 @@ class PackageManager(object, metaclass=ABCMeta): | |||
449 | return res | 449 | return res |
450 | return _append(uris, base_paths) | 450 | return _append(uris, base_paths) |
451 | 451 | ||
452 | def create_packages_dir(d, subrepo_dir, deploydir, taskname, filterbydependencies): | 452 | def create_packages_dir(d, subrepo_dir, deploydir, taskname, filterbydependencies, include_self=False): |
453 | """ | 453 | """ |
454 | Go through our do_package_write_X dependencies and hardlink the packages we depend | 454 | Go through our do_package_write_X dependencies and hardlink the packages we depend |
455 | upon into the repo directory. This prevents us seeing other packages that may | 455 | upon into the repo directory. This prevents us seeing other packages that may |
@@ -486,14 +486,17 @@ def create_packages_dir(d, subrepo_dir, deploydir, taskname, filterbydependencie | |||
486 | bb.fatal("Couldn't find ourself in BB_TASKDEPDATA?") | 486 | bb.fatal("Couldn't find ourself in BB_TASKDEPDATA?") |
487 | pkgdeps = set() | 487 | pkgdeps = set() |
488 | start = [start] | 488 | start = [start] |
489 | seen = set(start) | 489 | if include_self: |
490 | seen = set() | ||
491 | else: | ||
492 | seen = set(start) | ||
490 | # Support direct dependencies (do_rootfs -> do_package_write_X) | 493 | # Support direct dependencies (do_rootfs -> do_package_write_X) |
491 | # or indirect dependencies within PN (do_populate_sdk_ext -> do_rootfs -> do_package_write_X) | 494 | # or indirect dependencies within PN (do_populate_sdk_ext -> do_rootfs -> do_package_write_X) |
492 | while start: | 495 | while start: |
493 | next = [] | 496 | next = [] |
494 | for dep2 in start: | 497 | for dep2 in start: |
495 | for dep in taskdepdata[dep2][3]: | 498 | for dep in taskdepdata[dep2][3]: |
496 | if taskdepdata[dep][0] != pn: | 499 | if include_self or taskdepdata[dep][0] != pn: |
497 | if "do_" + taskname in dep: | 500 | if "do_" + taskname in dep: |
498 | pkgdeps.add(dep) | 501 | pkgdeps.add(dep) |
499 | elif dep not in seen: | 502 | elif dep not in seen: |
diff --git a/meta/lib/oe/package_manager/common_deb_ipk.py b/meta/lib/oe/package_manager/common_deb_ipk.py new file mode 100644 index 0000000000..6a1e28ee6f --- /dev/null +++ b/meta/lib/oe/package_manager/common_deb_ipk.py | |||
@@ -0,0 +1,97 @@ | |||
1 | # | ||
2 | # Copyright OpenEmbedded Contributors | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | import glob | ||
8 | import os | ||
9 | import subprocess | ||
10 | import tempfile | ||
11 | |||
12 | import bb | ||
13 | |||
14 | from oe.package_manager import opkg_query, PackageManager | ||
15 | |||
16 | class OpkgDpkgPM(PackageManager): | ||
17 | def __init__(self, d, target_rootfs): | ||
18 | """ | ||
19 | This is an abstract class. Do not instantiate this directly. | ||
20 | """ | ||
21 | super(OpkgDpkgPM, self).__init__(d, target_rootfs) | ||
22 | |||
23 | def package_info(self, pkg): | ||
24 | """ | ||
25 | Returns a dictionary with the package info. | ||
26 | """ | ||
27 | raise NotImplementedError | ||
28 | |||
29 | def _common_package_info(self, cmd): | ||
30 | """ | ||
31 | Returns a dictionary with the package info. | ||
32 | |||
33 | This method extracts the common parts for Opkg and Dpkg | ||
34 | """ | ||
35 | |||
36 | proc = subprocess.run(cmd, capture_output=True, encoding="utf-8", shell=True) | ||
37 | if proc.returncode: | ||
38 | bb.fatal("Unable to list available packages. Command '%s' " | ||
39 | "returned %d:\n%s" % (cmd, proc.returncode, proc.stderr)) | ||
40 | elif proc.stderr: | ||
41 | bb.note("Command '%s' returned stderr: %s" % (cmd, proc.stderr)) | ||
42 | |||
43 | return opkg_query(proc.stdout) | ||
44 | |||
45 | def extract(self, pkg): | ||
46 | """ | ||
47 | Returns the path to a tmpdir where resides the contents of a package. | ||
48 | |||
49 | Deleting the tmpdir is the responsibility of the caller. | ||
50 | """ | ||
51 | pkg_info = self.package_info(pkg) | ||
52 | if not pkg_info: | ||
53 | bb.fatal("Unable to get information for package '%s' while " | ||
54 | "trying to extract the package." % pkg) | ||
55 | |||
56 | ar_cmd = bb.utils.which(os.getenv("PATH"), "ar") | ||
57 | tar_cmd = bb.utils.which(os.getenv("PATH"), "tar") | ||
58 | pkg_path = pkg_info[pkg]["filepath"] | ||
59 | |||
60 | if not os.path.isfile(pkg_path): | ||
61 | bb.fatal("Unable to extract package for '%s'." | ||
62 | "File %s doesn't exists" % (pkg, pkg_path)) | ||
63 | |||
64 | tmp_dir = tempfile.mkdtemp() | ||
65 | current_dir = os.getcwd() | ||
66 | os.chdir(tmp_dir) | ||
67 | |||
68 | try: | ||
69 | cmd = [ar_cmd, 'x', pkg_path] | ||
70 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) | ||
71 | data_tar = glob.glob("data.tar.*") | ||
72 | if len(data_tar) != 1: | ||
73 | bb.fatal("Unable to extract %s package. Failed to identify " | ||
74 | "data tarball (found tarballs '%s').", | ||
75 | pkg_path, data_tar) | ||
76 | data_tar = data_tar[0] | ||
77 | cmd = [tar_cmd, 'xf', data_tar] | ||
78 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) | ||
79 | except subprocess.CalledProcessError as e: | ||
80 | bb.utils.remove(tmp_dir, recurse=True) | ||
81 | bb.fatal("Unable to extract %s package. Command '%s' " | ||
82 | "returned %d:\n%s" % (pkg_path, ' '.join(cmd), e.returncode, e.output.decode("utf-8"))) | ||
83 | except OSError as e: | ||
84 | bb.utils.remove(tmp_dir, recurse=True) | ||
85 | bb.fatal("Unable to extract %s package. Command '%s' " | ||
86 | "returned %d:\n%s at %s" % (pkg_path, ' '.join(cmd), e.errno, e.strerror, e.filename)) | ||
87 | |||
88 | bb.note("Extracted %s to %s" % (pkg_path, tmp_dir)) | ||
89 | bb.utils.remove(os.path.join(tmp_dir, "debian-binary")) | ||
90 | bb.utils.remove(os.path.join(tmp_dir, "control.tar.gz")) | ||
91 | bb.utils.remove(os.path.join(tmp_dir, data_tar)) | ||
92 | os.chdir(current_dir) | ||
93 | |||
94 | return tmp_dir | ||
95 | |||
96 | def _handle_intercept_failure(self, registered_pkgs): | ||
97 | self.mark_packages("unpacked", registered_pkgs.split()) | ||
diff --git a/meta/lib/oe/package_manager/deb/__init__.py b/meta/lib/oe/package_manager/deb/__init__.py index 0c23c884c1..e09e81e490 100644 --- a/meta/lib/oe/package_manager/deb/__init__.py +++ b/meta/lib/oe/package_manager/deb/__init__.py | |||
@@ -7,6 +7,7 @@ | |||
7 | import re | 7 | import re |
8 | import subprocess | 8 | import subprocess |
9 | from oe.package_manager import * | 9 | from oe.package_manager import * |
10 | from oe.package_manager.common_deb_ipk import OpkgDpkgPM | ||
10 | 11 | ||
11 | class DpkgIndexer(Indexer): | 12 | class DpkgIndexer(Indexer): |
12 | def _create_configs(self): | 13 | def _create_configs(self): |
@@ -111,72 +112,6 @@ class PMPkgsList(PkgsList): | |||
111 | 112 | ||
112 | return opkg_query(cmd_output) | 113 | return opkg_query(cmd_output) |
113 | 114 | ||
114 | class OpkgDpkgPM(PackageManager): | ||
115 | def __init__(self, d, target_rootfs): | ||
116 | """ | ||
117 | This is an abstract class. Do not instantiate this directly. | ||
118 | """ | ||
119 | super(OpkgDpkgPM, self).__init__(d, target_rootfs) | ||
120 | |||
121 | def package_info(self, pkg, cmd): | ||
122 | """ | ||
123 | Returns a dictionary with the package info. | ||
124 | |||
125 | This method extracts the common parts for Opkg and Dpkg | ||
126 | """ | ||
127 | |||
128 | try: | ||
129 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).decode("utf-8") | ||
130 | except subprocess.CalledProcessError as e: | ||
131 | bb.fatal("Unable to list available packages. Command '%s' " | ||
132 | "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8"))) | ||
133 | return opkg_query(output) | ||
134 | |||
135 | def extract(self, pkg, pkg_info): | ||
136 | """ | ||
137 | Returns the path to a tmpdir where resides the contents of a package. | ||
138 | |||
139 | Deleting the tmpdir is responsability of the caller. | ||
140 | |||
141 | This method extracts the common parts for Opkg and Dpkg | ||
142 | """ | ||
143 | |||
144 | ar_cmd = bb.utils.which(os.getenv("PATH"), "ar") | ||
145 | tar_cmd = bb.utils.which(os.getenv("PATH"), "tar") | ||
146 | pkg_path = pkg_info[pkg]["filepath"] | ||
147 | |||
148 | if not os.path.isfile(pkg_path): | ||
149 | bb.fatal("Unable to extract package for '%s'." | ||
150 | "File %s doesn't exists" % (pkg, pkg_path)) | ||
151 | |||
152 | tmp_dir = tempfile.mkdtemp() | ||
153 | current_dir = os.getcwd() | ||
154 | os.chdir(tmp_dir) | ||
155 | data_tar = 'data.tar.xz' | ||
156 | |||
157 | try: | ||
158 | cmd = [ar_cmd, 'x', pkg_path] | ||
159 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) | ||
160 | cmd = [tar_cmd, 'xf', data_tar] | ||
161 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) | ||
162 | except subprocess.CalledProcessError as e: | ||
163 | bb.utils.remove(tmp_dir, recurse=True) | ||
164 | bb.fatal("Unable to extract %s package. Command '%s' " | ||
165 | "returned %d:\n%s" % (pkg_path, ' '.join(cmd), e.returncode, e.output.decode("utf-8"))) | ||
166 | except OSError as e: | ||
167 | bb.utils.remove(tmp_dir, recurse=True) | ||
168 | bb.fatal("Unable to extract %s package. Command '%s' " | ||
169 | "returned %d:\n%s at %s" % (pkg_path, ' '.join(cmd), e.errno, e.strerror, e.filename)) | ||
170 | |||
171 | bb.note("Extracted %s to %s" % (pkg_path, tmp_dir)) | ||
172 | bb.utils.remove(os.path.join(tmp_dir, "debian-binary")) | ||
173 | bb.utils.remove(os.path.join(tmp_dir, "control.tar.gz")) | ||
174 | os.chdir(current_dir) | ||
175 | |||
176 | return tmp_dir | ||
177 | |||
178 | def _handle_intercept_failure(self, registered_pkgs): | ||
179 | self.mark_packages("unpacked", registered_pkgs.split()) | ||
180 | 115 | ||
181 | class DpkgPM(OpkgDpkgPM): | 116 | class DpkgPM(OpkgDpkgPM): |
182 | def __init__(self, d, target_rootfs, archs, base_archs, apt_conf_dir=None, deb_repo_workdir="oe-rootfs-repo", filterbydependencies=True): | 117 | def __init__(self, d, target_rootfs, archs, base_archs, apt_conf_dir=None, deb_repo_workdir="oe-rootfs-repo", filterbydependencies=True): |
@@ -496,7 +431,7 @@ class DpkgPM(OpkgDpkgPM): | |||
496 | Returns a dictionary with the package info. | 431 | Returns a dictionary with the package info. |
497 | """ | 432 | """ |
498 | cmd = "%s show %s" % (self.apt_cache_cmd, pkg) | 433 | cmd = "%s show %s" % (self.apt_cache_cmd, pkg) |
499 | pkg_info = super(DpkgPM, self).package_info(pkg, cmd) | 434 | pkg_info = self._common_package_info(cmd) |
500 | 435 | ||
501 | pkg_arch = pkg_info[pkg]["pkgarch"] | 436 | pkg_arch = pkg_info[pkg]["pkgarch"] |
502 | pkg_filename = pkg_info[pkg]["filename"] | 437 | pkg_filename = pkg_info[pkg]["filename"] |
@@ -504,19 +439,3 @@ class DpkgPM(OpkgDpkgPM): | |||
504 | os.path.join(self.deploy_dir, pkg_arch, pkg_filename) | 439 | os.path.join(self.deploy_dir, pkg_arch, pkg_filename) |
505 | 440 | ||
506 | return pkg_info | 441 | return pkg_info |
507 | |||
508 | def extract(self, pkg): | ||
509 | """ | ||
510 | Returns the path to a tmpdir where resides the contents of a package. | ||
511 | |||
512 | Deleting the tmpdir is responsability of the caller. | ||
513 | """ | ||
514 | pkg_info = self.package_info(pkg) | ||
515 | if not pkg_info: | ||
516 | bb.fatal("Unable to get information for package '%s' while " | ||
517 | "trying to extract the package." % pkg) | ||
518 | |||
519 | tmp_dir = super(DpkgPM, self).extract(pkg, pkg_info) | ||
520 | bb.utils.remove(os.path.join(tmp_dir, "data.tar.xz")) | ||
521 | |||
522 | return tmp_dir | ||
diff --git a/meta/lib/oe/package_manager/ipk/__init__.py b/meta/lib/oe/package_manager/ipk/__init__.py index 8cc9953a02..3d998e52ff 100644 --- a/meta/lib/oe/package_manager/ipk/__init__.py +++ b/meta/lib/oe/package_manager/ipk/__init__.py | |||
@@ -8,6 +8,7 @@ import re | |||
8 | import shutil | 8 | import shutil |
9 | import subprocess | 9 | import subprocess |
10 | from oe.package_manager import * | 10 | from oe.package_manager import * |
11 | from oe.package_manager.common_deb_ipk import OpkgDpkgPM | ||
11 | 12 | ||
12 | class OpkgIndexer(Indexer): | 13 | class OpkgIndexer(Indexer): |
13 | def write_index(self): | 14 | def write_index(self): |
@@ -90,76 +91,6 @@ class PMPkgsList(PkgsList): | |||
90 | return opkg_query(cmd_output) | 91 | return opkg_query(cmd_output) |
91 | 92 | ||
92 | 93 | ||
93 | |||
94 | class OpkgDpkgPM(PackageManager): | ||
95 | def __init__(self, d, target_rootfs): | ||
96 | """ | ||
97 | This is an abstract class. Do not instantiate this directly. | ||
98 | """ | ||
99 | super(OpkgDpkgPM, self).__init__(d, target_rootfs) | ||
100 | |||
101 | def package_info(self, pkg, cmd): | ||
102 | """ | ||
103 | Returns a dictionary with the package info. | ||
104 | |||
105 | This method extracts the common parts for Opkg and Dpkg | ||
106 | """ | ||
107 | |||
108 | proc = subprocess.run(cmd, capture_output=True, encoding="utf-8", shell=True) | ||
109 | if proc.returncode: | ||
110 | bb.fatal("Unable to list available packages. Command '%s' " | ||
111 | "returned %d:\n%s" % (cmd, proc.returncode, proc.stderr)) | ||
112 | elif proc.stderr: | ||
113 | bb.note("Command '%s' returned stderr: %s" % (cmd, proc.stderr)) | ||
114 | |||
115 | return opkg_query(proc.stdout) | ||
116 | |||
117 | def extract(self, pkg, pkg_info): | ||
118 | """ | ||
119 | Returns the path to a tmpdir where resides the contents of a package. | ||
120 | |||
121 | Deleting the tmpdir is responsability of the caller. | ||
122 | |||
123 | This method extracts the common parts for Opkg and Dpkg | ||
124 | """ | ||
125 | |||
126 | ar_cmd = bb.utils.which(os.getenv("PATH"), "ar") | ||
127 | tar_cmd = bb.utils.which(os.getenv("PATH"), "tar") | ||
128 | pkg_path = pkg_info[pkg]["filepath"] | ||
129 | |||
130 | if not os.path.isfile(pkg_path): | ||
131 | bb.fatal("Unable to extract package for '%s'." | ||
132 | "File %s doesn't exists" % (pkg, pkg_path)) | ||
133 | |||
134 | tmp_dir = tempfile.mkdtemp() | ||
135 | current_dir = os.getcwd() | ||
136 | os.chdir(tmp_dir) | ||
137 | data_tar = 'data.tar.zst' | ||
138 | |||
139 | try: | ||
140 | cmd = [ar_cmd, 'x', pkg_path] | ||
141 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) | ||
142 | cmd = [tar_cmd, 'xf', data_tar] | ||
143 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) | ||
144 | except subprocess.CalledProcessError as e: | ||
145 | bb.utils.remove(tmp_dir, recurse=True) | ||
146 | bb.fatal("Unable to extract %s package. Command '%s' " | ||
147 | "returned %d:\n%s" % (pkg_path, ' '.join(cmd), e.returncode, e.output.decode("utf-8"))) | ||
148 | except OSError as e: | ||
149 | bb.utils.remove(tmp_dir, recurse=True) | ||
150 | bb.fatal("Unable to extract %s package. Command '%s' " | ||
151 | "returned %d:\n%s at %s" % (pkg_path, ' '.join(cmd), e.errno, e.strerror, e.filename)) | ||
152 | |||
153 | bb.note("Extracted %s to %s" % (pkg_path, tmp_dir)) | ||
154 | bb.utils.remove(os.path.join(tmp_dir, "debian-binary")) | ||
155 | bb.utils.remove(os.path.join(tmp_dir, "control.tar.gz")) | ||
156 | os.chdir(current_dir) | ||
157 | |||
158 | return tmp_dir | ||
159 | |||
160 | def _handle_intercept_failure(self, registered_pkgs): | ||
161 | self.mark_packages("unpacked", registered_pkgs.split()) | ||
162 | |||
163 | class OpkgPM(OpkgDpkgPM): | 94 | class OpkgPM(OpkgDpkgPM): |
164 | def __init__(self, d, target_rootfs, config_file, archs, task_name='target', ipk_repo_workdir="oe-rootfs-repo", filterbydependencies=True, prepare_index=True): | 95 | def __init__(self, d, target_rootfs, config_file, archs, task_name='target', ipk_repo_workdir="oe-rootfs-repo", filterbydependencies=True, prepare_index=True): |
165 | super(OpkgPM, self).__init__(d, target_rootfs) | 96 | super(OpkgPM, self).__init__(d, target_rootfs) |
@@ -485,7 +416,7 @@ class OpkgPM(OpkgDpkgPM): | |||
485 | Returns a dictionary with the package info. | 416 | Returns a dictionary with the package info. |
486 | """ | 417 | """ |
487 | cmd = "%s %s info %s" % (self.opkg_cmd, self.opkg_args, pkg) | 418 | cmd = "%s %s info %s" % (self.opkg_cmd, self.opkg_args, pkg) |
488 | pkg_info = super(OpkgPM, self).package_info(pkg, cmd) | 419 | pkg_info = self._common_package_info(cmd) |
489 | 420 | ||
490 | pkg_arch = pkg_info[pkg]["arch"] | 421 | pkg_arch = pkg_info[pkg]["arch"] |
491 | pkg_filename = pkg_info[pkg]["filename"] | 422 | pkg_filename = pkg_info[pkg]["filename"] |
@@ -493,19 +424,3 @@ class OpkgPM(OpkgDpkgPM): | |||
493 | os.path.join(self.deploy_dir, pkg_arch, pkg_filename) | 424 | os.path.join(self.deploy_dir, pkg_arch, pkg_filename) |
494 | 425 | ||
495 | return pkg_info | 426 | return pkg_info |
496 | |||
497 | def extract(self, pkg): | ||
498 | """ | ||
499 | Returns the path to a tmpdir where resides the contents of a package. | ||
500 | |||
501 | Deleting the tmpdir is responsability of the caller. | ||
502 | """ | ||
503 | pkg_info = self.package_info(pkg) | ||
504 | if not pkg_info: | ||
505 | bb.fatal("Unable to get information for package '%s' while " | ||
506 | "trying to extract the package." % pkg) | ||
507 | |||
508 | tmp_dir = super(OpkgPM, self).extract(pkg, pkg_info) | ||
509 | bb.utils.remove(os.path.join(tmp_dir, "data.tar.zst")) | ||
510 | |||
511 | return tmp_dir | ||
diff --git a/meta/lib/oe/recipeutils.py b/meta/lib/oe/recipeutils.py index de1fbdd3a8..2d69a33113 100644 --- a/meta/lib/oe/recipeutils.py +++ b/meta/lib/oe/recipeutils.py | |||
@@ -818,7 +818,7 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, | |||
818 | instdirline = 'install -d ${D}%s' % os.path.dirname(instdestpath) | 818 | instdirline = 'install -d ${D}%s' % os.path.dirname(instdestpath) |
819 | if not instdirline in instfunclines: | 819 | if not instdirline in instfunclines: |
820 | instfunclines.append(instdirline) | 820 | instfunclines.append(instdirline) |
821 | instfunclines.append('install -m %s ${WORKDIR}/%s ${D}%s' % (perms, os.path.basename(srcfile), instdestpath)) | 821 | instfunclines.append('install -m %s ${UNPACKDIR}/%s ${D}%s' % (perms, os.path.basename(srcfile), instdestpath)) |
822 | if instfunclines: | 822 | if instfunclines: |
823 | bbappendlines.append(('do_install:append%s()' % appendoverride, '', instfunclines)) | 823 | bbappendlines.append(('do_install:append%s()' % appendoverride, '', instfunclines)) |
824 | 824 | ||
diff --git a/meta/lib/oe/reproducible.py b/meta/lib/oe/reproducible.py index a9f717159e..1957c97434 100644 --- a/meta/lib/oe/reproducible.py +++ b/meta/lib/oe/reproducible.py | |||
@@ -75,10 +75,10 @@ def get_source_date_epoch_from_known_files(d, sourcedir): | |||
75 | return source_date_epoch | 75 | return source_date_epoch |
76 | 76 | ||
77 | def find_git_folder(d, sourcedir): | 77 | def find_git_folder(d, sourcedir): |
78 | # First guess: WORKDIR/git | 78 | # First guess: UNPACKDIR/git |
79 | # This is the default git fetcher unpack path | 79 | # This is the default git fetcher unpack path |
80 | workdir = d.getVar('WORKDIR') | 80 | unpackdir = d.getVar('UNPACKDIR') |
81 | gitpath = os.path.join(workdir, "git/.git") | 81 | gitpath = os.path.join(unpackdir, "git/.git") |
82 | if os.path.isdir(gitpath): | 82 | if os.path.isdir(gitpath): |
83 | return gitpath | 83 | return gitpath |
84 | 84 | ||
@@ -88,15 +88,16 @@ def find_git_folder(d, sourcedir): | |||
88 | return gitpath | 88 | return gitpath |
89 | 89 | ||
90 | # Perhaps there was a subpath or destsuffix specified. | 90 | # Perhaps there was a subpath or destsuffix specified. |
91 | # Go looking in the WORKDIR | 91 | # Go looking in the UNPACKDIR |
92 | exclude = set(["build", "image", "license-destdir", "patches", "pseudo", | 92 | for root, dirs, files in os.walk(unpackdir, topdown=True): |
93 | "recipe-sysroot", "recipe-sysroot-native", "sysroot-destdir", "temp"]) | ||
94 | for root, dirs, files in os.walk(workdir, topdown=True): | ||
95 | dirs[:] = [d for d in dirs if d not in exclude] | ||
96 | if '.git' in dirs: | 93 | if '.git' in dirs: |
97 | return os.path.join(root, ".git") | 94 | return os.path.join(root, ".git") |
98 | 95 | ||
99 | bb.warn("Failed to find a git repository in WORKDIR: %s" % workdir) | 96 | for root, dirs, files in os.walk(sourcedir, topdown=True): |
97 | if '.git' in dirs: | ||
98 | return os.path.join(root, ".git") | ||
99 | |||
100 | bb.warn("Failed to find a git repository in UNPACKDIR: %s" % unpackdir) | ||
100 | return None | 101 | return None |
101 | 102 | ||
102 | def get_source_date_epoch_from_git(d, sourcedir): | 103 | def get_source_date_epoch_from_git(d, sourcedir): |
diff --git a/meta/lib/oe/sstatesig.py b/meta/lib/oe/sstatesig.py index a46e5502ab..db3c409216 100644 --- a/meta/lib/oe/sstatesig.py +++ b/meta/lib/oe/sstatesig.py | |||
@@ -327,7 +327,6 @@ class SignatureGeneratorOEEquivHash(SignatureGeneratorOEBasicHashMixIn, bb.sigge | |||
327 | self.method = data.getVar('SSTATE_HASHEQUIV_METHOD') | 327 | self.method = data.getVar('SSTATE_HASHEQUIV_METHOD') |
328 | if not self.method: | 328 | if not self.method: |
329 | bb.fatal("OEEquivHash requires SSTATE_HASHEQUIV_METHOD to be set") | 329 | bb.fatal("OEEquivHash requires SSTATE_HASHEQUIV_METHOD to be set") |
330 | self.max_parallel = int(data.getVar('BB_HASHSERVE_MAX_PARALLEL') or 1) | ||
331 | self.username = data.getVar("BB_HASHSERVE_USERNAME") | 330 | self.username = data.getVar("BB_HASHSERVE_USERNAME") |
332 | self.password = data.getVar("BB_HASHSERVE_PASSWORD") | 331 | self.password = data.getVar("BB_HASHSERVE_PASSWORD") |
333 | if not self.username or not self.password: | 332 | if not self.username or not self.password: |
diff --git a/meta/lib/oeqa/manual/crops.json b/meta/lib/oeqa/manual/crops.json deleted file mode 100644 index 5cfa653843..0000000000 --- a/meta/lib/oeqa/manual/crops.json +++ /dev/null | |||
@@ -1,294 +0,0 @@ | |||
1 | [ | ||
2 | { | ||
3 | "test": { | ||
4 | "@alias": "crops-default.crops-default.sdkext_eSDK_devtool_build_make", | ||
5 | "author": [ | ||
6 | { | ||
7 | "email": "francisco.j.pedraza.gonzalez@intel.com", | ||
8 | "name": "francisco.j.pedraza.gonzalez@intel.com" | ||
9 | } | ||
10 | ], | ||
11 | "execution": { | ||
12 | "1": { | ||
13 | "action": "IMPORTANT NOTE: The first 5 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\n\n", | ||
14 | "expected_results": "" | ||
15 | }, | ||
16 | "2": { | ||
17 | "action": " Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n", | ||
18 | "expected_results": "" | ||
19 | }, | ||
20 | "3": { | ||
21 | "action": "Create the following tree of files <crops-esdk-workdir-workspace>/sdkext/files/myapp <crops-esdk-workdir-workspace>/sdkext/files/myapp_cmake \n\n\n", | ||
22 | "expected_results": "" | ||
23 | }, | ||
24 | "4": { | ||
25 | "action": " Create the following files withing the myapp directory myapp.c and the Makefile. Write the following inside of each file: \n---------------------------------------- \nMakefile should contain \n\nall: myapp \n\nmyapp: myapp.o \n\t$(CC) $(LDFLAGS) $< -o $@ \n\nmyapp.o: myapp.c \n\t$(CC) $(CFLAGS) -c $< -o $@ \n\nclean: \n\trm -rf myapp.o myapp \n\n----------------------------- \nmyapp.c shold contain \n\n\n#include <stdio.h> \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n \n\treturn 0; \n} \n------------------------------------ \n\n", | ||
26 | "expected_results": "be sure that the indentations on the makefile are tabs not spaces. \n\n" | ||
27 | }, | ||
28 | "5": { | ||
29 | "action": " Create the following files within the myapp_cmake directory CMakeLists.txt and myapp.c. Write the following inside each file: \n\n------------------------------------ \nCMakeLists.txt should contain: \n\ncmake_minimum_required (VERSION 2.6) \nproject (myapp) \n# The version number. \nset (myapp_VERSION_MAJOR 1) \nset (myapp_VERSION_MINOR 0) \n\n# add the executable \nadd_executable (myapp myapp.c) \n\ninstall(TARGETS myapp \nRUNTIME DESTINATION bin) \n\n------------------------------------------ \nmyapp.c should contain: \n\n#include <stdio.h> \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n\n\treturn 0; \n} \n------------------------------------------------- \n\n", | ||
30 | "expected_results": "Be sure that the indentations on CMakeLists.txt is tabs not spaces." | ||
31 | }, | ||
32 | "6": { | ||
33 | "action": " source environment-setup-i586-poky-linux \n\n", | ||
34 | "expected_results": "This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n\n" | ||
35 | }, | ||
36 | "7": { | ||
37 | "action": " run command which devtool \n\n", | ||
38 | "expected_results": "this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n " | ||
39 | }, | ||
40 | "8": { | ||
41 | "action": "devtool add myapp <directory>(this is myapp dir) \n\n\n", | ||
42 | "expected_results": "The directory you should input is the myapp directory. This should automatically create the recipe myapp.bb under <crops-esdk-workdir-workspace>/recipes/myapp/myapp.bb" | ||
43 | }, | ||
44 | "9": { | ||
45 | "action": " devtool build myapp \n\n", | ||
46 | "expected_results": "This should compile an image" | ||
47 | }, | ||
48 | "10": { | ||
49 | "action": " devtool reset myapp ", | ||
50 | "expected_results": "This cleans sysroot of the myapp recipe, but it leaves the source tree intact. meaning it does not erase." | ||
51 | } | ||
52 | }, | ||
53 | "summary": "sdkext_eSDK_devtool_build_make" | ||
54 | } | ||
55 | }, | ||
56 | { | ||
57 | "test": { | ||
58 | "@alias": "crops-default.crops-default.sdkext_devtool_build_esdk_package", | ||
59 | "author": [ | ||
60 | { | ||
61 | "email": "francisco.j.pedraza.gonzalez@intel.com", | ||
62 | "name": "francisco.j.pedraza.gonzalez@intel.com" | ||
63 | } | ||
64 | ], | ||
65 | "execution": { | ||
66 | "1": { | ||
67 | "action": "IMPORTANT NOTE: The firsts 5 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\n\n", | ||
68 | "expected_results": "" | ||
69 | }, | ||
70 | "2": { | ||
71 | "action": " Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n", | ||
72 | "expected_results": "" | ||
73 | }, | ||
74 | "3": { | ||
75 | "action": " Create the following tree of files <crops-esdk-workdir-workspace>/sdkext/files/myapp/ \n <crops-esdk-workdir-workspace>/sdkext/files/myapp_cmake \n\n", | ||
76 | "expected_results": "" | ||
77 | }, | ||
78 | "4": { | ||
79 | "action": " Create the following files withing the myapp directory myapp.c and the Makefile. Write the following inside of each file: \n---------------------------------------- \nMakefile should contain \n\nall: myapp \n\nmyapp: myapp.o \n\t$(CC) $(LDFLAGS) $< -o $@ \n\nmyapp.o: myapp.c \n\t$(CC) $(CFLAGS) -c $< -o $@ \n\nclean: \n\trm -rf myapp.o myapp \n\n----------------------------- \nmyapp.c shold contain \n\n#include <stdio.h> \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n \n\treturn 0; \n} \n------------------------------------ \n\n", | ||
80 | "expected_results": "be sure that the indentations on the makefile are tabs not spaces. \n\n" | ||
81 | }, | ||
82 | "5": { | ||
83 | "action": " Create the following files within the myapp_cmake directory CMakeLists.txt and myapp.c. Write the following inside each file: \n\n------------------------------------ \nCMakeLists.txt should contain: \n\ncmake_minimum_required (VERSION 2.6) \nproject (myapp) \n# The version number. \nset (myapp_VERSION_MAJOR 1) \nset (myapp_VERSION_MINOR 0) \n\n# add the executable \nadd_executable (myapp myapp.c) \n\ninstall(TARGETS myapp \nRUNTIME DESTINATION bin) \n\n------------------------------------------ \nmyapp.c should contain: \n\n#include<stdio.h> \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n\n\treturn 0; \n} \n------------------------------------------------- \n\n", | ||
84 | "expected_results": "Be sure that the indentations on CMakeLists.txt is tabs not spaces. \n\n" | ||
85 | }, | ||
86 | "6": { | ||
87 | "action": " source environment-setup-i586-poky-linux \n\n", | ||
88 | "expected_results": "This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n\n" | ||
89 | }, | ||
90 | "7": { | ||
91 | "action": " run command which devtool \n\n", | ||
92 | "expected_results": " this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n" | ||
93 | }, | ||
94 | "8": { | ||
95 | "action": " devtool add myapp <directory> (this is myapp dir) \n\n", | ||
96 | "expected_results": " The directory you should input is the myapp directory. This should automatically create the recipe myapp.bb under <crops-esdk-workdir-workspace>/recipes/myapp/myapp.bb \n\n" | ||
97 | }, | ||
98 | "9": { | ||
99 | "action": " devtool package myapp \n\n", | ||
100 | "expected_results": " you should expect a package creation of myapp and it should be under the /tmp/deploy/ \n\n" | ||
101 | }, | ||
102 | "10": { | ||
103 | "action": " devtool reset myapp ", | ||
104 | "expected_results": "This cleans sysroot of the myapp recipe, but it leaves the source tree intact, meaning it does not erase." | ||
105 | } | ||
106 | }, | ||
107 | "summary": "sdkext_devtool_build_esdk_package" | ||
108 | } | ||
109 | }, | ||
110 | { | ||
111 | "test": { | ||
112 | "@alias": "crops-default.crops-default.sdkext_devtool_build_cmake", | ||
113 | "author": [ | ||
114 | { | ||
115 | "email": "francisco.j.pedraza.gonzalez@intel.com", | ||
116 | "name": "francisco.j.pedraza.gonzalez@intel.com" | ||
117 | } | ||
118 | ], | ||
119 | "execution": { | ||
120 | "1": { | ||
121 | "action": "IMPORTANT NOTE: The firsts 5 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\n\n", | ||
122 | "expected_results": "" | ||
123 | }, | ||
124 | "2": { | ||
125 | "action": " Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n", | ||
126 | "expected_results": "" | ||
127 | }, | ||
128 | "3": { | ||
129 | "action": " Create the following tree of files <crops-esdk-workdir-workspace>/sdkext/files/myapp \n <crops-esdk-workdir-workspace>/sdkext/files/myapp_cmake \n\n", | ||
130 | "expected_results": "" | ||
131 | }, | ||
132 | "4": { | ||
133 | "action": " Create the following files withing the myapp directory myapp.c and the Makefile. Write the following inside of each file: \n---------------------------------------- \nMakefile should contain \n\nall: myapp \n\nmyapp: myapp.o \n\t$(CC) $(LDFLAGS) $< -o $@ \n\nmyapp.o: myapp.c \n\t$(CC) $(CFLAGS) -c $< -o $@ \n\nclean: \n\trm -rf myapp.o myapp \n\n----------------------------- \nmyapp.c shold contain \n\n#include <stdio.h> \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n \n\treturn 0; \n} \n------------------------------------ \n\n", | ||
134 | "expected_results": "be sure that the indentations on the makefile are tabs not spaces. \n\n" | ||
135 | }, | ||
136 | "5": { | ||
137 | "action": " Create the following files within the myapp_cmake directory CMakeLists.txt and myapp.c. Write the following inside each file: \n\n------------------------------------ \nCMakeLists.txt should contain: \n\ncmake_minimum_required (VERSION 2.6) \nproject (myapp) \n# The version number. \nset (myapp_VERSION_MAJOR 1) \nset (myapp_VERSION_MINOR 0) \n\n# add the executable \nadd_executable (myapp myapp.c) \n\ninstall(TARGETS myapp \nRUNTIME DESTINATION bin) \n\n------------------------------------------ \nmyapp.c should contain: \n\n#include \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n\n\treturn 0; \n} \n------------------------------------------------- \n\n", | ||
138 | "expected_results": "Be sure that the indentations on CMakeLists.txt is tabs not spaces. \n\n" | ||
139 | }, | ||
140 | "6": { | ||
141 | "action": " source environment-setup-i586-poky-linux \n\n", | ||
142 | "expected_results": "This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n\n" | ||
143 | }, | ||
144 | "7": { | ||
145 | "action": " run command which devtool \n\n", | ||
146 | "expected_results": "this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n" | ||
147 | }, | ||
148 | "8": { | ||
149 | "action": " devtool add myapp <directory> (this is myapp_cmake dir) \n\n", | ||
150 | "expected_results": "The directory you should input is the myapp_cmake directory. This should automatically create the recipe myapp.bb under <crops-esdk-workdir-workspace>/recipes/myapp/myapp.bb \n\n" | ||
151 | }, | ||
152 | "9": { | ||
153 | "action": " devtool build myapp \n\n", | ||
154 | "expected_results": "This should compile an image \n\n" | ||
155 | }, | ||
156 | "10": { | ||
157 | "action": " devtool reset myapp ", | ||
158 | "expected_results": "This cleans sysroot of the myapp recipe, but it leaves the source tree intact. meaning it does not erase. " | ||
159 | } | ||
160 | }, | ||
161 | "summary": "sdkext_devtool_build_cmake" | ||
162 | } | ||
163 | }, | ||
164 | { | ||
165 | "test": { | ||
166 | "@alias": "crops-default.crops-default.sdkext_extend_autotools_recipe_creation", | ||
167 | "author": [ | ||
168 | { | ||
169 | "email": "francisco.j.pedraza.gonzalez@intel.com", | ||
170 | "name": "francisco.j.pedraza.gonzalez@intel.com" | ||
171 | } | ||
172 | ], | ||
173 | "execution": { | ||
174 | "1": { | ||
175 | "action": "IMPORTANT NOTE: The firsts 2 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\n\n", | ||
176 | "expected_results": "" | ||
177 | }, | ||
178 | "2": { | ||
179 | "action": "Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n", | ||
180 | "expected_results": "" | ||
181 | }, | ||
182 | "3": { | ||
183 | "action": " source environment-setup-i586-poky-linux \n\n", | ||
184 | "expected_results": " This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n\n" | ||
185 | }, | ||
186 | "4": { | ||
187 | "action": "run command which devtool \n\n", | ||
188 | "expected_results": "this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n" | ||
189 | }, | ||
190 | "5": { | ||
191 | "action": "devtool sdk-install -s libxml2 \n\n", | ||
192 | "expected_results": "this should install libxml2 \n\n" | ||
193 | }, | ||
194 | "6": { | ||
195 | "action": "devtool add librdfa https://github.com/rdfa/librdfa \n\n", | ||
196 | "expected_results": "This should automatically create the recipe librdfa.bb under /recipes/librdfa/librdfa.bb \n\n" | ||
197 | }, | ||
198 | "7": { | ||
199 | "action": "devtool build librdfa \n\n", | ||
200 | "expected_results": "This should compile \n\n" | ||
201 | }, | ||
202 | "8": { | ||
203 | "action": "devtool reset librdfa ", | ||
204 | "expected_results": "This cleans sysroot of the librdfa recipe, but it leaves the source tree intact. meaning it does not erase." | ||
205 | } | ||
206 | }, | ||
207 | "summary": "sdkext_extend_autotools_recipe_creation" | ||
208 | } | ||
209 | }, | ||
210 | { | ||
211 | "test": { | ||
212 | "@alias": "crops-default.crops-default.sdkext_devtool_kernelmodule", | ||
213 | "author": [ | ||
214 | { | ||
215 | "email": "francisco.j.pedraza.gonzalez@intel.com", | ||
216 | "name": "francisco.j.pedraza.gonzalez@intel.com" | ||
217 | } | ||
218 | ], | ||
219 | "execution": { | ||
220 | "1": { | ||
221 | "action": "IMPORTANT NOTE: The firsts 2 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\n", | ||
222 | "expected_results": "" | ||
223 | }, | ||
224 | "2": { | ||
225 | "action": " Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n", | ||
226 | "expected_results": "" | ||
227 | }, | ||
228 | "3": { | ||
229 | "action": "source environment-setup-i586-poky-linux \n\n", | ||
230 | "expected_results": "This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n \n" | ||
231 | }, | ||
232 | "4": { | ||
233 | "action": "run command which devtool \n\n", | ||
234 | "expected_results": "this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n" | ||
235 | }, | ||
236 | "5": { | ||
237 | "action": "devtool add kernel-module-hello-world https://git.yoctoproject.org/git/kernel-module-hello-world \n\n", | ||
238 | "expected_results": "This should automatically create the recipe kernel-module-hello-world.bb under <crops-esdk-workdir-workspace>/recipes/kernel-module-hello-world/kernel-module-hello-world.bb " | ||
239 | }, | ||
240 | "6": { | ||
241 | "action": "devtool build kernel-module-hello-world \n\n", | ||
242 | "expected_results": "This should compile an image \n\n" | ||
243 | }, | ||
244 | "7": { | ||
245 | "action": "devtool reset kernel-module-hello-world ", | ||
246 | "expected_results": "This cleans sysroot of the kernel-module-hello-world recipe, but it leaves the source tree intact. meaning it does not erase." | ||
247 | } | ||
248 | }, | ||
249 | "summary": "sdkext_devtool_kernelmodule" | ||
250 | } | ||
251 | }, | ||
252 | { | ||
253 | "test": { | ||
254 | "@alias": "crops-default.crops-default.sdkext_recipes_for_nodejs", | ||
255 | "author": [ | ||
256 | { | ||
257 | "email": "francisco.j.pedraza.gonzalez@intel.com", | ||
258 | "name": "francisco.j.pedraza.gonzalez@intel.com" | ||
259 | } | ||
260 | ], | ||
261 | "execution": { | ||
262 | "1": { | ||
263 | "action": "IMPORTANT NOTE: The firsts 2 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\nlets say variable npm = npm://registry.npmjs.org;name=winston;version=2.2.0 \n\n", | ||
264 | "expected_results": "" | ||
265 | }, | ||
266 | "2": { | ||
267 | "action": "Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n", | ||
268 | "expected_results": "" | ||
269 | }, | ||
270 | "3": { | ||
271 | "action": "source environment-setup-i586-poky-linux \n\n", | ||
272 | "expected_results": "This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n\n" | ||
273 | }, | ||
274 | "4": { | ||
275 | "action": "run command which devtool \n\n", | ||
276 | "expected_results": "this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n" | ||
277 | }, | ||
278 | "5": { | ||
279 | "action": " 4a) git clone git://git.openembedded.org/meta-openembedded in layers/build directory \n \n4b) Add meta-openembedded/meta-oe in bblayer.conf as mentioned below: ${SDKBASEMETAPATH}/layers/build/meta-openembedded/meta-oe \\ \n\n4c) devtool add \"npm://registry.npmjs.org;name=npm;version=2.2.0\" \n\n", | ||
280 | "expected_results": " This should automatically create the recipe npm.bb under /recipes/npm/npm.bb \n\n" | ||
281 | }, | ||
282 | "6": { | ||
283 | "action": "devtool build npm \n\n", | ||
284 | "expected_results": "This should compile an image \n\n" | ||
285 | }, | ||
286 | "7": { | ||
287 | "action": " devtool reset npm", | ||
288 | "expected_results": "This cleans sysroot of the npm recipe, but it leaves the source tree intact. meaning it does not erase." | ||
289 | } | ||
290 | }, | ||
291 | "summary": "sdkext_recipes_for_nodejs" | ||
292 | } | ||
293 | } | ||
294 | ] | ||
diff --git a/meta/lib/oeqa/manual/eclipse-plugin.json b/meta/lib/oeqa/manual/eclipse-plugin.json deleted file mode 100644 index 6c110d0656..0000000000 --- a/meta/lib/oeqa/manual/eclipse-plugin.json +++ /dev/null | |||
@@ -1,322 +0,0 @@ | |||
1 | [ | ||
2 | { | ||
3 | "test": { | ||
4 | "@alias": "eclipse-plugin.eclipse-plugin.support_SSH_connection_to_Target", | ||
5 | "author": [ | ||
6 | { | ||
7 | "email": "ee.peng.yeoh@intel.com", | ||
8 | "name": "ee.peng.yeoh@intel.com" | ||
9 | } | ||
10 | ], | ||
11 | "execution": { | ||
12 | "1": { | ||
13 | "action": "In Eclipse, switch to Remote System Explorer to create a connection based on SSH, input the remote target IP address as the Host name, make sure to disable the proxy in Window->Preferences->General->Network Connection, set Direct as Active Provider field. ", | ||
14 | "expected_results": "the connection based on SSH could be set up." | ||
15 | }, | ||
16 | "2": { | ||
17 | "action": "Configure connection from Eclipse: Run->Run Configurations->C/C++ Remote Application\\ ->New Connection->General->SSH Only ", | ||
18 | "expected_results": "" | ||
19 | }, | ||
20 | "3": { | ||
21 | "action": "Then right click to connect, input the user ID and password. ", | ||
22 | "expected_results": "" | ||
23 | }, | ||
24 | "4": { | ||
25 | "action": "expand the connection, it will show the Sftp Files etc. \nNOTE. Might need to change dropbear to openssh and add the packagegroup-core-eclipse-debug recipe", | ||
26 | "expected_results": "" | ||
27 | } | ||
28 | }, | ||
29 | "summary": "support_SSH_connection_to_Target" | ||
30 | } | ||
31 | }, | ||
32 | { | ||
33 | "test": { | ||
34 | "@alias": "eclipse-plugin.eclipse-plugin.Launch_QEMU_from_Eclipse", | ||
35 | "author": [ | ||
36 | { | ||
37 | "email": "ee.peng.yeoh@intel.com", | ||
38 | "name": "ee.peng.yeoh@intel.com" | ||
39 | } | ||
40 | ], | ||
41 | "execution": { | ||
42 | "1": { | ||
43 | "action": "Set the Yocto ADT's toolchain root location, sysroot location and kernel, in the menu Window -> Preferences -> Yocto ADT. \n \n", | ||
44 | "expected_results": "" | ||
45 | }, | ||
46 | "2": { | ||
47 | "action": "wget https://downloads.yoctoproject.org/releases/yocto/yocto-$VERSION/machines/qemu/qemux86/ (ex:core-image-sato-sdk-qemux86-date-rootfs-tar-bz2) \nsource /opt/poky/version/environment-setup-i585-poky-linux \n\nExtract qemu with runqemu-extract-sdk /home/user/file(ex.core-image-sato-sdk-qemux86.bz2) \n/home/user/qemux86-sato-sdk \n\n", | ||
48 | "expected_results": " Qemu can be launched normally." | ||
49 | }, | ||
50 | "3": { | ||
51 | "action": "(a)Point to the Toolchain: \n \nIf you are using a stand-alone pre-built toolchain, you should be pointing to the /opt/poky/{test-version} directory as Toolchain Root Location. This is the default location for toolchains installed by the ADT Installer or by hand. If ADT is installed in other location, use that location as Toolchain location.\nIf you are using a system-derived toolchain, the path you provide for the Toolchain Root Location field is the Yocto Project's build directory. \n \n E.g:/home/user/yocto/poky/build \n", | ||
52 | "expected_results": "" | ||
53 | }, | ||
54 | "4": { | ||
55 | "action": "(b)Specify the Sysroot Location: \nSysroot Location is the location where the root filesystem for the target hardware is created on the development system by the ADT Installer (SYSROOT in step 2 of the case ADT installer Installation). \n \n Local : e.g: /home/user/qemux86-sato-sdk \nUsing ADT : e.g :/home/user/test-yocto/qemux86 \n\n", | ||
56 | "expected_results": "" | ||
57 | }, | ||
58 | "5": { | ||
59 | "action": "(c)Select the Target Architecture: \n \nThe target architecture is the type of hardware you are going to use or emulate. Use the pull-down Target Architecture menu to make your selection. \n \n\n", | ||
60 | "expected_results": "" | ||
61 | }, | ||
62 | "6": { | ||
63 | "action": "(d) QEMU: \nSelect this option if you will be using the QEMU emulator. Specify the Kernel matching the QEMU architecture you are using. \n wget https://downloads.yoctoproject.org/releases/yocto/yocto-$VERSION/machines/qemu/qemux86/bzImage-qemux86.bin \n e.g: /home/$USER/yocto/adt-installer/download_image/bzImage-qemux86.bin \n\n", | ||
64 | "expected_results": "" | ||
65 | }, | ||
66 | "7": { | ||
67 | "action": "(e) select OK to save the settings. \n\n\n1: In the Eclipse toolbar, expose the Run -> External Tools menu. Your image should appear as a selectable menu item. \n2: Select your image in the navigation pane to launch the emulator in a new window. \n3: If needed, enter your host root password in the shell window at the prompt. This sets up a Tap 0 connection needed for running in user-space NFS mode. \n", | ||
68 | "expected_results": "" | ||
69 | } | ||
70 | }, | ||
71 | "summary": "Launch_QEMU_from_Eclipse" | ||
72 | } | ||
73 | }, | ||
74 | { | ||
75 | "test": { | ||
76 | "@alias": "eclipse-plugin.eclipse-plugin.Relocatable_SDK_-_C_-_Build_Hello_World_ANSI_C_Autotools_Project", | ||
77 | "author": [ | ||
78 | { | ||
79 | "email": "ee.peng.yeoh@intel.com", | ||
80 | "name": "ee.peng.yeoh@intel.com" | ||
81 | } | ||
82 | ], | ||
83 | "execution": { | ||
84 | "1": { | ||
85 | "action": "Launch a QEMU of target environment.(Reference to case \"ADT - Launch qemu by eclipse\") ", | ||
86 | "expected_results": "" | ||
87 | }, | ||
88 | "2": { | ||
89 | "action": "Select File -> New -> Project.", | ||
90 | "expected_results": "" | ||
91 | }, | ||
92 | "3": { | ||
93 | "action": "Double click C/C++.", | ||
94 | "expected_results": "" | ||
95 | }, | ||
96 | "4": { | ||
97 | "action": "Click C or C++ Project to create the project.", | ||
98 | "expected_results": "" | ||
99 | }, | ||
100 | "5": { | ||
101 | "action": "Expand Yocto ADT Project.", | ||
102 | "expected_results": "" | ||
103 | }, | ||
104 | "6": { | ||
105 | "action": "Select Hello World ANSI C Autotools Project.", | ||
106 | "expected_results": "" | ||
107 | }, | ||
108 | "7": { | ||
109 | "action": "Put a name in the Project name. Do not use hyphens as part of the name. \n \n", | ||
110 | "expected_results": "" | ||
111 | }, | ||
112 | "8": { | ||
113 | "action": "Click Next.", | ||
114 | "expected_results": "" | ||
115 | }, | ||
116 | "9": { | ||
117 | "action": "Add information in the Author and Copyright notice fields. \n1", | ||
118 | "expected_results": "" | ||
119 | }, | ||
120 | "10": { | ||
121 | "action": "Click Finish. \n1", | ||
122 | "expected_results": "" | ||
123 | }, | ||
124 | "11": { | ||
125 | "action": "If the \"open perspective\" prompt appears, click \"Yes\" so that you open the C/C++ perspective. \n1", | ||
126 | "expected_results": "" | ||
127 | }, | ||
128 | "12": { | ||
129 | "action": "In the Project Explorer window, right click the project -> Reconfigure project. \n1", | ||
130 | "expected_results": "" | ||
131 | }, | ||
132 | "13": { | ||
133 | "action": "In the Project Explorer window, right click the project -> Build project. \n1", | ||
134 | "expected_results": "Under the Project files, a new folder appears called Binaries. This indicates that the compilation have been successful and the project binary have been created. \n" | ||
135 | }, | ||
136 | "14": { | ||
137 | "action": "Right click it again and Run as -> Run Configurations. \n\t\t\tUnder Run Configurations expand \"C/C++ Remote Application\". A configuration for the current project should appear. Clicking it will display the configuration settings. \n\t\t\tin \"C/C++ Application\" field input Remote Absolute File path for C/C++ Application. e.g.: /home/root/myapplication \n\t\t\tIn \"Connection\" drop-down list make sure a TCF connection is set up for your target. If not, create a new one by clicking the New button. \n1", | ||
138 | "expected_results": "step 14 to step 16 -> Build succeed and the console outputs Hello world, you can also check the output on target." | ||
139 | }, | ||
140 | "15": { | ||
141 | "action": "After all settings are done, select the Run button on the bottom right corner \n\n1", | ||
142 | "expected_results": "" | ||
143 | }, | ||
144 | "16": { | ||
145 | "action": "Repeat the steps 14-15, but instead of using Run Configurations use Debug Configurations: \nRight click it again and Debug as -> Debug Configurations \nUnder Debug Configurations expand \"C/C++ Remote Application\". A configuration for the current project should appear. Clicking it will display the configuration settings. \nin \"C/C++ Application\" field input Remote Absolute File path for C/C++ Application.\ne.g.: /home/root/myapplication \nIn \"Connection\" drop-down list make sure a TCF connection is set up for your target. If not, create a new one by clicking the New button \n1", | ||
146 | "expected_results": "" | ||
147 | }, | ||
148 | "17": { | ||
149 | "action": "After all settings are done, select the Debug button on the bottom right corner", | ||
150 | "expected_results": "" | ||
151 | } | ||
152 | }, | ||
153 | "summary": "Relocatable_SDK_-_C_-_Build_Hello_World_ANSI_C_Autotools_Project" | ||
154 | } | ||
155 | }, | ||
156 | { | ||
157 | "test": { | ||
158 | "@alias": "eclipse-plugin.eclipse-plugin.Relocatable_SDK_-_C++_-_Build_Hello_World_C++_Autotools_project", | ||
159 | "author": [ | ||
160 | { | ||
161 | "email": "ee.peng.yeoh@intel.com", | ||
162 | "name": "ee.peng.yeoh@intel.com" | ||
163 | } | ||
164 | ], | ||
165 | "execution": { | ||
166 | "1": { | ||
167 | "action": "Launch a QEMU of target environment.(Reference to case \"ADT - Launch qemu by eclipse\") ", | ||
168 | "expected_results": "" | ||
169 | }, | ||
170 | "2": { | ||
171 | "action": "Select File -> New -> Project. ", | ||
172 | "expected_results": "" | ||
173 | }, | ||
174 | "3": { | ||
175 | "action": "Double click C/C++. ", | ||
176 | "expected_results": "" | ||
177 | }, | ||
178 | "4": { | ||
179 | "action": "Click C or C++ Project to create the project. ", | ||
180 | "expected_results": "" | ||
181 | }, | ||
182 | "5": { | ||
183 | "action": "Expand Yocto ADT Project. ", | ||
184 | "expected_results": "" | ||
185 | }, | ||
186 | "6": { | ||
187 | "action": "Select Hello World ANSI C++ Autotools Project. ", | ||
188 | "expected_results": "" | ||
189 | }, | ||
190 | "7": { | ||
191 | "action": "Put a name in the Project name. Do not use hyphens as part of the name. \n \n", | ||
192 | "expected_results": "" | ||
193 | }, | ||
194 | "8": { | ||
195 | "action": "Click Next.", | ||
196 | "expected_results": "" | ||
197 | }, | ||
198 | "9": { | ||
199 | "action": "Add information in the Author and Copyright notice fields.", | ||
200 | "expected_results": "" | ||
201 | }, | ||
202 | "10": { | ||
203 | "action": "Click Finish. \n1", | ||
204 | "expected_results": "" | ||
205 | }, | ||
206 | "11": { | ||
207 | "action": "If the \"open perspective\" prompt appears, click \"Yes\" so that you open the C/C++ perspective. \n1", | ||
208 | "expected_results": "" | ||
209 | }, | ||
210 | "12": { | ||
211 | "action": "In the Project Explorer window, right click the project -> Reconfigure project. \n1", | ||
212 | "expected_results": "" | ||
213 | }, | ||
214 | "13": { | ||
215 | "action": "In the Project Explorer window, right click the project -> Build project. \n\n1", | ||
216 | "expected_results": "under the Project files, a new folder appears called Binaries. This indicates that the compilation have been successful and the project binary have been created. \n" | ||
217 | }, | ||
218 | "14": { | ||
219 | "action": "Right click it again and Run as -> Run Configurations. \n\t\t\tUnder Run Configurations expand \"C/C++ Remote Application\". A configuration for the current project should appear. Clicking it will display the configuration settings. \n\t\t\tin \"C/C++ Application\" field input Remote Absolute File path for C/C++ Application. e.g.: /home/root/myapplication \n\t\t\tIn \"Connection\" drop-down list make sure a TCF connection is set up for your target. If not, create a new one by clicking the New button. \n1", | ||
220 | "expected_results": "step 14 to step 16 -> Build succeed and the console outputs Hello world, you can also check the output on target." | ||
221 | }, | ||
222 | "15": { | ||
223 | "action": "After all settings are done, select the Run button on the bottom right corner \n\n1", | ||
224 | "expected_results": "" | ||
225 | }, | ||
226 | "16": { | ||
227 | "action": "Repeat the steps 14-15, but instead of using Run Configurations use Debug Configurations: \n\t\tRight click it again and Debug as -> Debug Configurations \n\t\tUnder Debug Configurations expand \"C/C++ Remote Application\". A configuration for the current project should appear. Clicking it will display the configuration settings. \n\t\tin \"C/C++ Application\" field input Remote Absolute File path for C/C++ Application. \n\t\te.g.: /home/root/myapplication \n\t\tIn \"Connection\" drop-down list make sure a TCF connection is set up for your target. If not, create a new one by clicking the New button \n1", | ||
228 | "expected_results": "" | ||
229 | }, | ||
230 | "17": { | ||
231 | "action": "After all settings are done, select the Debug button on the bottom right corner", | ||
232 | "expected_results": "" | ||
233 | } | ||
234 | }, | ||
235 | "summary": "Relocatable_SDK_-_C++_-_Build_Hello_World_C++_Autotools_project" | ||
236 | } | ||
237 | }, | ||
238 | { | ||
239 | "test": { | ||
240 | "@alias": "eclipse-plugin.eclipse-plugin.Build_Eclipse_Plugin_from_source", | ||
241 | "author": [ | ||
242 | { | ||
243 | "email": "laurentiu.serban@intel.com", | ||
244 | "name": "laurentiu.serban@intel.com" | ||
245 | } | ||
246 | ], | ||
247 | "execution": { | ||
248 | "1": { | ||
249 | "action": "Clone eclipse-poky source. \n \n - git clone git://git.yoctoproject.org/eclipse-poky \n\n", | ||
250 | "expected_results": "Eclipse plugin is successfully installed \n\nDocumentation is there. For example if you have release yocto-2.0.1 you will find on https://downloads.yoctoproject.org/releases/yocto/yocto-2.0.1/eclipse-plugin/mars/ an archive with documentation like org.yocto.doc-development-$date.zip \n \n" | ||
251 | }, | ||
252 | "2": { | ||
253 | "action": "Checkout correct tag. \n\n - git checkout <eclipse-version>/<yocto-version> \n\n", | ||
254 | "expected_results": "After the plugin is built you must have 4 archives in the scripts folder from eclipse-poky: \n - org.yocto.bc - mars-master-$date.zip \n - org.yocto.doc - mars-master-$date.zip --> documentation \n - org.yocto.sdk - mars-master-$date.zip \n - org.yocto.sdk - mars-master-$date.-archive.zip --> plugin " | ||
255 | }, | ||
256 | "3": { | ||
257 | "action": "Move to scripts/ folder. \n\n", | ||
258 | "expected_results": "" | ||
259 | }, | ||
260 | "4": { | ||
261 | "action": "Run ./setup.sh \n\n", | ||
262 | "expected_results": "" | ||
263 | }, | ||
264 | "5": { | ||
265 | "action": "When the script finishes, it prompts a command to issue to build the plugin. It should look similar to the following: \n\n$ ECLIPSE_HOME=/eclipse-poky/scripts/eclipse ./build.sh <tag-name> <documentation-branch> <release-name> 2>&1 | tee -a build.log \n\nHere, the three arguments to the build script are tag name, branch for documentation and release name. \n\n", | ||
266 | "expected_results": "" | ||
267 | }, | ||
268 | "6": { | ||
269 | "action": "On an eclipse without the Yocto Plugin, select \"Install New Software\" from Help pull-down menu \n\n", | ||
270 | "expected_results": "" | ||
271 | }, | ||
272 | "7": { | ||
273 | "action": "Select Add and from the dialog choose Archive... Look for the *archive.zip file that was built previously with the build.sh script. Click OK. \n\n", | ||
274 | "expected_results": "" | ||
275 | }, | ||
276 | "8": { | ||
277 | "action": "Select all components and proceed with Installation of plugin. Restarting eclipse might be required.\n", | ||
278 | "expected_results": "" | ||
279 | } | ||
280 | }, | ||
281 | "summary": "Build_Eclipse_Plugin_from_source" | ||
282 | } | ||
283 | }, | ||
284 | { | ||
285 | "test": { | ||
286 | "@alias": "eclipse-plugin.eclipse-plugin.Eclipse_Poky_installation_and_setup", | ||
287 | "author": [ | ||
288 | { | ||
289 | "email": "ee.peng.yeoh@intel.com", | ||
290 | "name": "ee.peng.yeoh@intel.com" | ||
291 | } | ||
292 | ], | ||
293 | "execution": { | ||
294 | "1": { | ||
295 | "action": "Install SDK \n\ta)Download https://autobuilder.yocto.io/pub/releases//toolchain/x86_64/poky-glibc-x86_64-core-\timage-sato-i586-toolchain-.sh \n\tb)Run the SDK installer and accept the default installation directory ", | ||
296 | "expected_results": "" | ||
297 | }, | ||
298 | "2": { | ||
299 | "action": "Install \"Eclipse IDE for C/C++ Developers\" Oxygen release (4.7.0) \n\ta) Go to https://www.eclipse.org/downloads/packages/all, click \"Oxygen R\" \n\tb) Click to download the build for your OS \n\tc) Click \"Download\" button to download from a mirror \n\td) Run \"tar xf\" to extract the downloaded archive ", | ||
300 | "expected_results": "" | ||
301 | }, | ||
302 | "3": { | ||
303 | "action": "Install \"Eclipse IDE for C/C++ Developers\" Oxygen release (4.7.0) (Continue) \n\te) Run \"eclipse/eclipse\" to start Eclipse \n\tf) Optional step for host machine within Intel network: In Eclipse workbench window, go to \"Window\" menu -> \"Preferences...\". \n\tg) In \"Preferences\" dialog, go to \"General\" -> \"Network Connections\", set \"Active Provider\" to \"Manual\". In \"Proxy \tentries\" table, select HTTP and click \"Edit\" and enter host \"proxy-chain.intel.com\" port 911, click OK. Repeat for HTTPS with port 912 \nClick OK to close \"Preferences\" dialog. \n\th) Go to \"File\" menu -> \"Restart\" to restart Eclipse for proxy settings to take effect. ", | ||
304 | "expected_results": "" | ||
305 | }, | ||
306 | "4": { | ||
307 | "action": "Install Eclipse Poky plugins \n\ta) Download https://autobuilder.yocto.io/pub/releases/<yocto-version>/eclipse-plugin/<eclipse-version>/org.yocto.sdk-development-<date>-archive.zip \n\tb) In Eclipse workbench window, go to \"Help\" menu -> \"Install New Software...\" \n\tc) In \"Install\" dialog, click \"Add...\" button \n\td) In \"Add Repository\" dialog, enter \"Eclipse Poky\" for (repository) Name, click \"Archive...\" ", | ||
308 | "expected_results": "" | ||
309 | }, | ||
310 | "5": { | ||
311 | "action": "Install Eclipse Poky plugins (continue) \n\te) In \"Repository archive\" browse dialog, select the downloaded Eclipse Poky repository archive \n\tf) Back in \"Add Repository\" dialog, click \"OK\" \n\tg) Back in \"Install\" dialog, make sure \"Work with:\" is set to \"Eclipse Poky\" repository, tick \"Yocto Project \tDocumentation Plug-in\" and \"Yocto Project SDK Plug-in\", click \"Next >\" and verify plugins/features name/version, \tclick \"Next >\" and accept license agreement, click \"Finish\" \n\th) If \"Security Warning\" dialog appears, click \"OK\" to install unsigned content. \n\ti) In \"Software Updates\" dialog, click \"Yes\" to restart Eclipse to complete Eclipse Poky plugins installation. ", | ||
312 | "expected_results": "" | ||
313 | }, | ||
314 | "6": { | ||
315 | "action": "Setup Eclipse Poky to use SDK \n\ta) In Eclipse workbench window, go to \"Window\" menu -> \"Preferences\". \n\tb) In \"Preferences\" window, go to \"Yocto Project SDK\", in \"Cross Compiler Options\" frame, select \"Standalone pre-\tbuilt toolchain\". ", | ||
316 | "expected_results": "Eclipse Poky plugins installed and running successfully, e.g. observe that \"Yocto Project Tools\" menu is available on Eclipse workbench window." | ||
317 | } | ||
318 | }, | ||
319 | "summary": "Eclipse_Poky_installation_and_setup" | ||
320 | } | ||
321 | } | ||
322 | ] | ||
diff --git a/meta/lib/oeqa/runtime/cases/systemd.py b/meta/lib/oeqa/runtime/cases/systemd.py index 5481e1d840..8bf571663b 100644 --- a/meta/lib/oeqa/runtime/cases/systemd.py +++ b/meta/lib/oeqa/runtime/cases/systemd.py | |||
@@ -145,7 +145,8 @@ class SystemdServiceTests(SystemdTest): | |||
145 | Verify that call-stacks generated by systemd-coredump contain symbolicated call-stacks, | 145 | Verify that call-stacks generated by systemd-coredump contain symbolicated call-stacks, |
146 | extracted from the minidebuginfo metadata (.gnu_debugdata elf section). | 146 | extracted from the minidebuginfo metadata (.gnu_debugdata elf section). |
147 | """ | 147 | """ |
148 | t_thread = threading.Thread(target=self.target.run, args=("ulimit -c unlimited && sleep 1000",)) | 148 | # use "env sleep" instead of "sleep" to avoid calling the shell builtin function |
149 | t_thread = threading.Thread(target=self.target.run, args=("ulimit -c unlimited && env sleep 1000",)) | ||
149 | t_thread.start() | 150 | t_thread.start() |
150 | time.sleep(1) | 151 | time.sleep(1) |
151 | 152 | ||
diff --git a/meta/lib/oeqa/sdk/cases/buildcpio.py b/meta/lib/oeqa/sdk/cases/autotools.py index 51003b19cd..848e9392ec 100644 --- a/meta/lib/oeqa/sdk/cases/buildcpio.py +++ b/meta/lib/oeqa/sdk/cases/autotools.py | |||
@@ -7,13 +7,12 @@ | |||
7 | import os | 7 | import os |
8 | import tempfile | 8 | import tempfile |
9 | import subprocess | 9 | import subprocess |
10 | import unittest | ||
11 | 10 | ||
12 | from oeqa.sdk.case import OESDKTestCase | 11 | from oeqa.sdk.case import OESDKTestCase |
13 | from oeqa.utils.subprocesstweak import errors_have_output | 12 | from oeqa.utils.subprocesstweak import errors_have_output |
14 | errors_have_output() | 13 | errors_have_output() |
15 | 14 | ||
16 | class BuildCpioTest(OESDKTestCase): | 15 | class AutotoolsTest(OESDKTestCase): |
17 | """ | 16 | """ |
18 | Check that autotools will cross-compile correctly. | 17 | Check that autotools will cross-compile correctly. |
19 | """ | 18 | """ |
diff --git a/meta/lib/oeqa/sdk/cases/assimp.py b/meta/lib/oeqa/sdk/cases/cmake.py index e986838aea..db7d826a38 100644 --- a/meta/lib/oeqa/sdk/cases/assimp.py +++ b/meta/lib/oeqa/sdk/cases/cmake.py | |||
@@ -13,7 +13,7 @@ from oeqa.sdk.case import OESDKTestCase | |||
13 | from oeqa.utils.subprocesstweak import errors_have_output | 13 | from oeqa.utils.subprocesstweak import errors_have_output |
14 | errors_have_output() | 14 | errors_have_output() |
15 | 15 | ||
16 | class BuildAssimp(OESDKTestCase): | 16 | class CMakeTest(OESDKTestCase): |
17 | """ | 17 | """ |
18 | Test case to build a project using cmake. | 18 | Test case to build a project using cmake. |
19 | """ | 19 | """ |
@@ -21,14 +21,14 @@ class BuildAssimp(OESDKTestCase): | |||
21 | def setUp(self): | 21 | def setUp(self): |
22 | if not (self.tc.hasHostPackage("nativesdk-cmake") or | 22 | if not (self.tc.hasHostPackage("nativesdk-cmake") or |
23 | self.tc.hasHostPackage("cmake-native")): | 23 | self.tc.hasHostPackage("cmake-native")): |
24 | raise unittest.SkipTest("Needs cmake") | 24 | raise unittest.SkipTest("CMakeTest: needs cmake") |
25 | 25 | ||
26 | def test_assimp(self): | 26 | def test_assimp(self): |
27 | with tempfile.TemporaryDirectory(prefix="assimp", dir=self.tc.sdk_dir) as testdir: | 27 | with tempfile.TemporaryDirectory(prefix="assimp", dir=self.tc.sdk_dir) as testdir: |
28 | tarball = self.fetch(testdir, self.td["DL_DIR"], "https://github.com/assimp/assimp/archive/v5.3.1.tar.gz") | 28 | tarball = self.fetch(testdir, self.td["DL_DIR"], "https://github.com/assimp/assimp/archive/v5.4.1.tar.gz") |
29 | 29 | ||
30 | dirs = {} | 30 | dirs = {} |
31 | dirs["source"] = os.path.join(testdir, "assimp-5.3.1") | 31 | dirs["source"] = os.path.join(testdir, "assimp-5.4.1") |
32 | dirs["build"] = os.path.join(testdir, "build") | 32 | dirs["build"] = os.path.join(testdir, "build") |
33 | dirs["install"] = os.path.join(testdir, "install") | 33 | dirs["install"] = os.path.join(testdir, "install") |
34 | 34 | ||
@@ -39,7 +39,7 @@ class BuildAssimp(OESDKTestCase): | |||
39 | self._run("sed -i '/# ifdef _FILE_OFFSET_BITS/I,+2 d' {source}/contrib/zlib/gzguts.h".format(**dirs)) | 39 | self._run("sed -i '/# ifdef _FILE_OFFSET_BITS/I,+2 d' {source}/contrib/zlib/gzguts.h".format(**dirs)) |
40 | os.makedirs(dirs["build"]) | 40 | os.makedirs(dirs["build"]) |
41 | 41 | ||
42 | self._run("cd {build} && cmake -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON -DASSIMP_BUILD_ZLIB=ON {source}".format(**dirs)) | 42 | self._run("cd {build} && cmake -DASSIMP_WARNINGS_AS_ERRORS=OFF -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON -DASSIMP_BUILD_ZLIB=ON {source}".format(**dirs)) |
43 | self._run("cmake --build {build} -- -j".format(**dirs)) | 43 | self._run("cmake --build {build} -- -j".format(**dirs)) |
44 | self._run("cmake --build {build} --target install -- DESTDIR={install}".format(**dirs)) | 44 | self._run("cmake --build {build} --target install -- DESTDIR={install}".format(**dirs)) |
45 | self.check_elf(os.path.join(dirs["install"], "usr", "local", "lib", "libassimp.so.5.3.0")) | 45 | self.check_elf(os.path.join(dirs["install"], "usr", "local", "lib", "libassimp.so.5.4.1")) |
diff --git a/meta/lib/oeqa/sdk/cases/buildgalculator.py b/meta/lib/oeqa/sdk/cases/gtk3.py index 178f07472d..c329c4bb86 100644 --- a/meta/lib/oeqa/sdk/cases/buildgalculator.py +++ b/meta/lib/oeqa/sdk/cases/gtk3.py | |||
@@ -13,7 +13,7 @@ from oeqa.sdk.case import OESDKTestCase | |||
13 | from oeqa.utils.subprocesstweak import errors_have_output | 13 | from oeqa.utils.subprocesstweak import errors_have_output |
14 | errors_have_output() | 14 | errors_have_output() |
15 | 15 | ||
16 | class GalculatorTest(OESDKTestCase): | 16 | class GTK3Test(OESDKTestCase): |
17 | """ | 17 | """ |
18 | Test that autotools and GTK+ 3 compiles correctly. | 18 | Test that autotools and GTK+ 3 compiles correctly. |
19 | """ | 19 | """ |
diff --git a/meta/lib/oeqa/sdk/cases/buildlzip.py b/meta/lib/oeqa/sdk/cases/makefile.py index b4b7d85b88..2ff54ce25f 100644 --- a/meta/lib/oeqa/sdk/cases/buildlzip.py +++ b/meta/lib/oeqa/sdk/cases/makefile.py | |||
@@ -4,12 +4,12 @@ | |||
4 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
5 | # | 5 | # |
6 | 6 | ||
7 | import os, tempfile, subprocess, unittest | 7 | import os, tempfile, subprocess |
8 | from oeqa.sdk.case import OESDKTestCase | 8 | from oeqa.sdk.case import OESDKTestCase |
9 | from oeqa.utils.subprocesstweak import errors_have_output | 9 | from oeqa.utils.subprocesstweak import errors_have_output |
10 | errors_have_output() | 10 | errors_have_output() |
11 | 11 | ||
12 | class BuildLzipTest(OESDKTestCase): | 12 | class MakefileTest(OESDKTestCase): |
13 | """ | 13 | """ |
14 | Test that "plain" compilation works, using just $CC $CFLAGS etc. | 14 | Test that "plain" compilation works, using just $CC $CFLAGS etc. |
15 | """ | 15 | """ |
diff --git a/meta/lib/oeqa/sdk/cases/maturin.py b/meta/lib/oeqa/sdk/cases/maturin.py index ea10f568b2..20f6b553d0 100644 --- a/meta/lib/oeqa/sdk/cases/maturin.py +++ b/meta/lib/oeqa/sdk/cases/maturin.py | |||
@@ -8,7 +8,6 @@ import os | |||
8 | import shutil | 8 | import shutil |
9 | import unittest | 9 | import unittest |
10 | 10 | ||
11 | from oeqa.core.utils.path import remove_safe | ||
12 | from oeqa.sdk.case import OESDKTestCase | 11 | from oeqa.sdk.case import OESDKTestCase |
13 | from oeqa.utils.subprocesstweak import errors_have_output | 12 | from oeqa.utils.subprocesstweak import errors_have_output |
14 | 13 | ||
diff --git a/meta/lib/oeqa/sdk/cases/buildepoxy.py b/meta/lib/oeqa/sdk/cases/meson.py index 147ee3e0ee..be53df204a 100644 --- a/meta/lib/oeqa/sdk/cases/buildepoxy.py +++ b/meta/lib/oeqa/sdk/cases/meson.py | |||
@@ -13,14 +13,14 @@ from oeqa.sdk.case import OESDKTestCase | |||
13 | from oeqa.utils.subprocesstweak import errors_have_output | 13 | from oeqa.utils.subprocesstweak import errors_have_output |
14 | errors_have_output() | 14 | errors_have_output() |
15 | 15 | ||
16 | class EpoxyTest(OESDKTestCase): | 16 | class MesonTest(OESDKTestCase): |
17 | """ | 17 | """ |
18 | Test that Meson builds correctly. | 18 | Test that Meson builds correctly. |
19 | """ | 19 | """ |
20 | def setUp(self): | 20 | def setUp(self): |
21 | if not (self.tc.hasHostPackage("nativesdk-meson") or | 21 | if not (self.tc.hasHostPackage("nativesdk-meson") or |
22 | self.tc.hasHostPackage("meson-native")): | 22 | self.tc.hasHostPackage("meson-native")): |
23 | raise unittest.SkipTest("EpoxyTest class: SDK doesn't contain Meson") | 23 | raise unittest.SkipTest("MesonTest: needs meson") |
24 | 24 | ||
25 | def test_epoxy(self): | 25 | def test_epoxy(self): |
26 | with tempfile.TemporaryDirectory(prefix="epoxy", dir=self.tc.sdk_dir) as testdir: | 26 | with tempfile.TemporaryDirectory(prefix="epoxy", dir=self.tc.sdk_dir) as testdir: |
diff --git a/meta/lib/oeqa/sdk/cases/python.py b/meta/lib/oeqa/sdk/cases/python.py index 5ea992b9f3..51284949f5 100644 --- a/meta/lib/oeqa/sdk/cases/python.py +++ b/meta/lib/oeqa/sdk/cases/python.py | |||
@@ -4,7 +4,7 @@ | |||
4 | # SPDX-License-Identifier: MIT | 4 | # SPDX-License-Identifier: MIT |
5 | # | 5 | # |
6 | 6 | ||
7 | import subprocess, unittest | 7 | import unittest |
8 | from oeqa.sdk.case import OESDKTestCase | 8 | from oeqa.sdk.case import OESDKTestCase |
9 | 9 | ||
10 | from oeqa.utils.subprocesstweak import errors_have_output | 10 | from oeqa.utils.subprocesstweak import errors_have_output |
diff --git a/meta/lib/oeqa/sdk/cases/rust.py b/meta/lib/oeqa/sdk/cases/rust.py index f5d437bb19..a54245851b 100644 --- a/meta/lib/oeqa/sdk/cases/rust.py +++ b/meta/lib/oeqa/sdk/cases/rust.py | |||
@@ -8,7 +8,6 @@ import os | |||
8 | import shutil | 8 | import shutil |
9 | import unittest | 9 | import unittest |
10 | 10 | ||
11 | from oeqa.core.utils.path import remove_safe | ||
12 | from oeqa.sdk.case import OESDKTestCase | 11 | from oeqa.sdk.case import OESDKTestCase |
13 | 12 | ||
14 | from oeqa.utils.subprocesstweak import errors_have_output | 13 | from oeqa.utils.subprocesstweak import errors_have_output |
diff --git a/meta/lib/oeqa/sdkext/cases/devtool.py b/meta/lib/oeqa/sdkext/cases/devtool.py index 5ffb732556..d0746e68eb 100644 --- a/meta/lib/oeqa/sdkext/cases/devtool.py +++ b/meta/lib/oeqa/sdkext/cases/devtool.py | |||
@@ -69,10 +69,9 @@ class DevtoolTest(OESDKExtTestCase): | |||
69 | self._test_devtool_build(self.myapp_cmake_dst) | 69 | self._test_devtool_build(self.myapp_cmake_dst) |
70 | 70 | ||
71 | def test_extend_autotools_recipe_creation(self): | 71 | def test_extend_autotools_recipe_creation(self): |
72 | req = 'https://github.com/rdfa/librdfa' | 72 | recipe = "test-dbus-wait" |
73 | recipe = "librdfa" | 73 | self._run('devtool sdk-install dbus') |
74 | self._run('devtool sdk-install libxml2') | 74 | self._run('devtool add %s https://git.yoctoproject.org/git/dbus-wait' % (recipe) ) |
75 | self._run('devtool add %s %s' % (recipe, req) ) | ||
76 | try: | 75 | try: |
77 | self._run('devtool build %s' % recipe) | 76 | self._run('devtool build %s' % recipe) |
78 | finally: | 77 | finally: |
diff --git a/meta/lib/oeqa/selftest/cases/debuginfod.py b/meta/lib/oeqa/selftest/cases/debuginfod.py index 505b4be837..46c0cd87bb 100644 --- a/meta/lib/oeqa/selftest/cases/debuginfod.py +++ b/meta/lib/oeqa/selftest/cases/debuginfod.py | |||
@@ -62,7 +62,7 @@ class Debuginfod(OESelftestTestCase): | |||
62 | 62 | ||
63 | raise TimeoutError("Cannot connect debuginfod, still %d scan jobs running" % latest) | 63 | raise TimeoutError("Cannot connect debuginfod, still %d scan jobs running" % latest) |
64 | 64 | ||
65 | def start_debuginfod(self): | 65 | def start_debuginfod(self, feed_dir): |
66 | # We assume that the caller has already bitbake'd elfutils-native:do_addto_recipe_sysroot | 66 | # We assume that the caller has already bitbake'd elfutils-native:do_addto_recipe_sysroot |
67 | 67 | ||
68 | # Save some useful paths for later | 68 | # Save some useful paths for later |
@@ -82,7 +82,7 @@ class Debuginfod(OESelftestTestCase): | |||
82 | # Disable rescanning, this is a one-shot test | 82 | # Disable rescanning, this is a one-shot test |
83 | "--rescan-time=0", | 83 | "--rescan-time=0", |
84 | "--groom-time=0", | 84 | "--groom-time=0", |
85 | get_bb_var("DEPLOY_DIR"), | 85 | feed_dir, |
86 | ] | 86 | ] |
87 | 87 | ||
88 | format = get_bb_var("PACKAGE_CLASSES").split()[0] | 88 | format = get_bb_var("PACKAGE_CLASSES").split()[0] |
@@ -114,11 +114,12 @@ class Debuginfod(OESelftestTestCase): | |||
114 | self.write_config(""" | 114 | self.write_config(""" |
115 | TMPDIR = "${TOPDIR}/tmp-debuginfod" | 115 | TMPDIR = "${TOPDIR}/tmp-debuginfod" |
116 | DISTRO_FEATURES:append = " debuginfod" | 116 | DISTRO_FEATURES:append = " debuginfod" |
117 | INHERIT += "localpkgfeed" | ||
117 | """) | 118 | """) |
118 | bitbake("elfutils-native:do_addto_recipe_sysroot xz xz:do_package") | 119 | bitbake("elfutils-native:do_addto_recipe_sysroot xz xz:do_package xz:do_localpkgfeed") |
119 | 120 | ||
120 | try: | 121 | try: |
121 | self.start_debuginfod() | 122 | self.start_debuginfod(get_bb_var("LOCALPKGFEED_DIR", "xz")) |
122 | 123 | ||
123 | env = os.environ.copy() | 124 | env = os.environ.copy() |
124 | env["DEBUGINFOD_URLS"] = "http://localhost:%d/" % self.port | 125 | env["DEBUGINFOD_URLS"] = "http://localhost:%d/" % self.port |
@@ -141,12 +142,13 @@ DISTRO_FEATURES:append = " debuginfod" | |||
141 | self.write_config(""" | 142 | self.write_config(""" |
142 | TMPDIR = "${TOPDIR}/tmp-debuginfod" | 143 | TMPDIR = "${TOPDIR}/tmp-debuginfod" |
143 | DISTRO_FEATURES:append = " debuginfod" | 144 | DISTRO_FEATURES:append = " debuginfod" |
145 | INHERIT += "localpkgfeed" | ||
144 | CORE_IMAGE_EXTRA_INSTALL += "elfutils xz" | 146 | CORE_IMAGE_EXTRA_INSTALL += "elfutils xz" |
145 | """) | 147 | """) |
146 | bitbake("core-image-minimal elfutils-native:do_addto_recipe_sysroot") | 148 | bitbake("core-image-minimal elfutils-native:do_addto_recipe_sysroot xz:do_localpkgfeed") |
147 | 149 | ||
148 | try: | 150 | try: |
149 | self.start_debuginfod() | 151 | self.start_debuginfod(get_bb_var("LOCALPKGFEED_DIR", "xz")) |
150 | 152 | ||
151 | with runqemu("core-image-minimal", runqemuparams="nographic") as qemu: | 153 | with runqemu("core-image-minimal", runqemuparams="nographic") as qemu: |
152 | cmd = "DEBUGINFOD_URLS=http://%s:%d/ debuginfod-find debuginfo /usr/bin/xz" % (qemu.server_ip, self.port) | 154 | cmd = "DEBUGINFOD_URLS=http://%s:%d/ debuginfod-find debuginfo /usr/bin/xz" % (qemu.server_ip, self.port) |
diff --git a/meta/lib/oeqa/selftest/cases/devtool.py b/meta/lib/oeqa/selftest/cases/devtool.py index 882225dde3..1cafb922ea 100644 --- a/meta/lib/oeqa/selftest/cases/devtool.py +++ b/meta/lib/oeqa/selftest/cases/devtool.py | |||
@@ -753,6 +753,25 @@ class DevtoolModifyTests(DevtoolBase): | |||
753 | result = runCmd('devtool status') | 753 | result = runCmd('devtool status') |
754 | self.assertNotIn('mdadm', result.output) | 754 | self.assertNotIn('mdadm', result.output) |
755 | 755 | ||
756 | def test_devtool_modify_go(self): | ||
757 | import oe.path | ||
758 | from tempfile import TemporaryDirectory | ||
759 | with TemporaryDirectory(prefix='devtoolqa') as tempdir: | ||
760 | self.track_for_cleanup(self.workspacedir) | ||
761 | self.add_command_to_tearDown('bitbake -c clean go-helloworld') | ||
762 | self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') | ||
763 | result = runCmd('devtool modify go-helloworld -x %s' % tempdir) | ||
764 | self.assertExists( | ||
765 | oe.path.join(tempdir, 'src', 'golang.org', 'x', 'example', 'go.mod'), | ||
766 | 'Extracted source could not be found' | ||
767 | ) | ||
768 | self.assertExists( | ||
769 | oe.path.join(self.workspacedir, 'conf', 'layer.conf'), | ||
770 | 'Workspace directory not created' | ||
771 | ) | ||
772 | matches = glob.glob(oe.path.join(self.workspacedir, 'appends', 'go-helloworld_*.bbappend')) | ||
773 | self.assertTrue(matches, 'bbappend not created %s' % result.output) | ||
774 | |||
756 | def test_devtool_buildclean(self): | 775 | def test_devtool_buildclean(self): |
757 | def assertFile(path, *paths): | 776 | def assertFile(path, *paths): |
758 | f = os.path.join(path, *paths) | 777 | f = os.path.join(path, *paths) |
@@ -879,13 +898,8 @@ class DevtoolModifyTests(DevtoolBase): | |||
879 | self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe) | 898 | self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe) |
880 | self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') | 899 | self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') |
881 | result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir)) | 900 | result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir)) |
882 | srcfile = os.path.join(tempdir, 'oe-local-files/share/dot.bashrc') | 901 | srcfile = os.path.join(tempdir, 'share/dot.bashrc') |
883 | srclink = os.path.join(tempdir, 'share/dot.bashrc') | ||
884 | self.assertExists(srcfile, 'Extracted source could not be found') | 902 | self.assertExists(srcfile, 'Extracted source could not be found') |
885 | if os.path.islink(srclink) and os.path.exists(srclink) and os.path.samefile(srcfile, srclink): | ||
886 | correct_symlink = True | ||
887 | self.assertTrue(correct_symlink, 'Source symlink to oe-local-files is broken') | ||
888 | |||
889 | matches = glob.glob(os.path.join(self.workspacedir, 'appends', '%s_*.bbappend' % testrecipe)) | 903 | matches = glob.glob(os.path.join(self.workspacedir, 'appends', '%s_*.bbappend' % testrecipe)) |
890 | self.assertTrue(matches, 'bbappend not created') | 904 | self.assertTrue(matches, 'bbappend not created') |
891 | # Test devtool status | 905 | # Test devtool status |
@@ -956,9 +970,9 @@ class DevtoolModifyTests(DevtoolBase): | |||
956 | # others git:// in SRC_URI | 970 | # others git:// in SRC_URI |
957 | # contains a patch | 971 | # contains a patch |
958 | testrecipe = 'hello-rs' | 972 | testrecipe = 'hello-rs' |
959 | bb_vars = get_bb_vars(['SRC_URI', 'FILE', 'WORKDIR', 'CARGO_HOME'], testrecipe) | 973 | bb_vars = get_bb_vars(['SRC_URI', 'FILE', 'UNPACKDIR', 'CARGO_HOME'], testrecipe) |
960 | recipefile = bb_vars['FILE'] | 974 | recipefile = bb_vars['FILE'] |
961 | workdir = bb_vars['WORKDIR'] | 975 | unpackdir = bb_vars['UNPACKDIR'] |
962 | cargo_home = bb_vars['CARGO_HOME'] | 976 | cargo_home = bb_vars['CARGO_HOME'] |
963 | src_uri = bb_vars['SRC_URI'].split() | 977 | src_uri = bb_vars['SRC_URI'].split() |
964 | self.assertTrue(src_uri[0].startswith('git://'), | 978 | self.assertTrue(src_uri[0].startswith('git://'), |
@@ -1029,7 +1043,7 @@ class DevtoolModifyTests(DevtoolBase): | |||
1029 | self.assertEqual(parms['type'], 'git-dependency', 'git dependencies uri should have "type=git-dependency"') | 1043 | self.assertEqual(parms['type'], 'git-dependency', 'git dependencies uri should have "type=git-dependency"') |
1030 | raw_url = raw_url.replace("git://", '%s://' % parms['protocol']) | 1044 | raw_url = raw_url.replace("git://", '%s://' % parms['protocol']) |
1031 | patch_line = '[patch."%s"]' % raw_url | 1045 | patch_line = '[patch."%s"]' % raw_url |
1032 | path_patched = os.path.join(workdir, parms['destsuffix']) | 1046 | path_patched = os.path.join(unpackdir, parms['destsuffix']) |
1033 | path_override_line = '%s = { path = "%s" }' % (parms['name'], path_patched) | 1047 | path_override_line = '%s = { path = "%s" }' % (parms['name'], path_patched) |
1034 | # Would have been better to use tomllib to read this file :/ | 1048 | # Would have been better to use tomllib to read this file :/ |
1035 | self.assertIn(patch_line, cargo_config_contents) | 1049 | self.assertIn(patch_line, cargo_config_contents) |
@@ -1278,7 +1292,7 @@ class DevtoolUpdateTests(DevtoolBase): | |||
1278 | with open(bbappendfile, 'r') as f: | 1292 | with open(bbappendfile, 'r') as f: |
1279 | self.assertEqual(expectedlines, f.readlines()) | 1293 | self.assertEqual(expectedlines, f.readlines()) |
1280 | # Drop new commit and check patch gets deleted | 1294 | # Drop new commit and check patch gets deleted |
1281 | result = runCmd('git reset HEAD^', cwd=tempsrcdir) | 1295 | result = runCmd('git reset HEAD^ --hard', cwd=tempsrcdir) |
1282 | result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir)) | 1296 | result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir)) |
1283 | self.assertNotExists(patchfile, 'Patch file not deleted') | 1297 | self.assertNotExists(patchfile, 'Patch file not deleted') |
1284 | expectedlines2 = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n', | 1298 | expectedlines2 = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n', |
@@ -1287,6 +1301,7 @@ class DevtoolUpdateTests(DevtoolBase): | |||
1287 | self.assertEqual(expectedlines2, f.readlines()) | 1301 | self.assertEqual(expectedlines2, f.readlines()) |
1288 | # Put commit back and check we can run it if layer isn't in bblayers.conf | 1302 | # Put commit back and check we can run it if layer isn't in bblayers.conf |
1289 | os.remove(bbappendfile) | 1303 | os.remove(bbappendfile) |
1304 | result = runCmd("sed 's!\\(#define VERSION\\W*\"[^\"]*\\)\"!\\1-custom\"!' -i ReadMe.c", cwd=tempsrcdir) | ||
1290 | result = runCmd('git commit -a -m "Add our custom version"', cwd=tempsrcdir) | 1305 | result = runCmd('git commit -a -m "Add our custom version"', cwd=tempsrcdir) |
1291 | result = runCmd('bitbake-layers remove-layer %s' % templayerdir, cwd=self.builddir) | 1306 | result = runCmd('bitbake-layers remove-layer %s' % templayerdir, cwd=self.builddir) |
1292 | result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir)) | 1307 | result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir)) |
@@ -1361,7 +1376,7 @@ class DevtoolUpdateTests(DevtoolBase): | |||
1361 | with open(bbappendfile, 'r') as f: | 1376 | with open(bbappendfile, 'r') as f: |
1362 | self.assertEqual(expectedlines, set(f.readlines())) | 1377 | self.assertEqual(expectedlines, set(f.readlines())) |
1363 | # Drop new commit and check SRCREV changes | 1378 | # Drop new commit and check SRCREV changes |
1364 | result = runCmd('git reset HEAD^', cwd=tempsrcdir) | 1379 | result = runCmd('git reset HEAD^ --hard', cwd=tempsrcdir) |
1365 | result = runCmd('devtool update-recipe -m srcrev %s -a %s' % (testrecipe, templayerdir)) | 1380 | result = runCmd('devtool update-recipe -m srcrev %s -a %s' % (testrecipe, templayerdir)) |
1366 | self.assertNotExists(os.path.join(appenddir, testrecipe), 'Patch directory should not be created') | 1381 | self.assertNotExists(os.path.join(appenddir, testrecipe), 'Patch directory should not be created') |
1367 | result = runCmd('git rev-parse HEAD', cwd=tempsrcdir) | 1382 | result = runCmd('git rev-parse HEAD', cwd=tempsrcdir) |
@@ -1373,6 +1388,7 @@ class DevtoolUpdateTests(DevtoolBase): | |||
1373 | self.assertEqual(expectedlines, set(f.readlines())) | 1388 | self.assertEqual(expectedlines, set(f.readlines())) |
1374 | # Put commit back and check we can run it if layer isn't in bblayers.conf | 1389 | # Put commit back and check we can run it if layer isn't in bblayers.conf |
1375 | os.remove(bbappendfile) | 1390 | os.remove(bbappendfile) |
1391 | result = runCmd('echo "# Additional line" >> Makefile.am', cwd=tempsrcdir) | ||
1376 | result = runCmd('git commit -a -m "Change the Makefile"', cwd=tempsrcdir) | 1392 | result = runCmd('git commit -a -m "Change the Makefile"', cwd=tempsrcdir) |
1377 | result = runCmd('bitbake-layers remove-layer %s' % templayerdir, cwd=self.builddir) | 1393 | result = runCmd('bitbake-layers remove-layer %s' % templayerdir, cwd=self.builddir) |
1378 | result = runCmd('devtool update-recipe -m srcrev %s -a %s' % (testrecipe, templayerdir)) | 1394 | result = runCmd('devtool update-recipe -m srcrev %s -a %s' % (testrecipe, templayerdir)) |
@@ -1404,11 +1420,12 @@ class DevtoolUpdateTests(DevtoolBase): | |||
1404 | # Try building just to ensure we haven't broken that | 1420 | # Try building just to ensure we haven't broken that |
1405 | bitbake("%s" % testrecipe) | 1421 | bitbake("%s" % testrecipe) |
1406 | # Edit / commit local source | 1422 | # Edit / commit local source |
1407 | runCmd('echo "/* Foobar */" >> oe-local-files/makedevs.c', cwd=tempdir) | 1423 | runCmd('echo "/* Foobar */" >> makedevs.c', cwd=tempdir) |
1408 | runCmd('echo "Foo" > oe-local-files/new-local', cwd=tempdir) | 1424 | runCmd('echo "Foo" > new-local', cwd=tempdir) |
1409 | runCmd('echo "Bar" > new-file', cwd=tempdir) | 1425 | runCmd('echo "Bar" > new-file', cwd=tempdir) |
1410 | runCmd('git add new-file', cwd=tempdir) | 1426 | runCmd('git add new-file', cwd=tempdir) |
1411 | runCmd('git commit -m "Add new file"', cwd=tempdir) | 1427 | runCmd('git commit -m "Add new file"', cwd=tempdir) |
1428 | runCmd('git add new-local', cwd=tempdir) | ||
1412 | runCmd('devtool update-recipe %s' % testrecipe) | 1429 | runCmd('devtool update-recipe %s' % testrecipe) |
1413 | expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)), | 1430 | expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)), |
1414 | (' M', '.*/makedevs/makedevs.c$'), | 1431 | (' M', '.*/makedevs/makedevs.c$'), |
@@ -1434,8 +1451,8 @@ class DevtoolUpdateTests(DevtoolBase): | |||
1434 | self.assertExists(local_file, 'File makedevs.c not created') | 1451 | self.assertExists(local_file, 'File makedevs.c not created') |
1435 | self.assertExists(patchfile, 'File new_local not created') | 1452 | self.assertExists(patchfile, 'File new_local not created') |
1436 | 1453 | ||
1437 | def test_devtool_update_recipe_local_files_2(self): | 1454 | def _test_devtool_update_recipe_local_files_2(self): |
1438 | """Check local source files support when oe-local-files is in Git""" | 1455 | """Check local source files support when editing local files in Git""" |
1439 | testrecipe = 'devtool-test-local' | 1456 | testrecipe = 'devtool-test-local' |
1440 | recipefile = get_bb_var('FILE', testrecipe) | 1457 | recipefile = get_bb_var('FILE', testrecipe) |
1441 | recipedir = os.path.dirname(recipefile) | 1458 | recipedir = os.path.dirname(recipefile) |
@@ -1450,17 +1467,13 @@ class DevtoolUpdateTests(DevtoolBase): | |||
1450 | result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir)) | 1467 | result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir)) |
1451 | # Check git repo | 1468 | # Check git repo |
1452 | self._check_src_repo(tempdir) | 1469 | self._check_src_repo(tempdir) |
1453 | # Add oe-local-files to Git | ||
1454 | runCmd('rm oe-local-files/.gitignore', cwd=tempdir) | ||
1455 | runCmd('git add oe-local-files', cwd=tempdir) | ||
1456 | runCmd('git commit -m "Add local sources"', cwd=tempdir) | ||
1457 | # Edit / commit local sources | 1470 | # Edit / commit local sources |
1458 | runCmd('echo "# Foobar" >> oe-local-files/file1', cwd=tempdir) | 1471 | runCmd('echo "# Foobar" >> file1', cwd=tempdir) |
1459 | runCmd('git commit -am "Edit existing file"', cwd=tempdir) | 1472 | runCmd('git commit -am "Edit existing file"', cwd=tempdir) |
1460 | runCmd('git rm oe-local-files/file2', cwd=tempdir) | 1473 | runCmd('git rm file2', cwd=tempdir) |
1461 | runCmd('git commit -m"Remove file"', cwd=tempdir) | 1474 | runCmd('git commit -m"Remove file"', cwd=tempdir) |
1462 | runCmd('echo "Foo" > oe-local-files/new-local', cwd=tempdir) | 1475 | runCmd('echo "Foo" > new-local', cwd=tempdir) |
1463 | runCmd('git add oe-local-files/new-local', cwd=tempdir) | 1476 | runCmd('git add new-local', cwd=tempdir) |
1464 | runCmd('git commit -m "Add new local file"', cwd=tempdir) | 1477 | runCmd('git commit -m "Add new local file"', cwd=tempdir) |
1465 | runCmd('echo "Gar" > new-file', cwd=tempdir) | 1478 | runCmd('echo "Gar" > new-file', cwd=tempdir) |
1466 | runCmd('git add new-file', cwd=tempdir) | 1479 | runCmd('git add new-file', cwd=tempdir) |
@@ -1469,7 +1482,7 @@ class DevtoolUpdateTests(DevtoolBase): | |||
1469 | os.path.dirname(recipefile)) | 1482 | os.path.dirname(recipefile)) |
1470 | # Checkout unmodified file to working copy -> devtool should still pick | 1483 | # Checkout unmodified file to working copy -> devtool should still pick |
1471 | # the modified version from HEAD | 1484 | # the modified version from HEAD |
1472 | runCmd('git checkout HEAD^ -- oe-local-files/file1', cwd=tempdir) | 1485 | runCmd('git checkout HEAD^ -- file1', cwd=tempdir) |
1473 | runCmd('devtool update-recipe %s' % testrecipe) | 1486 | runCmd('devtool update-recipe %s' % testrecipe) |
1474 | expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)), | 1487 | expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)), |
1475 | (' M', '.*/file1$'), | 1488 | (' M', '.*/file1$'), |
@@ -1544,7 +1557,7 @@ class DevtoolUpdateTests(DevtoolBase): | |||
1544 | # (don't bother with cleaning the recipe on teardown, we won't be building it) | 1557 | # (don't bother with cleaning the recipe on teardown, we won't be building it) |
1545 | result = runCmd('devtool modify %s' % testrecipe) | 1558 | result = runCmd('devtool modify %s' % testrecipe) |
1546 | # Modify one file | 1559 | # Modify one file |
1547 | runCmd('echo "Another line" >> file2', cwd=os.path.join(self.workspacedir, 'sources', testrecipe, 'oe-local-files')) | 1560 | runCmd('echo "Another line" >> file2', cwd=os.path.join(self.workspacedir, 'sources', testrecipe)) |
1548 | self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile))) | 1561 | self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile))) |
1549 | result = runCmd('devtool update-recipe %s' % testrecipe) | 1562 | result = runCmd('devtool update-recipe %s' % testrecipe) |
1550 | expected_status = [(' M', '.*/%s/file2$' % testrecipe)] | 1563 | expected_status = [(' M', '.*/%s/file2$' % testrecipe)] |
diff --git a/meta/lib/oeqa/selftest/cases/layerappend.py b/meta/lib/oeqa/selftest/cases/layerappend.py index 379ed589ad..64b17117cc 100644 --- a/meta/lib/oeqa/selftest/cases/layerappend.py +++ b/meta/lib/oeqa/selftest/cases/layerappend.py | |||
@@ -37,7 +37,7 @@ FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:" | |||
37 | SRC_URI:append = " file://appendtest.txt" | 37 | SRC_URI:append = " file://appendtest.txt" |
38 | 38 | ||
39 | sysroot_stage_all:append() { | 39 | sysroot_stage_all:append() { |
40 | install -m 644 ${WORKDIR}/appendtest.txt ${SYSROOT_DESTDIR}/ | 40 | install -m 644 ${UNPACKDIR}/appendtest.txt ${SYSROOT_DESTDIR}/ |
41 | } | 41 | } |
42 | 42 | ||
43 | """ | 43 | """ |
diff --git a/meta/lib/oeqa/selftest/cases/oescripts.py b/meta/lib/oeqa/selftest/cases/oescripts.py index f69efccfee..fcfe54af74 100644 --- a/meta/lib/oeqa/selftest/cases/oescripts.py +++ b/meta/lib/oeqa/selftest/cases/oescripts.py | |||
@@ -175,7 +175,7 @@ class OEListPackageconfigTests(OESelftestTestCase): | |||
175 | def test_packageconfig_flags_option_all(self): | 175 | def test_packageconfig_flags_option_all(self): |
176 | results = runCmd('%s/contrib/list-packageconfig-flags.py -a' % self.scripts_dir) | 176 | results = runCmd('%s/contrib/list-packageconfig-flags.py -a' % self.scripts_dir) |
177 | expected_endlines = [] | 177 | expected_endlines = [] |
178 | expected_endlines.append("pinentry-1.2.1") | 178 | expected_endlines.append("pinentry-1.3.0") |
179 | expected_endlines.append("PACKAGECONFIG ncurses") | 179 | expected_endlines.append("PACKAGECONFIG ncurses") |
180 | expected_endlines.append("PACKAGECONFIG[qt] --enable-pinentry-qt, --disable-pinentry-qt, qtbase-native qtbase") | 180 | expected_endlines.append("PACKAGECONFIG[qt] --enable-pinentry-qt, --disable-pinentry-qt, qtbase-native qtbase") |
181 | expected_endlines.append("PACKAGECONFIG[gtk2] --enable-pinentry-gtk2, --disable-pinentry-gtk2, gtk+ glib-2.0") | 181 | expected_endlines.append("PACKAGECONFIG[gtk2] --enable-pinentry-gtk2, --disable-pinentry-gtk2, gtk+ glib-2.0") |
diff --git a/meta/lib/oeqa/selftest/cases/recipetool.py b/meta/lib/oeqa/selftest/cases/recipetool.py index 126906df50..42202b7831 100644 --- a/meta/lib/oeqa/selftest/cases/recipetool.py +++ b/meta/lib/oeqa/selftest/cases/recipetool.py | |||
@@ -138,7 +138,7 @@ class RecipetoolAppendTests(RecipetoolBase): | |||
138 | '\n', | 138 | '\n', |
139 | 'do_install:append() {\n', | 139 | 'do_install:append() {\n', |
140 | ' install -d ${D}${%s}\n' % dirname, | 140 | ' install -d ${D}${%s}\n' % dirname, |
141 | ' install -m 0755 ${WORKDIR}/%s ${D}${%s}/ls\n' % (testfile2name, dirname), | 141 | ' install -m 0755 ${UNPACKDIR}/%s ${D}${%s}/ls\n' % (testfile2name, dirname), |
142 | '}\n'] | 142 | '}\n'] |
143 | self._try_recipetool_appendfile('coreutils', lspath, testfile2, '-r coreutils', expectedlines, [testfile2name]) | 143 | self._try_recipetool_appendfile('coreutils', lspath, testfile2, '-r coreutils', expectedlines, [testfile2name]) |
144 | # Now try bbappending the same file again, contents should not change | 144 | # Now try bbappending the same file again, contents should not change |
@@ -164,7 +164,7 @@ class RecipetoolAppendTests(RecipetoolBase): | |||
164 | '\n', | 164 | '\n', |
165 | 'do_install:append() {\n', | 165 | 'do_install:append() {\n', |
166 | ' install -d ${D}${datadir}\n', | 166 | ' install -d ${D}${datadir}\n', |
167 | ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n', | 167 | ' install -m 0644 ${UNPACKDIR}/testfile ${D}${datadir}/something\n', |
168 | '}\n'] | 168 | '}\n'] |
169 | self._try_recipetool_appendfile('netbase', '/usr/share/something', self.testfile, '-r netbase', expectedlines, ['testfile']) | 169 | self._try_recipetool_appendfile('netbase', '/usr/share/something', self.testfile, '-r netbase', expectedlines, ['testfile']) |
170 | # Try adding another file, this time where the source file is executable | 170 | # Try adding another file, this time where the source file is executable |
@@ -179,8 +179,8 @@ class RecipetoolAppendTests(RecipetoolBase): | |||
179 | '\n', | 179 | '\n', |
180 | 'do_install:append() {\n', | 180 | 'do_install:append() {\n', |
181 | ' install -d ${D}${datadir}\n', | 181 | ' install -d ${D}${datadir}\n', |
182 | ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n', | 182 | ' install -m 0644 ${UNPACKDIR}/testfile ${D}${datadir}/something\n', |
183 | ' install -m 0755 ${WORKDIR}/%s ${D}${datadir}/scriptname\n' % testfile2name, | 183 | ' install -m 0755 ${UNPACKDIR}/%s ${D}${datadir}/scriptname\n' % testfile2name, |
184 | '}\n'] | 184 | '}\n'] |
185 | self._try_recipetool_appendfile('netbase', '/usr/share/scriptname', testfile2, '-r netbase', expectedlines, ['testfile', testfile2name]) | 185 | self._try_recipetool_appendfile('netbase', '/usr/share/scriptname', testfile2, '-r netbase', expectedlines, ['testfile', testfile2name]) |
186 | 186 | ||
@@ -192,7 +192,7 @@ class RecipetoolAppendTests(RecipetoolBase): | |||
192 | '\n', | 192 | '\n', |
193 | 'do_install:append() {\n', | 193 | 'do_install:append() {\n', |
194 | ' install -d ${D}${bindir}\n', | 194 | ' install -d ${D}${bindir}\n', |
195 | ' install -m 0755 ${WORKDIR}/testfile ${D}${bindir}/selftest-recipetool-testbin\n', | 195 | ' install -m 0755 ${UNPACKDIR}/testfile ${D}${bindir}/selftest-recipetool-testbin\n', |
196 | '}\n'] | 196 | '}\n'] |
197 | _, output = self._try_recipetool_appendfile('netbase', '/usr/bin/selftest-recipetool-testbin', self.testfile, '-r netbase', expectedlines, ['testfile']) | 197 | _, output = self._try_recipetool_appendfile('netbase', '/usr/bin/selftest-recipetool-testbin', self.testfile, '-r netbase', expectedlines, ['testfile']) |
198 | self.assertNotIn('WARNING: ', output) | 198 | self.assertNotIn('WARNING: ', output) |
@@ -207,7 +207,7 @@ class RecipetoolAppendTests(RecipetoolBase): | |||
207 | '\n', | 207 | '\n', |
208 | 'do_install:append:mymachine() {\n', | 208 | 'do_install:append:mymachine() {\n', |
209 | ' install -d ${D}${datadir}\n', | 209 | ' install -d ${D}${datadir}\n', |
210 | ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n', | 210 | ' install -m 0644 ${UNPACKDIR}/testfile ${D}${datadir}/something\n', |
211 | '}\n'] | 211 | '}\n'] |
212 | _, output = self._try_recipetool_appendfile('netbase', '/usr/share/something', self.testfile, '-r netbase -m mymachine', expectedlines, ['mymachine/testfile']) | 212 | _, output = self._try_recipetool_appendfile('netbase', '/usr/share/something', self.testfile, '-r netbase -m mymachine', expectedlines, ['mymachine/testfile']) |
213 | self.assertNotIn('WARNING: ', output) | 213 | self.assertNotIn('WARNING: ', output) |
@@ -241,7 +241,7 @@ class RecipetoolAppendTests(RecipetoolBase): | |||
241 | '\n', | 241 | '\n', |
242 | 'do_install:append() {\n', | 242 | 'do_install:append() {\n', |
243 | ' install -d ${D}${datadir}\n', | 243 | ' install -d ${D}${datadir}\n', |
244 | ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-subdir\n', | 244 | ' install -m 0644 ${UNPACKDIR}/testfile ${D}${datadir}/selftest-replaceme-subdir\n', |
245 | '}\n'] | 245 | '}\n'] |
246 | _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-subdir', self.testfile, '', expectedlines, ['testfile']) | 246 | _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-subdir', self.testfile, '', expectedlines, ['testfile']) |
247 | self.assertNotIn('WARNING: ', output) | 247 | self.assertNotIn('WARNING: ', output) |
@@ -268,7 +268,7 @@ class RecipetoolAppendTests(RecipetoolBase): | |||
268 | '\n', | 268 | '\n', |
269 | 'do_install:append() {\n', | 269 | 'do_install:append() {\n', |
270 | ' install -d ${D}${sysconfdir}\n', | 270 | ' install -d ${D}${sysconfdir}\n', |
271 | ' install -m 0644 ${WORKDIR}/testfile ${D}${sysconfdir}/selftest-replaceme-patched\n', | 271 | ' install -m 0644 ${UNPACKDIR}/testfile ${D}${sysconfdir}/selftest-replaceme-patched\n', |
272 | '}\n'] | 272 | '}\n'] |
273 | _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/etc/selftest-replaceme-patched', self.testfile, '', expectedlines, ['testfile']) | 273 | _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/etc/selftest-replaceme-patched', self.testfile, '', expectedlines, ['testfile']) |
274 | for line in output.splitlines(): | 274 | for line in output.splitlines(): |
@@ -286,7 +286,7 @@ class RecipetoolAppendTests(RecipetoolBase): | |||
286 | '\n', | 286 | '\n', |
287 | 'do_install:append() {\n', | 287 | 'do_install:append() {\n', |
288 | ' install -d ${D}${datadir}\n', | 288 | ' install -d ${D}${datadir}\n', |
289 | ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-scripted\n', | 289 | ' install -m 0644 ${UNPACKDIR}/testfile ${D}${datadir}/selftest-replaceme-scripted\n', |
290 | '}\n'] | 290 | '}\n'] |
291 | _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-scripted', self.testfile, '', expectedlines, ['testfile']) | 291 | _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-scripted', self.testfile, '', expectedlines, ['testfile']) |
292 | self.assertNotIn('WARNING: ', output) | 292 | self.assertNotIn('WARNING: ', output) |
@@ -309,7 +309,7 @@ class RecipetoolAppendTests(RecipetoolBase): | |||
309 | '\n', | 309 | '\n', |
310 | 'do_install:append() {\n', | 310 | 'do_install:append() {\n', |
311 | ' install -d ${D}${datadir}\n', | 311 | ' install -d ${D}${datadir}\n', |
312 | ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-postinst\n', | 312 | ' install -m 0644 ${UNPACKDIR}/testfile ${D}${datadir}/selftest-replaceme-postinst\n', |
313 | '}\n'] | 313 | '}\n'] |
314 | _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-postinst', self.testfile, '-r selftest-recipetool-appendfile', expectedlines, ['testfile']) | 314 | _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-postinst', self.testfile, '-r selftest-recipetool-appendfile', expectedlines, ['testfile']) |
315 | 315 | ||
diff --git a/meta/lib/oeqa/selftest/cases/recipeutils.py b/meta/lib/oeqa/selftest/cases/recipeutils.py index 2cb4445f81..9949737172 100644 --- a/meta/lib/oeqa/selftest/cases/recipeutils.py +++ b/meta/lib/oeqa/selftest/cases/recipeutils.py | |||
@@ -72,7 +72,7 @@ class RecipeUtilsTests(OESelftestTestCase): | |||
72 | expected_patch = """ | 72 | expected_patch = """ |
73 | --- a/recipes-test/recipeutils/recipeutils-test_1.2.bb | 73 | --- a/recipes-test/recipeutils/recipeutils-test_1.2.bb |
74 | +++ b/recipes-test/recipeutils/recipeutils-test_1.2.bb | 74 | +++ b/recipes-test/recipeutils/recipeutils-test_1.2.bb |
75 | @@ -8,6 +8,4 @@ | 75 | @@ -11,6 +11,4 @@ |
76 | 76 | ||
77 | BBCLASSEXTEND = "native nativesdk" | 77 | BBCLASSEXTEND = "native nativesdk" |
78 | 78 | ||
@@ -97,7 +97,7 @@ class RecipeUtilsTests(OESelftestTestCase): | |||
97 | expected_patch = """ | 97 | expected_patch = """ |
98 | --- a/recipes-test/recipeutils/recipeutils-test_1.2.bb | 98 | --- a/recipes-test/recipeutils/recipeutils-test_1.2.bb |
99 | +++ b/recipes-test/recipeutils/recipeutils-test_1.2.bb | 99 | +++ b/recipes-test/recipeutils/recipeutils-test_1.2.bb |
100 | @@ -8,6 +8,3 @@ | 100 | @@ -11,6 +11,3 @@ |
101 | 101 | ||
102 | BBCLASSEXTEND = "native nativesdk" | 102 | BBCLASSEXTEND = "native nativesdk" |
103 | 103 | ||
diff --git a/meta/lib/oeqa/selftest/cases/reproducible.py b/meta/lib/oeqa/selftest/cases/reproducible.py index 80e830136f..97a9c3da90 100644 --- a/meta/lib/oeqa/selftest/cases/reproducible.py +++ b/meta/lib/oeqa/selftest/cases/reproducible.py | |||
@@ -16,6 +16,8 @@ import os | |||
16 | import datetime | 16 | import datetime |
17 | 17 | ||
18 | exclude_packages = [ | 18 | exclude_packages = [ |
19 | 'rust-rustdoc', | ||
20 | 'rust-dbg' | ||
19 | ] | 21 | ] |
20 | 22 | ||
21 | def is_excluded(package): | 23 | def is_excluded(package): |
diff --git a/meta/lib/oeqa/selftest/cases/runtime_test.py b/meta/lib/oeqa/selftest/cases/runtime_test.py index 12000aac16..13aa5f16c9 100644 --- a/meta/lib/oeqa/selftest/cases/runtime_test.py +++ b/meta/lib/oeqa/selftest/cases/runtime_test.py | |||
@@ -273,7 +273,7 @@ TEST_RUNQEMUPARAMS += " slirp" | |||
273 | import subprocess, os | 273 | import subprocess, os |
274 | 274 | ||
275 | distro = oe.lsb.distro_identifier() | 275 | distro = oe.lsb.distro_identifier() |
276 | if distro and (distro in ['debian-9', 'debian-10', 'centos-7', 'centos-8', 'ubuntu-16.04', 'ubuntu-18.04'] or | 276 | if distro and (distro in ['debian-9', 'debian-10', 'centos-7', 'centos-8', 'centos-9', 'ubuntu-16.04', 'ubuntu-18.04'] or |
277 | distro.startswith('almalinux') or distro.startswith('rocky')): | 277 | distro.startswith('almalinux') or distro.startswith('rocky')): |
278 | self.skipTest('virgl headless cannot be tested with %s' %(distro)) | 278 | self.skipTest('virgl headless cannot be tested with %s' %(distro)) |
279 | 279 | ||
diff --git a/meta/lib/oeqa/selftest/cases/rust.py b/meta/lib/oeqa/selftest/cases/rust.py index ad14189c6d..4ccbe9867b 100644 --- a/meta/lib/oeqa/selftest/cases/rust.py +++ b/meta/lib/oeqa/selftest/cases/rust.py | |||
@@ -210,9 +210,8 @@ class RustSelfTestSystemEmulated(OESelftestTestCase, OEPTestResultTestCase): | |||
210 | tmpdir = get_bb_var("TMPDIR", "rust") | 210 | tmpdir = get_bb_var("TMPDIR", "rust") |
211 | 211 | ||
212 | # Set path for target-poky-linux-gcc, RUST_TARGET_PATH and hosttools. | 212 | # Set path for target-poky-linux-gcc, RUST_TARGET_PATH and hosttools. |
213 | cmd = " export PATH=%s/recipe-sysroot-native/usr/bin:$PATH;" % rustlibpath | 213 | cmd = "export TARGET_VENDOR=\"-poky\";" |
214 | cmd = cmd + " export TARGET_VENDOR=\"-poky\";" | 214 | cmd = cmd + " export PATH=%s/recipe-sysroot-native/usr/bin/python3-native:%s/recipe-sysroot-native/usr/bin:%s/recipe-sysroot-native/usr/bin/%s:%s/hosttools:$PATH;" % (rustlibpath, rustlibpath, rustlibpath, tcpath, tmpdir) |
215 | cmd = cmd + " export PATH=%s/recipe-sysroot-native/usr/bin/%s:%s/hosttools:$PATH;" % (rustlibpath, tcpath, tmpdir) | ||
216 | cmd = cmd + " export RUST_TARGET_PATH=%s/rust-targets;" % rustlibpath | 215 | cmd = cmd + " export RUST_TARGET_PATH=%s/rust-targets;" % rustlibpath |
217 | # Trigger testing. | 216 | # Trigger testing. |
218 | cmd = cmd + " export TEST_DEVICE_ADDR=\"%s:12345\";" % qemu.ip | 217 | cmd = cmd + " export TEST_DEVICE_ADDR=\"%s:12345\";" % qemu.ip |
diff --git a/meta/lib/oeqa/selftest/cases/sstatetests.py b/meta/lib/oeqa/selftest/cases/sstatetests.py index 86d6cd7464..94ad6e38b6 100644 --- a/meta/lib/oeqa/selftest/cases/sstatetests.py +++ b/meta/lib/oeqa/selftest/cases/sstatetests.py | |||
@@ -917,15 +917,25 @@ INHERIT += "base-do-configure-modified" | |||
917 | """, | 917 | """, |
918 | expected_sametmp_output, expected_difftmp_output) | 918 | expected_sametmp_output, expected_difftmp_output) |
919 | 919 | ||
920 | @OETestTag("yocto-mirrors") | 920 | class SStateCheckObjectPresence(SStateBase): |
921 | class SStateMirrors(SStateBase): | 921 | def check_bb_output(self, output, targets, exceptions, check_cdn): |
922 | def check_bb_output(self, output, exceptions, check_cdn): | ||
923 | def is_exception(object, exceptions): | 922 | def is_exception(object, exceptions): |
924 | for e in exceptions: | 923 | for e in exceptions: |
925 | if re.search(e, object): | 924 | if re.search(e, object): |
926 | return True | 925 | return True |
927 | return False | 926 | return False |
928 | 927 | ||
928 | # sstate is checked for existence of these, but they never get written out to begin with | ||
929 | exceptions += ["{}.*image_qa".format(t) for t in targets.split()] | ||
930 | exceptions += ["{}.*deploy_source_date_epoch".format(t) for t in targets.split()] | ||
931 | exceptions += ["{}.*image_complete".format(t) for t in targets.split()] | ||
932 | exceptions += ["linux-yocto.*shared_workdir"] | ||
933 | # these get influnced by IMAGE_FSTYPES tweaks in yocto-autobuilder-helper's config.json (on x86-64) | ||
934 | # additionally, they depend on noexec (thus, absent stamps) package, install, etc. image tasks, | ||
935 | # which makes tracing other changes difficult | ||
936 | exceptions += ["{}.*create_spdx".format(t) for t in targets.split()] | ||
937 | exceptions += ["{}.*create_runtime_spdx".format(t) for t in targets.split()] | ||
938 | |||
929 | output_l = output.splitlines() | 939 | output_l = output.splitlines() |
930 | for l in output_l: | 940 | for l in output_l: |
931 | if l.startswith("Sstate summary"): | 941 | if l.startswith("Sstate summary"): |
@@ -960,18 +970,9 @@ class SStateMirrors(SStateBase): | |||
960 | self.assertEqual(len(failed_urls), missing_objects, "Amount of reported missing objects does not match failed URLs: {}\nFailed URLs:\n{}\nFetcher diagnostics:\n{}".format(missing_objects, "\n".join(failed_urls), "\n".join(failed_urls_extrainfo))) | 970 | self.assertEqual(len(failed_urls), missing_objects, "Amount of reported missing objects does not match failed URLs: {}\nFailed URLs:\n{}\nFetcher diagnostics:\n{}".format(missing_objects, "\n".join(failed_urls), "\n".join(failed_urls_extrainfo))) |
961 | self.assertEqual(len(failed_urls), 0, "Missing objects in the cache:\n{}\nFetcher diagnostics:\n{}".format("\n".join(failed_urls), "\n".join(failed_urls_extrainfo))) | 971 | self.assertEqual(len(failed_urls), 0, "Missing objects in the cache:\n{}\nFetcher diagnostics:\n{}".format("\n".join(failed_urls), "\n".join(failed_urls_extrainfo))) |
962 | 972 | ||
973 | @OETestTag("yocto-mirrors") | ||
974 | class SStateMirrors(SStateCheckObjectPresence): | ||
963 | def run_test(self, machine, targets, exceptions, check_cdn = True, ignore_errors = False): | 975 | def run_test(self, machine, targets, exceptions, check_cdn = True, ignore_errors = False): |
964 | # sstate is checked for existence of these, but they never get written out to begin with | ||
965 | exceptions += ["{}.*image_qa".format(t) for t in targets.split()] | ||
966 | exceptions += ["{}.*deploy_source_date_epoch".format(t) for t in targets.split()] | ||
967 | exceptions += ["{}.*image_complete".format(t) for t in targets.split()] | ||
968 | exceptions += ["linux-yocto.*shared_workdir"] | ||
969 | # these get influnced by IMAGE_FSTYPES tweaks in yocto-autobuilder-helper's config.json (on x86-64) | ||
970 | # additionally, they depend on noexec (thus, absent stamps) package, install, etc. image tasks, | ||
971 | # which makes tracing other changes difficult | ||
972 | exceptions += ["{}.*create_spdx".format(t) for t in targets.split()] | ||
973 | exceptions += ["{}.*create_runtime_spdx".format(t) for t in targets.split()] | ||
974 | |||
975 | if check_cdn: | 976 | if check_cdn: |
976 | self.config_sstate(True) | 977 | self.config_sstate(True) |
977 | self.append_config(""" | 978 | self.append_config(""" |
@@ -987,7 +988,7 @@ MACHINE = "{}" | |||
987 | bitbake("-S none {}".format(targets)) | 988 | bitbake("-S none {}".format(targets)) |
988 | if ignore_errors: | 989 | if ignore_errors: |
989 | return | 990 | return |
990 | self.check_bb_output(result.output, exceptions, check_cdn) | 991 | self.check_bb_output(result.output, targets, exceptions, check_cdn) |
991 | 992 | ||
992 | def test_cdn_mirror_qemux86_64(self): | 993 | def test_cdn_mirror_qemux86_64(self): |
993 | exceptions = [] | 994 | exceptions = [] |
diff --git a/meta/lib/patchtest/repo.py b/meta/lib/patchtest/repo.py index d3788f466d..5f361ac500 100644 --- a/meta/lib/patchtest/repo.py +++ b/meta/lib/patchtest/repo.py | |||
@@ -11,6 +11,7 @@ | |||
11 | import os | 11 | import os |
12 | import utils | 12 | import utils |
13 | import logging | 13 | import logging |
14 | import git | ||
14 | from patch import PatchTestPatch | 15 | from patch import PatchTestPatch |
15 | 16 | ||
16 | logger = logging.getLogger('patchtest') | 17 | logger = logging.getLogger('patchtest') |
@@ -21,15 +22,17 @@ class PatchTestRepo(object): | |||
21 | # prefixes used for temporal branches/stashes | 22 | # prefixes used for temporal branches/stashes |
22 | prefix = 'patchtest' | 23 | prefix = 'patchtest' |
23 | 24 | ||
25 | |||
24 | def __init__(self, patch, repodir, commit=None, branch=None): | 26 | def __init__(self, patch, repodir, commit=None, branch=None): |
25 | self._repodir = repodir | 27 | self._repodir = repodir |
28 | self._repo = git.Repo.init(repodir) | ||
26 | self._patch = PatchTestPatch(patch) | 29 | self._patch = PatchTestPatch(patch) |
27 | self._current_branch = self._get_current_branch() | 30 | self._current_branch = self._repo.active_branch.name |
28 | 31 | ||
29 | # targeted branch defined on the patch may be invalid, so make sure there | 32 | # targeted branch defined on the patch may be invalid, so make sure there |
30 | # is a corresponding remote branch | 33 | # is a corresponding remote branch |
31 | valid_patch_branch = None | 34 | valid_patch_branch = None |
32 | if self._patch.branch in self.upstream_branches(): | 35 | if self._patch.branch in self._repo.branches: |
33 | valid_patch_branch = self._patch.branch | 36 | valid_patch_branch = self._patch.branch |
34 | 37 | ||
35 | # Target Branch | 38 | # Target Branch |
@@ -52,22 +55,19 @@ class PatchTestRepo(object): | |||
52 | 55 | ||
53 | self._workingbranch = "%s_%s" % (PatchTestRepo.prefix, os.getpid()) | 56 | self._workingbranch = "%s_%s" % (PatchTestRepo.prefix, os.getpid()) |
54 | 57 | ||
55 | # create working branch | 58 | # create working branch. Use the '-B' flag so that we just |
56 | self._exec({'cmd': ['git', 'checkout', '-b', self._workingbranch, self._commit]}) | 59 | # check out the existing one if it's there |
60 | self._repo.git.execute(['git', 'checkout', '-B', self._workingbranch, self._commit]) | ||
57 | 61 | ||
58 | self._patchmerged = False | 62 | self._patchmerged = False |
59 | 63 | ||
60 | # Check if patch can be merged using git-am | 64 | # Check if patch can be merged using git-am |
61 | self._patchcanbemerged = True | 65 | self._patchcanbemerged = True |
62 | try: | 66 | try: |
63 | self._exec({'cmd': ['git', 'am', '--keep-cr'], 'input': self._patch.contents}) | 67 | # Make sure to get the absolute path of the file |
64 | except utils.CmdException as ce: | 68 | self._repo.git.execute(['git', 'apply', '--check', os.path.abspath(self._patch.path)], with_exceptions=True) |
65 | self._exec({'cmd': ['git', 'am', '--abort']}) | 69 | except git.exc.GitCommandError as ce: |
66 | self._patchcanbemerged = False | 70 | self._patchcanbemerged = False |
67 | finally: | ||
68 | # if patch was applied, remove it | ||
69 | if self._patchcanbemerged: | ||
70 | self._exec({'cmd':['git', 'reset', '--hard', self._commit]}) | ||
71 | 71 | ||
72 | # for debugging purposes, print all repo parameters | 72 | # for debugging purposes, print all repo parameters |
73 | logger.debug("Parameters") | 73 | logger.debug("Parameters") |
@@ -97,78 +97,24 @@ class PatchTestRepo(object): | |||
97 | def canbemerged(self): | 97 | def canbemerged(self): |
98 | return self._patchcanbemerged | 98 | return self._patchcanbemerged |
99 | 99 | ||
100 | def _exec(self, cmds): | ||
101 | _cmds = [] | ||
102 | if isinstance(cmds, dict): | ||
103 | _cmds.append(cmds) | ||
104 | elif isinstance(cmds, list): | ||
105 | _cmds = cmds | ||
106 | else: | ||
107 | raise utils.CmdException({'cmd':str(cmds)}) | ||
108 | |||
109 | results = [] | ||
110 | cmdfailure = False | ||
111 | try: | ||
112 | results = utils.exec_cmds(_cmds, self._repodir) | ||
113 | except utils.CmdException as ce: | ||
114 | cmdfailure = True | ||
115 | raise ce | ||
116 | finally: | ||
117 | if cmdfailure: | ||
118 | for cmd in _cmds: | ||
119 | logger.debug("CMD: %s" % ' '.join(cmd['cmd'])) | ||
120 | else: | ||
121 | for result in results: | ||
122 | cmd, rc, stdout, stderr = ' '.join(result['cmd']), result['returncode'], result['stdout'], result['stderr'] | ||
123 | logger.debug("CMD: %s RCODE: %s STDOUT: %s STDERR: %s" % (cmd, rc, stdout, stderr)) | ||
124 | |||
125 | return results | ||
126 | |||
127 | def _get_current_branch(self, commit='HEAD'): | ||
128 | cmd = {'cmd':['git', 'rev-parse', '--abbrev-ref', commit]} | ||
129 | cb = self._exec(cmd)[0]['stdout'] | ||
130 | if cb == commit: | ||
131 | logger.warning('You may be detached so patchtest will checkout to master after execution') | ||
132 | cb = 'master' | ||
133 | return cb | ||
134 | |||
135 | def _get_commitid(self, commit): | 100 | def _get_commitid(self, commit): |
136 | 101 | ||
137 | if not commit: | 102 | if not commit: |
138 | return None | 103 | return None |
139 | 104 | ||
140 | try: | 105 | try: |
141 | cmd = {'cmd':['git', 'rev-parse', '--short', commit]} | 106 | return self._repo.rev_parse(commit).hexsha |
142 | return self._exec(cmd)[0]['stdout'] | 107 | except Exception as e: |
143 | except utils.CmdException as ce: | 108 | print(f"Couldn't find commit {commit} in repo") |
144 | # try getting the commit under any remotes | ||
145 | cmd = {'cmd':['git', 'remote']} | ||
146 | remotes = self._exec(cmd)[0]['stdout'] | ||
147 | for remote in remotes.splitlines(): | ||
148 | cmd = {'cmd':['git', 'rev-parse', '--short', '%s/%s' % (remote, commit)]} | ||
149 | try: | ||
150 | return self._exec(cmd)[0]['stdout'] | ||
151 | except utils.CmdException: | ||
152 | pass | ||
153 | 109 | ||
154 | return None | 110 | return None |
155 | 111 | ||
156 | def upstream_branches(self): | ||
157 | cmd = {'cmd':['git', 'branch', '--remotes']} | ||
158 | remote_branches = self._exec(cmd)[0]['stdout'] | ||
159 | |||
160 | # just get the names, without the remote name | ||
161 | branches = set(branch.split('/')[-1] for branch in remote_branches.splitlines()) | ||
162 | return branches | ||
163 | |||
164 | def merge(self): | 112 | def merge(self): |
165 | if self._patchcanbemerged: | 113 | if self._patchcanbemerged: |
166 | self._exec({'cmd': ['git', 'am', '--keep-cr'], | 114 | self._repo.git.execute(['git', 'am', '--keep-cr', os.path.abspath(self._patch.path)]) |
167 | 'input': self._patch.contents, | ||
168 | 'updateenv': {'PTRESOURCE':self._patch.path}}) | ||
169 | self._patchmerged = True | 115 | self._patchmerged = True |
170 | 116 | ||
171 | def clean(self): | 117 | def clean(self): |
172 | self._exec({'cmd':['git', 'checkout', '%s' % self._current_branch]}) | 118 | self._repo.git.execute(['git', 'checkout', self._current_branch]) |
173 | self._exec({'cmd':['git', 'branch', '-D', self._workingbranch]}) | 119 | self._repo.git.execute(['git', 'branch', '-D', self._workingbranch]) |
174 | self._patchmerged = False | 120 | self._patchmerged = False |
diff --git a/meta/lib/patchtest/requirements.txt b/meta/lib/patchtest/requirements.txt index ba55ff905e..4247b91f09 100644 --- a/meta/lib/patchtest/requirements.txt +++ b/meta/lib/patchtest/requirements.txt | |||
@@ -1,5 +1,6 @@ | |||
1 | boto3 | 1 | boto3 |
2 | git-pw>=2.5.0 | 2 | git-pw>=2.5.0 |
3 | GitPython | ||
3 | jinja2 | 4 | jinja2 |
4 | pylint | 5 | pylint |
5 | pyparsing>=3.0.9 | 6 | pyparsing>=3.0.9 |
diff --git a/meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.fail b/meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.fail index 80f409e952..854d7eb8c7 100644 --- a/meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.fail +++ b/meta/lib/patchtest/selftest/files/TestMbox.test_bugzilla_entry_format.fail | |||
@@ -1,25 +1,26 @@ | |||
1 | From fdfd605e565d874502522c4b70b786c8c5aa0bad Mon Sep 17 00:00:00 2001 | 1 | From f06e14633723c1e78bc7a4b0fd0d3b79d09f0c68 Mon Sep 17 00:00:00 2001 |
2 | From: name@somedomain.com <email@address.com> | 2 | From: name@somedomain.com <email@address.com> |
3 | Date: Fri, 17 Feb 2017 16:29:21 -0600 | 3 | Date: Thu, 2 May 2024 10:21:45 -0400 |
4 | Subject: [PATCH] README: adds 'foo' to the header | 4 | Subject: [PATCH] README.OE-Core.md: Add foo to header |
5 | 5 | ||
6 | This test patch adds 'foo' to the header | 6 | This test patch adds 'foo' to the header of README.OE-Core.md |
7 | 7 | ||
8 | [YOCTO 1234] | 8 | [YOCTO 1234] |
9 | 9 | ||
10 | Signed-off-by: Daniela Plascencia <daniela.plascencia@linux.intel.com> | 10 | Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> |
11 | --- | 11 | --- |
12 | README | 1 + | 12 | README.OE-Core.md | 1 + |
13 | 1 file changed, 1 insertion(+) | 13 | 1 file changed, 1 insertion(+) |
14 | 14 | ||
15 | diff --git a/README b/README | 15 | diff --git a/README.OE-Core.md b/README.OE-Core.md |
16 | index 521916cd4f..cdf29dcea3 100644 | 16 | index 687c58e410c..9d863891134 100644 |
17 | --- a/README | 17 | --- a/README.OE-Core.md |
18 | +++ b/README | 18 | +++ b/README.OE-Core.md |
19 | @@ -1,3 +1,4 @@ | 19 | @@ -1,3 +1,4 @@ |
20 | +**** FOO **** | 20 | +**** FOO **** |
21 | OpenEmbedded-Core | 21 | OpenEmbedded-Core |
22 | ================= | 22 | ================= |
23 | |||
24 | -- | ||
25 | 2.44.0 | ||
23 | 26 | ||
24 | -- | ||
25 | 2.11.0 | ||
diff --git a/meta/lib/patchtest/utils.py b/meta/lib/patchtest/utils.py index dd0abc22d9..8eddf3e85f 100644 --- a/meta/lib/patchtest/utils.py +++ b/meta/lib/patchtest/utils.py | |||
@@ -14,109 +14,6 @@ import logging | |||
14 | import re | 14 | import re |
15 | import mailbox | 15 | import mailbox |
16 | 16 | ||
17 | class CmdException(Exception): | ||
18 | """ Simple exception class where its attributes are the ones passed when instantiated """ | ||
19 | def __init__(self, cmd): | ||
20 | self._cmd = cmd | ||
21 | def __getattr__(self, name): | ||
22 | value = None | ||
23 | if self._cmd.has_key(name): | ||
24 | value = self._cmd[name] | ||
25 | return value | ||
26 | |||
27 | def exec_cmd(cmd, cwd, ignore_error=False, input=None, strip=True, updateenv={}): | ||
28 | """ | ||
29 | Input: | ||
30 | |||
31 | cmd: dict containing the following keys: | ||
32 | |||
33 | cmd : the command itself as an array of strings | ||
34 | ignore_error: if False, no exception is raised | ||
35 | strip: indicates if strip is done on the output (stdout and stderr) | ||
36 | input: input data to the command (stdin) | ||
37 | updateenv: environment variables to be appended to the current | ||
38 | process environment variables | ||
39 | |||
40 | NOTE: keys 'ignore_error' and 'input' are optional; if not included, | ||
41 | the defaults are the ones specify in the arguments | ||
42 | cwd: directory where commands are executed | ||
43 | ignore_error: raise CmdException if command fails to execute and | ||
44 | this value is False | ||
45 | input: input data (stdin) for the command | ||
46 | |||
47 | Output: dict containing the following keys: | ||
48 | |||
49 | cmd: the same as input | ||
50 | ignore_error: the same as input | ||
51 | strip: the same as input | ||
52 | input: the same as input | ||
53 | stdout: Standard output after command's execution | ||
54 | stderr: Standard error after command's execution | ||
55 | returncode: Return code after command's execution | ||
56 | |||
57 | """ | ||
58 | cmddefaults = { | ||
59 | 'cmd':'', | ||
60 | 'ignore_error':ignore_error, | ||
61 | 'strip':strip, | ||
62 | 'input':input, | ||
63 | 'updateenv':updateenv, | ||
64 | } | ||
65 | |||
66 | # update input values if necessary | ||
67 | cmddefaults.update(cmd) | ||
68 | |||
69 | _cmd = cmddefaults | ||
70 | |||
71 | if not _cmd['cmd']: | ||
72 | raise CmdException({'cmd':None, 'stderr':'no command given'}) | ||
73 | |||
74 | # update the environment | ||
75 | env = os.environ | ||
76 | env.update(_cmd['updateenv']) | ||
77 | |||
78 | _command = [e for e in _cmd['cmd']] | ||
79 | p = subprocess.Popen(_command, | ||
80 | stdin=subprocess.PIPE, | ||
81 | stdout=subprocess.PIPE, | ||
82 | stderr=subprocess.PIPE, | ||
83 | universal_newlines=True, | ||
84 | cwd=cwd, | ||
85 | env=env) | ||
86 | |||
87 | # execute the command and strip output | ||
88 | (_stdout, _stderr) = p.communicate(_cmd['input']) | ||
89 | if _cmd['strip']: | ||
90 | _stdout, _stderr = map(str.strip, [_stdout, _stderr]) | ||
91 | |||
92 | # generate the result | ||
93 | result = _cmd | ||
94 | result.update({'cmd':_command,'stdout':_stdout,'stderr':_stderr,'returncode':p.returncode}) | ||
95 | |||
96 | # launch exception if necessary | ||
97 | if not _cmd['ignore_error'] and p.returncode: | ||
98 | raise CmdException(result) | ||
99 | |||
100 | return result | ||
101 | |||
102 | def exec_cmds(cmds, cwd): | ||
103 | """ Executes commands | ||
104 | |||
105 | Input: | ||
106 | cmds: Array of commands | ||
107 | cwd: directory where commands are executed | ||
108 | |||
109 | Output: Array of output commands | ||
110 | """ | ||
111 | results = [] | ||
112 | _cmds = cmds | ||
113 | |||
114 | for cmd in _cmds: | ||
115 | result = exec_cmd(cmd, cwd) | ||
116 | results.append(result) | ||
117 | |||
118 | return results | ||
119 | |||
120 | def logger_create(name): | 17 | def logger_create(name): |
121 | logger = logging.getLogger(name) | 18 | logger = logging.getLogger(name) |
122 | loggerhandler = logging.StreamHandler() | 19 | loggerhandler = logging.StreamHandler() |
@@ -125,20 +22,6 @@ def logger_create(name): | |||
125 | logger.setLevel(logging.INFO) | 22 | logger.setLevel(logging.INFO) |
126 | return logger | 23 | return logger |
127 | 24 | ||
128 | def get_subject_prefix(path): | ||
129 | prefix = "" | ||
130 | mbox = mailbox.mbox(path) | ||
131 | |||
132 | if len(mbox): | ||
133 | subject = mbox[0]['subject'] | ||
134 | if subject: | ||
135 | pattern = re.compile(r"(\[.*\])", re.DOTALL) | ||
136 | match = pattern.search(subject) | ||
137 | if match: | ||
138 | prefix = match.group(1) | ||
139 | |||
140 | return prefix | ||
141 | |||
142 | def valid_branch(branch): | 25 | def valid_branch(branch): |
143 | """ Check if branch is valid name """ | 26 | """ Check if branch is valid name """ |
144 | lbranch = branch.lower() | 27 | lbranch = branch.lower() |
@@ -153,7 +36,17 @@ def valid_branch(branch): | |||
153 | 36 | ||
154 | def get_branch(path): | 37 | def get_branch(path): |
155 | """ Get the branch name from mbox """ | 38 | """ Get the branch name from mbox """ |
156 | fullprefix = get_subject_prefix(path) | 39 | fullprefix = "" |
40 | mbox = mailbox.mbox(path) | ||
41 | |||
42 | if len(mbox): | ||
43 | subject = mbox[0]['subject'] | ||
44 | if subject: | ||
45 | pattern = re.compile(r"(\[.*\])", re.DOTALL) | ||
46 | match = pattern.search(subject) | ||
47 | if match: | ||
48 | fullprefix = match.group(1) | ||
49 | |||
157 | branch, branches, valid_branches = None, [], [] | 50 | branch, branches, valid_branches = None, [], [] |
158 | 51 | ||
159 | if fullprefix: | 52 | if fullprefix: |
diff --git a/meta/recipes-bsp/alsa-state/alsa-state.bb b/meta/recipes-bsp/alsa-state/alsa-state.bb index 83384f2daf..099fbd3b9d 100644 --- a/meta/recipes-bsp/alsa-state/alsa-state.bb +++ b/meta/recipes-bsp/alsa-state/alsa-state.bb | |||
@@ -21,7 +21,8 @@ SRC_URI = "\ | |||
21 | file://alsa-state-init \ | 21 | file://alsa-state-init \ |
22 | " | 22 | " |
23 | 23 | ||
24 | S = "${WORKDIR}" | 24 | S = "${WORKDIR}/sources" |
25 | UNPACKDIR = "${S}" | ||
25 | 26 | ||
26 | # As the recipe doesn't inherit systemd.bbclass, we need to set this variable | 27 | # As the recipe doesn't inherit systemd.bbclass, we need to set this variable |
27 | # manually to avoid unnecessary postinst/preinst generated. | 28 | # manually to avoid unnecessary postinst/preinst generated. |
diff --git a/meta/recipes-bsp/formfactor/formfactor_0.0.bb b/meta/recipes-bsp/formfactor/formfactor_0.0.bb index 1eaf30746b..4714bb2e5e 100644 --- a/meta/recipes-bsp/formfactor/formfactor_0.0.bb +++ b/meta/recipes-bsp/formfactor/formfactor_0.0.bb | |||
@@ -7,7 +7,9 @@ LICENSE = "MIT" | |||
7 | LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" | 7 | LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" |
8 | 8 | ||
9 | SRC_URI = "file://config file://machconfig" | 9 | SRC_URI = "file://config file://machconfig" |
10 | S = "${WORKDIR}" | 10 | |
11 | S = "${WORKDIR}/sources" | ||
12 | UNPACKDIR = "${S}" | ||
11 | 13 | ||
12 | PACKAGE_ARCH = "${MACHINE_ARCH}" | 14 | PACKAGE_ARCH = "${MACHINE_ARCH}" |
13 | INHIBIT_DEFAULT_DEPS = "1" | 15 | INHIBIT_DEFAULT_DEPS = "1" |
diff --git a/meta/recipes-bsp/gnu-efi/gnu-efi/parallel-make-archives.patch b/meta/recipes-bsp/gnu-efi/gnu-efi/0001-Fix-parallel-make-failure-for-archives.patch index 63d9b6fc31..3c11baca0c 100644 --- a/meta/recipes-bsp/gnu-efi/gnu-efi/parallel-make-archives.patch +++ b/meta/recipes-bsp/gnu-efi/gnu-efi/0001-Fix-parallel-make-failure-for-archives.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From f56ddb00a656af2e84f839738fad19909ac65047 Mon Sep 17 00:00:00 2001 | 1 | From 70e30774debb9ab5d53a29c183f86fc569661b7c Mon Sep 17 00:00:00 2001 |
2 | From: Saul Wold <sgw@linux.intel.com> | 2 | From: Saul Wold <sgw@linux.intel.com> |
3 | Date: Sun, 9 Mar 2014 15:22:15 +0200 | 3 | Date: Sun, 9 Mar 2014 15:22:15 +0200 |
4 | Subject: [PATCH] Fix parallel make failure for archives | 4 | Subject: [PATCH] Fix parallel make failure for archives |
@@ -19,16 +19,15 @@ Signed-off-by: Darren Hart <dvhart@linux.intel.com> | |||
19 | Signed-off-by: California Sullivan <california.l.sullivan@intel.com> | 19 | Signed-off-by: California Sullivan <california.l.sullivan@intel.com> |
20 | [Rebased for 3.0.8] | 20 | [Rebased for 3.0.8] |
21 | Signed-off-by: Yi Zhao <yi.zhao@windriver.com> | 21 | Signed-off-by: Yi Zhao <yi.zhao@windriver.com> |
22 | |||
23 | --- | 22 | --- |
24 | lib/Makefile | 2 +- | 23 | lib/Makefile | 2 +- |
25 | 1 file changed, 1 insertion(+), 1 deletion(-) | 24 | 1 file changed, 1 insertion(+), 1 deletion(-) |
26 | 25 | ||
27 | diff --git a/lib/Makefile b/lib/Makefile | 26 | diff --git a/lib/Makefile b/lib/Makefile |
28 | index 1fc6a47..54b0ca7 100644 | 27 | index ec1f9e3..79a794d 100644 |
29 | --- a/lib/Makefile | 28 | --- a/lib/Makefile |
30 | +++ b/lib/Makefile | 29 | +++ b/lib/Makefile |
31 | @@ -77,7 +77,7 @@ libsubdirs: | 30 | @@ -75,7 +75,7 @@ libsubdirs: |
32 | $(OBJS): libsubdirs | 31 | $(OBJS): libsubdirs |
33 | 32 | ||
34 | libefi.a: $(OBJS) | 33 | libefi.a: $(OBJS) |
@@ -36,4 +35,7 @@ index 1fc6a47..54b0ca7 100644 | |||
36 | + $(AR) $(ARFLAGS) $@ $(OBJS) | 35 | + $(AR) $(ARFLAGS) $@ $(OBJS) |
37 | 36 | ||
38 | clean: | 37 | clean: |
39 | rm -f libefi.a *~ $(OBJS) */*.o | 38 | @rm -vf libefi.a *~ $(OBJS) */*.o |
39 | -- | ||
40 | 2.25.1 | ||
41 | |||
diff --git a/meta/recipes-bsp/gnu-efi/gnu-efi/0001-riscv64-adjust-type-definitions.patch b/meta/recipes-bsp/gnu-efi/gnu-efi/0001-riscv64-adjust-type-definitions.patch deleted file mode 100644 index 3475606264..0000000000 --- a/meta/recipes-bsp/gnu-efi/gnu-efi/0001-riscv64-adjust-type-definitions.patch +++ /dev/null | |||
@@ -1,34 +0,0 @@ | |||
1 | From 1de509497826faa0ad84b82f5e2c3d21ee613459 Mon Sep 17 00:00:00 2001 | ||
2 | From: Moody Liu <mooodyhunter@outlook.com> | ||
3 | Date: Sat, 13 May 2023 17:39:16 +0100 | ||
4 | Subject: [PATCH] riscv64: adjust type definitions | ||
5 | |||
6 | CHAR8 needs to be defined while BOOLEAN should be removed | ||
7 | here to prevent typedef conflicts | ||
8 | |||
9 | Upstream-Status: Backport [https://sourceforge.net/p/gnu-efi/code/ci/1de509497826faa0ad84b82f5e2c3d21ee613459/] | ||
10 | Signed-off-by: Moody Liu <mooodyhunter@outlook.com> | ||
11 | --- | ||
12 | inc/riscv64/efibind.h | 4 +--- | ||
13 | 1 file changed, 1 insertion(+), 3 deletions(-) | ||
14 | |||
15 | diff --git a/inc/riscv64/efibind.h b/inc/riscv64/efibind.h | ||
16 | index 4fdf81d..d8b4f39 100644 | ||
17 | --- a/inc/riscv64/efibind.h | ||
18 | +++ b/inc/riscv64/efibind.h | ||
19 | @@ -32,11 +32,9 @@ typedef uint16_t UINT16; | ||
20 | typedef int16_t INT16; | ||
21 | typedef uint8_t UINT8; | ||
22 | typedef int8_t INT8; | ||
23 | +typedef char CHAR8; | ||
24 | typedef wchar_t CHAR16; | ||
25 | #define WCHAR CHAR16 | ||
26 | -#ifndef BOOLEAN | ||
27 | -typedef uint8_t BOOLEAN; | ||
28 | -#endif | ||
29 | #undef VOID | ||
30 | typedef void VOID; | ||
31 | typedef int64_t INTN; | ||
32 | -- | ||
33 | 2.41.0 | ||
34 | |||
diff --git a/meta/recipes-bsp/gnu-efi/gnu-efi/0001-riscv64-ignore-unknown-relocs.patch b/meta/recipes-bsp/gnu-efi/gnu-efi/0001-riscv64-ignore-unknown-relocs.patch deleted file mode 100644 index 5b3c152c5e..0000000000 --- a/meta/recipes-bsp/gnu-efi/gnu-efi/0001-riscv64-ignore-unknown-relocs.patch +++ /dev/null | |||
@@ -1,32 +0,0 @@ | |||
1 | From 708f66acfec9a86f237726d45095cbd380fd83ca Mon Sep 17 00:00:00 2001 | ||
2 | From: Callum Farmer <gmbr3@opensuse.org> | ||
3 | Date: Wed, 21 Jun 2023 11:32:28 +0100 | ||
4 | Subject: [PATCH] riscv64: ignore unknown relocs | ||
5 | |||
6 | Sometimes ld emits relocs such as R_RISCV_64 for unwind symbols | ||
7 | these don't need to be handled yet so just can be skipped otherwise | ||
8 | the binary will never load | ||
9 | |||
10 | Upstream-Status: Backport [https://sourceforge.net/p/gnu-efi/code/ci/708f66acfec9a86f237726d45095cbd380fd83ca/] | ||
11 | Signed-off-by: Callum Farmer <gmbr3@opensuse.org> | ||
12 | --- | ||
13 | gnuefi/reloc_riscv64.c | 3 +-- | ||
14 | 1 file changed, 1 insertion(+), 2 deletions(-) | ||
15 | |||
16 | diff --git a/gnuefi/reloc_riscv64.c b/gnuefi/reloc_riscv64.c | ||
17 | index 0b02d83..e429602 100644 | ||
18 | --- a/gnuefi/reloc_riscv64.c | ||
19 | +++ b/gnuefi/reloc_riscv64.c | ||
20 | @@ -81,8 +81,7 @@ EFI_STATUS EFIAPI _relocate(long ldbase, Elf_Dyn *dyn) | ||
21 | *addr = ldbase + rel->r_addend; | ||
22 | break; | ||
23 | default: | ||
24 | - /* Panic */ | ||
25 | - while (1) ; | ||
26 | + break; | ||
27 | } | ||
28 | rel = (Elf_Rela *)((char *)rel + relent); | ||
29 | relsz -= relent; | ||
30 | -- | ||
31 | 2.41.0 | ||
32 | |||
diff --git a/meta/recipes-bsp/gnu-efi/gnu-efi/no-werror.patch b/meta/recipes-bsp/gnu-efi/gnu-efi/0002-Do-not-treat-warnings-as-errors.patch index db2bcf70b7..37befc8b4c 100644 --- a/meta/recipes-bsp/gnu-efi/gnu-efi/no-werror.patch +++ b/meta/recipes-bsp/gnu-efi/gnu-efi/0002-Do-not-treat-warnings-as-errors.patch | |||
@@ -1,14 +1,24 @@ | |||
1 | Do not treat warnings as errors | 1 | From 89218bb3c5eb7b97987769cb4f7bf8323d35ca7e Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | ||
3 | Date: Thu, 16 May 2024 21:38:32 +0800 | ||
4 | Subject: [PATCH] Do not treat warnings as errors | ||
2 | 5 | ||
3 | There are additional warnings found with musl which are | 6 | There are additional warnings found with musl which are |
4 | treated as errors and fails the build, we have more combinations | 7 | treated as errors and fails the build, we have more combinations |
5 | then upstream supports to handle | 8 | then upstream supports to handle |
6 | 9 | ||
7 | Signed-off-by: Khem Raj <raj.khem@gmail.com> | ||
8 | Upstream-Status: Inappropriate [OE specific] | 10 | Upstream-Status: Inappropriate [OE specific] |
11 | |||
12 | Signed-off-by: Khem Raj <raj.khem@gmail.com> | ||
13 | --- | ||
14 | Make.defaults | 2 +- | ||
15 | 1 file changed, 1 insertion(+), 1 deletion(-) | ||
16 | |||
17 | diff --git a/Make.defaults b/Make.defaults | ||
18 | index 0068e31..b643285 100755 | ||
9 | --- a/Make.defaults | 19 | --- a/Make.defaults |
10 | +++ b/Make.defaults | 20 | +++ b/Make.defaults |
11 | @@ -187,7 +187,7 @@ CFLAGS += $(ARCH3264) -g -O2 -Wall -Wex | 21 | @@ -190,7 +190,7 @@ CFLAGS += $(ARCH3264) -g -O2 -Wall -Wextra -Werror \ |
12 | -funsigned-char -fshort-wchar -fno-strict-aliasing \ | 22 | -funsigned-char -fshort-wchar -fno-strict-aliasing \ |
13 | -ffreestanding -fno-stack-protector | 23 | -ffreestanding -fno-stack-protector |
14 | else | 24 | else |
@@ -16,4 +26,7 @@ Upstream-Status: Inappropriate [OE specific] | |||
16 | +CFLAGS += $(ARCH3264) -g -O2 -Wall -Wextra -Wno-pointer-sign \ | 26 | +CFLAGS += $(ARCH3264) -g -O2 -Wall -Wextra -Wno-pointer-sign \ |
17 | -funsigned-char -fshort-wchar -fno-strict-aliasing \ | 27 | -funsigned-char -fshort-wchar -fno-strict-aliasing \ |
18 | -ffreestanding -fno-stack-protector -fno-stack-check \ | 28 | -ffreestanding -fno-stack-protector -fno-stack-check \ |
19 | -fno-stack-check \ | 29 | $(if $(findstring gcc,$(CC)),-fno-merge-all-constants,) |
30 | -- | ||
31 | 2.25.1 | ||
32 | |||
diff --git a/meta/recipes-bsp/gnu-efi/gnu-efi/gnu-efi-3.0.9-fix-clang-build.patch b/meta/recipes-bsp/gnu-efi/gnu-efi/gnu-efi-3.0.9-fix-clang-build.patch deleted file mode 100644 index c6d660095e..0000000000 --- a/meta/recipes-bsp/gnu-efi/gnu-efi/gnu-efi-3.0.9-fix-clang-build.patch +++ /dev/null | |||
@@ -1,24 +0,0 @@ | |||
1 | Fix building with CLANG-9.0.0 | ||
2 | |||
3 | Fixes | ||
4 | clang-9: error: unknown argument: '-maccumulate-outgoing-args' | ||
5 | |||
6 | Upstream-Status: Submitted [https://sourceforge.net/p/gnu-efi/patches/70/] | ||
7 | Signed-off-by: Khem Raj <raj.khem@gmail.com> | ||
8 | |||
9 | --- a/Make.defaults | ||
10 | +++ b/Make.defaults | ||
11 | @@ -110,10 +110,10 @@ | ||
12 | || ( [ $(GCCVERSION) -eq "4" ] \ | ||
13 | && [ $(GCCMINOR) -ge "7" ] ) ) \ | ||
14 | && echo 1) | ||
15 | - ifeq ($(GCCNEWENOUGH),1) | ||
16 | - CPPFLAGS += -DGNU_EFI_USE_MS_ABI -maccumulate-outgoing-args --std=c11 | ||
17 | - else ifeq ($(USING_CLANG),clang) | ||
18 | + ifeq ($(USING_CLANG),clang) | ||
19 | CPPFLAGS += -DGNU_EFI_USE_MS_ABI --std=c11 | ||
20 | + else ifeq ($(GCCNEWENOUGH),1) | ||
21 | + CPPFLAGS += -DGNU_EFI_USE_MS_ABI -maccumulate-outgoing-args --std=c11 | ||
22 | endif | ||
23 | |||
24 | CFLAGS += -mno-red-zone | ||
diff --git a/meta/recipes-bsp/gnu-efi/gnu-efi_3.0.17.bb b/meta/recipes-bsp/gnu-efi/gnu-efi_3.0.18.bb index 43b7cc7529..a56a85bdef 100644 --- a/meta/recipes-bsp/gnu-efi/gnu-efi_3.0.17.bb +++ b/meta/recipes-bsp/gnu-efi/gnu-efi_3.0.18.bb | |||
@@ -13,13 +13,10 @@ LIC_FILES_CHKSUM = "file://gnuefi/crt0-efi-arm.S;beginline=4;endline=16;md5=e582 | |||
13 | " | 13 | " |
14 | 14 | ||
15 | SRC_URI = "${SOURCEFORGE_MIRROR}/${BPN}/files/${BP}.tar.bz2 \ | 15 | SRC_URI = "${SOURCEFORGE_MIRROR}/${BPN}/files/${BP}.tar.bz2 \ |
16 | file://parallel-make-archives.patch \ | 16 | file://0001-Fix-parallel-make-failure-for-archives.patch \ |
17 | file://gnu-efi-3.0.9-fix-clang-build.patch \ | 17 | file://0002-Do-not-treat-warnings-as-errors.patch \ |
18 | file://0001-riscv64-adjust-type-definitions.patch \ | ||
19 | file://0001-riscv64-ignore-unknown-relocs.patch \ | ||
20 | file://no-werror.patch \ | ||
21 | " | 18 | " |
22 | SRC_URI[sha256sum] = "7807e903349343a7a142ebb934703a2872235e89688cf586c032b0a1087bcaf4" | 19 | SRC_URI[sha256sum] = "7f212c96ee66547eeefb531267b641e5473d7d8529f0bd8ccdefd33cf7413f5c" |
23 | 20 | ||
24 | COMPATIBLE_HOST = "(x86_64.*|i.86.*|aarch64.*|arm.*|riscv64.*)-linux" | 21 | COMPATIBLE_HOST = "(x86_64.*|i.86.*|aarch64.*|arm.*|riscv64.*)-linux" |
25 | COMPATIBLE_HOST:armv4 = 'null' | 22 | COMPATIBLE_HOST:armv4 = 'null' |
diff --git a/meta/recipes-bsp/grub/files/0001-Disable-mfpmath-sse-as-well-when-SSE-is-disabled.patch b/meta/recipes-bsp/grub/files/0001-Disable-mfpmath-sse-as-well-when-SSE-is-disabled.patch deleted file mode 100644 index 05a4697a73..0000000000 --- a/meta/recipes-bsp/grub/files/0001-Disable-mfpmath-sse-as-well-when-SSE-is-disabled.patch +++ /dev/null | |||
@@ -1,44 +0,0 @@ | |||
1 | From 006799e9c4babe8a8340a24501b253e759614a2d Mon Sep 17 00:00:00 2001 | ||
2 | From: Khem Raj <raj.khem@gmail.com> | ||
3 | Date: Wed, 13 Jan 2016 19:17:31 +0000 | ||
4 | Subject: [PATCH] Disable -mfpmath=sse as well when SSE is disabled | ||
5 | |||
6 | Fixes | ||
7 | |||
8 | configure:20574: i586-poky-linux-gcc -m32 -march=core2 -msse3 | ||
9 | -mtune=generic -mfpmath=sse | ||
10 | --sysroot=/usr/local/dev/yocto/grubtest2/build/tmp/sysroots/emenlow -o | ||
11 | conftest -O2 -pipe -g -feliminate-unused-debug-types -Wall -W -Wshadow | ||
12 | -Wpointer-arith -Wmissing-prototypes -Wundef -Wstrict-prototypes -g | ||
13 | -falign-jumps=1 -falign-loops=1 -falign-functions=1 -mno-mmx -mno-sse | ||
14 | -mno-sse2 -mno-3dnow -fno-dwarf2-cfi-asm -m32 -fno-stack-protector | ||
15 | -mno-stack-arg-probe -Werror -nostdlib -Wl,--defsym,___main=0x8100 | ||
16 | -Wall -W -I$(top_srcdir)/include -I$(top_builddir)/include | ||
17 | -DGRUB_MACHINE_PCBIOS=1 -DGRUB_MACHINE=I386_PC -Wl,-O1 | ||
18 | -Wl,--hash-style=gnu -Wl,--as-needed conftest.c >&5 | ||
19 | conftest.c:1:0: error: SSE instruction set disabled, using 387 | ||
20 | arithmetics [-Werror] | ||
21 | cc1: all warnings being treated as errors | ||
22 | |||
23 | Signed-off-by: Nitin A Kamble <nitin.a.kamble@intel.com> | ||
24 | Signed-off-by: Khem Raj <raj.khem@gmail.com> | ||
25 | |||
26 | Upstream-Status: Pending | ||
27 | |||
28 | --- | ||
29 | configure.ac | 2 +- | ||
30 | 1 file changed, 1 insertion(+), 1 deletion(-) | ||
31 | |||
32 | diff --git a/configure.ac b/configure.ac | ||
33 | index cd667a2..8263876 100644 | ||
34 | --- a/configure.ac | ||
35 | +++ b/configure.ac | ||
36 | @@ -846,7 +846,7 @@ fi | ||
37 | if ( test "x$target_cpu" = xi386 || test "x$target_cpu" = xx86_64 ) && test "x$platform" != xemu; then | ||
38 | # Some toolchains enable these features by default, but they need | ||
39 | # registers that aren't set up properly in GRUB. | ||
40 | - TARGET_CFLAGS="$TARGET_CFLAGS -mno-mmx -mno-sse -mno-sse2 -mno-sse3 -mno-3dnow" | ||
41 | + TARGET_CFLAGS="$TARGET_CFLAGS -mno-mmx -mno-sse -mno-sse2 -mno-sse3 -mno-3dnow -mfpmath=387" | ||
42 | fi | ||
43 | |||
44 | if ( test "x$target_cpu" = xi386 || test "x$target_cpu" = xx86_64 ); then | ||
diff --git a/meta/recipes-bsp/grub/files/grub-module-explicitly-keeps-symbole-.module_license.patch b/meta/recipes-bsp/grub/files/grub-module-explicitly-keeps-symbole-.module_license.patch index d9012d1dd6..7c8770ce8b 100644 --- a/meta/recipes-bsp/grub/files/grub-module-explicitly-keeps-symbole-.module_license.patch +++ b/meta/recipes-bsp/grub/files/grub-module-explicitly-keeps-symbole-.module_license.patch | |||
@@ -37,7 +37,7 @@ SYMBOL TABLE: | |||
37 | 0000000000000000 l d .modname 0000000000000000 .modname | 37 | 0000000000000000 l d .modname 0000000000000000 .modname |
38 | -------------- | 38 | -------------- |
39 | 39 | ||
40 | Upstream-Status: Pending | 40 | Upstream-Status: Inappropriate [workaround that needs investigation into @TARGET_STRIP@ behaviour in oe-core vs toolchain used by upstream] |
41 | 41 | ||
42 | Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com> | 42 | Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com> |
43 | 43 | ||
diff --git a/meta/recipes-bsp/grub/grub-bootconf_1.00.bb b/meta/recipes-bsp/grub/grub-bootconf_1.00.bb index 783e30bf38..fed3c7e9a3 100644 --- a/meta/recipes-bsp/grub/grub-bootconf_1.00.bb +++ b/meta/recipes-bsp/grub/grub-bootconf_1.00.bb | |||
@@ -11,7 +11,8 @@ inherit grub-efi-cfg | |||
11 | 11 | ||
12 | require conf/image-uefi.conf | 12 | require conf/image-uefi.conf |
13 | 13 | ||
14 | S = "${WORKDIR}" | 14 | S = "${WORKDIR}/sources" |
15 | UNPACKDIR = "${S}" | ||
15 | 16 | ||
16 | GRUB_CFG = "${S}/grub-bootconf" | 17 | GRUB_CFG = "${S}/grub-bootconf" |
17 | LABELS = "boot" | 18 | LABELS = "boot" |
diff --git a/meta/recipes-bsp/grub/grub-efi_2.12.bb b/meta/recipes-bsp/grub/grub-efi_2.12.bb index 9857e8e036..7df77deca3 100644 --- a/meta/recipes-bsp/grub/grub-efi_2.12.bb +++ b/meta/recipes-bsp/grub/grub-efi_2.12.bb | |||
@@ -58,7 +58,7 @@ do_mkimage() { | |||
58 | 58 | ||
59 | # Search for the grub.cfg on the local boot media by using the | 59 | # Search for the grub.cfg on the local boot media by using the |
60 | # built in cfg file provided via this recipe | 60 | # built in cfg file provided via this recipe |
61 | grub-mkimage -v -c ../cfg -p ${EFIDIR} -d ./grub-core/ \ | 61 | grub-mkimage -v -c ${UNPACKDIR}/cfg -p ${EFIDIR} -d ./grub-core/ \ |
62 | -O ${GRUB_TARGET}-efi -o ./${GRUB_IMAGE_PREFIX}${GRUB_IMAGE} \ | 62 | -O ${GRUB_TARGET}-efi -o ./${GRUB_IMAGE_PREFIX}${GRUB_IMAGE} \ |
63 | ${GRUB_MKIMAGE_MODULES} | 63 | ${GRUB_MKIMAGE_MODULES} |
64 | } | 64 | } |
diff --git a/meta/recipes-bsp/grub/grub2.inc b/meta/recipes-bsp/grub/grub2.inc index bb9aacb478..e2a2a84277 100644 --- a/meta/recipes-bsp/grub/grub2.inc +++ b/meta/recipes-bsp/grub/grub2.inc | |||
@@ -14,7 +14,6 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504" | |||
14 | CVE_PRODUCT = "grub2" | 14 | CVE_PRODUCT = "grub2" |
15 | 15 | ||
16 | SRC_URI = "${GNU_MIRROR}/grub/grub-${PV}.tar.gz \ | 16 | SRC_URI = "${GNU_MIRROR}/grub/grub-${PV}.tar.gz \ |
17 | file://0001-Disable-mfpmath-sse-as-well-when-SSE-is-disabled.patch \ | ||
18 | file://autogen.sh-exclude-pc.patch \ | 17 | file://autogen.sh-exclude-pc.patch \ |
19 | file://grub-module-explicitly-keeps-symbole-.module_license.patch \ | 18 | file://grub-module-explicitly-keeps-symbole-.module_license.patch \ |
20 | file://0001-grub.d-10_linux.in-add-oe-s-kernel-name.patch \ | 19 | file://0001-grub.d-10_linux.in-add-oe-s-kernel-name.patch \ |
diff --git a/meta/recipes-bsp/keymaps/keymaps_1.0.bb b/meta/recipes-bsp/keymaps/keymaps_1.0.bb index e30dd9dca2..0425197c98 100644 --- a/meta/recipes-bsp/keymaps/keymaps_1.0.bb +++ b/meta/recipes-bsp/keymaps/keymaps_1.0.bb | |||
@@ -24,7 +24,8 @@ SRC_URI = "file://keymap.sh" | |||
24 | INITSCRIPT_NAME = "keymap.sh" | 24 | INITSCRIPT_NAME = "keymap.sh" |
25 | INITSCRIPT_PARAMS = "start 01 S ." | 25 | INITSCRIPT_PARAMS = "start 01 S ." |
26 | 26 | ||
27 | S = "${WORKDIR}" | 27 | S = "${WORKDIR}/sources" |
28 | UNPACKDIR = "${S}" | ||
28 | 29 | ||
29 | do_install () { | 30 | do_install () { |
30 | # Only install the script if 'sysvinit' is in DISTRO_FEATURES | 31 | # Only install the script if 'sysvinit' is in DISTRO_FEATURES |
diff --git a/meta/recipes-bsp/lrzsz/lrzsz_0.12.20.bb b/meta/recipes-bsp/lrzsz/lrzsz_0.12.20.bb index 63edcbd864..3024ddcaf0 100644 --- a/meta/recipes-bsp/lrzsz/lrzsz_0.12.20.bb +++ b/meta/recipes-bsp/lrzsz/lrzsz_0.12.20.bb | |||
@@ -46,3 +46,9 @@ ALTERNATIVE_TARGET[rb] = "${bindir}/lrz" | |||
46 | ALTERNATIVE_TARGET[sz] = "${bindir}/lsz" | 46 | ALTERNATIVE_TARGET[sz] = "${bindir}/lsz" |
47 | ALTERNATIVE_TARGET[sx] = "${bindir}/lsz" | 47 | ALTERNATIVE_TARGET[sx] = "${bindir}/lsz" |
48 | ALTERNATIVE_TARGET[sb] = "${bindir}/lsz" | 48 | ALTERNATIVE_TARGET[sb] = "${bindir}/lsz" |
49 | |||
50 | # http://errors.yoctoproject.org/Errors/Details/766929/ | ||
51 | # lrzsz-0.12.20/src/tcp.c:75:56: error: passing argument 3 of 'getsockname' from incompatible pointer type [-Wincompatible-pointer-types] | ||
52 | # lrzsz-0.12.20/src/tcp.c:83:52: error: passing argument 3 of 'getsockname' from incompatible pointer type [-Wincompatible-pointer-types] | ||
53 | # lrzsz-0.12.20/src/tcp.c:103:51: error: passing argument 3 of 'accept' from incompatible pointer type [-Wincompatible-pointer-types] | ||
54 | CFLAGS += "-Wno-error=incompatible-pointer-types" | ||
diff --git a/meta/recipes-bsp/pciutils/pciutils/configure.patch b/meta/recipes-bsp/pciutils/pciutils/configure.patch index 5015cf4884..0ff151b8cf 100644 --- a/meta/recipes-bsp/pciutils/pciutils/configure.patch +++ b/meta/recipes-bsp/pciutils/pciutils/configure.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 561216c8cbc280aaa9aecf30cb11835a4a0a78ed Mon Sep 17 00:00:00 2001 | 1 | From 6af07e1f812b2444d33ce199308c87e04b2f5dc7 Mon Sep 17 00:00:00 2001 |
2 | From: Richard Purdie <rpurdie@linux.intel.com> | 2 | From: Richard Purdie <rpurdie@linux.intel.com> |
3 | Date: Wed, 31 Dec 2008 17:20:38 +0000 | 3 | Date: Wed, 31 Dec 2008 17:20:38 +0000 |
4 | Subject: [PATCH] pciutils: Upgarde 2.2.4 -> 3.0.3 | 4 | Subject: [PATCH] pciutils: Upgarde 2.2.4 -> 3.0.3 |
@@ -23,20 +23,20 @@ Upstream-Status: Inappropriate [embedded specific] | |||
23 | 2 files changed, 11 insertions(+), 5 deletions(-) | 23 | 2 files changed, 11 insertions(+), 5 deletions(-) |
24 | 24 | ||
25 | diff --git a/Makefile b/Makefile | 25 | diff --git a/Makefile b/Makefile |
26 | index aaec04e..9c1dab0 100644 | 26 | index be23593..aa13be5 100644 |
27 | --- a/Makefile | 27 | --- a/Makefile |
28 | +++ b/Makefile | 28 | +++ b/Makefile |
29 | @@ -123,7 +123,7 @@ pcilmr$(EXEEXT): pcilmr.o $(LMROBJS) $(COMMON) lib/$(PCIIMPLIB) | 29 | @@ -123,7 +123,7 @@ pcilmr$(EXEEXT): pcilmr.o $(LMROBJS) $(COMMON) lib/$(PCIIMPLIB) |
30 | pcilmr.o: pcilmr.c $(LMRINC) | 30 | pcilmr.o: pcilmr.c $(LMRINC) |
31 | 31 | ||
32 | %$(EXEEXT): %.o | 32 | %$(EXEEXT): %.o |
33 | - $(CC) $(LDFLAGS) $(TARGET_ARCH) $^ $(LDLIBS) -o $@ | 33 | - $(CC) $(CFLAGS) $(LDFLAGS) $(TARGET_ARCH) $^ $(LDLIBS) -o $@ |
34 | + $(CC) $(LDFLAGS) $(TARGET_ARCH) $^ $(LIB_LDLIBS) $(LDLIBS) -o $@ | 34 | + $(CC) $(CFLAGS) $(LDFLAGS) $(TARGET_ARCH) $^ $(LIB_LDLIBS) $(LDLIBS) -o $@ |
35 | 35 | ||
36 | ifdef PCI_OS_WINDOWS | 36 | ifdef PCI_OS_WINDOWS |
37 | comma := , | 37 | comma := , |
38 | diff --git a/lib/configure b/lib/configure | 38 | diff --git a/lib/configure b/lib/configure |
39 | index 3df057a..c87e71c 100755 | 39 | index d02160b..52c1eee 100755 |
40 | --- a/lib/configure | 40 | --- a/lib/configure |
41 | +++ b/lib/configure | 41 | +++ b/lib/configure |
42 | @@ -9,6 +9,10 @@ echo_n() { | 42 | @@ -9,6 +9,10 @@ echo_n() { |
@@ -88,3 +88,6 @@ index 3df057a..c87e71c 100755 | |||
88 | c=config.h | 88 | c=config.h |
89 | m=config.mk | 89 | m=config.mk |
90 | echo >$c '#define PCI_CONFIG_H' | 90 | echo >$c '#define PCI_CONFIG_H' |
91 | -- | ||
92 | 2.42.0 | ||
93 | |||
diff --git a/meta/recipes-bsp/pciutils/pciutils_3.11.1.bb b/meta/recipes-bsp/pciutils/pciutils_3.12.0.bb index 044074ccc3..480a338696 100644 --- a/meta/recipes-bsp/pciutils/pciutils_3.11.1.bb +++ b/meta/recipes-bsp/pciutils/pciutils_3.12.0.bb | |||
@@ -14,7 +14,7 @@ DEPENDS = "zlib kmod make-native" | |||
14 | SRC_URI = "${KERNELORG_MIRROR}/software/utils/pciutils/pciutils-${PV}.tar.xz \ | 14 | SRC_URI = "${KERNELORG_MIRROR}/software/utils/pciutils/pciutils-${PV}.tar.xz \ |
15 | file://configure.patch" | 15 | file://configure.patch" |
16 | 16 | ||
17 | SRC_URI[sha256sum] = "3f472ad864473de5ba17f765cc96ef5f33e1b730918d3adda6f945a2a9290df4" | 17 | SRC_URI[sha256sum] = "f185d116d5ff99b797497efce8f19f1ee8ccc5a668b97a159e3d13472f674154" |
18 | 18 | ||
19 | inherit multilib_header pkgconfig update-alternatives | 19 | inherit multilib_header pkgconfig update-alternatives |
20 | 20 | ||
diff --git a/meta/recipes-bsp/u-boot/u-boot.inc b/meta/recipes-bsp/u-boot/u-boot.inc index 3a7afb81c8..45d700fbdd 100644 --- a/meta/recipes-bsp/u-boot/u-boot.inc +++ b/meta/recipes-bsp/u-boot/u-boot.inc | |||
@@ -259,8 +259,8 @@ do_deploy () { | |||
259 | fi | 259 | fi |
260 | fi | 260 | fi |
261 | 261 | ||
262 | if [ -e ${WORKDIR}/fw_env.config ] ; then | 262 | if [ -e ${UNPACKDIR}/fw_env.config ] ; then |
263 | install -D -m 644 ${WORKDIR}/fw_env.config ${DEPLOYDIR}/fw_env.config-${MACHINE}-${PV}-${PR} | 263 | install -D -m 644 ${UNPACKDIR}/fw_env.config ${DEPLOYDIR}/fw_env.config-${MACHINE}-${PV}-${PR} |
264 | cd ${DEPLOYDIR} | 264 | cd ${DEPLOYDIR} |
265 | ln -sf fw_env.config-${MACHINE}-${PV}-${PR} fw_env.config-${MACHINE} | 265 | ln -sf fw_env.config-${MACHINE}-${PV}-${PR} fw_env.config-${MACHINE} |
266 | ln -sf fw_env.config-${MACHINE}-${PV}-${PR} fw_env.config | 266 | ln -sf fw_env.config-${MACHINE}-${PV}-${PR} fw_env.config |
diff --git a/meta/recipes-bsp/usbinit/usbinit.bb b/meta/recipes-bsp/usbinit/usbinit.bb deleted file mode 100644 index b80191bddc..0000000000 --- a/meta/recipes-bsp/usbinit/usbinit.bb +++ /dev/null | |||
@@ -1,24 +0,0 @@ | |||
1 | SUMMARY = "Initscript for enabling USB gadget Ethernet" | ||
2 | DESCRIPTION = "This module allows ethernet emulation over USB, allowing for \ | ||
3 | all sorts of nifty things like SSH and NFS in one go plus charging over the \ | ||
4 | same wire, at higher speeds than most Wifi connections." | ||
5 | HOMEPAGE = "http://linux-sunxi.org/USB_Gadget/Ethernet" | ||
6 | |||
7 | LICENSE = "GPL-2.0-only" | ||
8 | LIC_FILES_CHKSUM = "file://${S}/COPYING.GPL;md5=751419260aa954499f7abaabaa882bbe" | ||
9 | |||
10 | |||
11 | SRC_URI = "file://usb-gether \ | ||
12 | file://COPYING.GPL" | ||
13 | S = "${WORKDIR}" | ||
14 | |||
15 | do_install() { | ||
16 | install -d ${D}${sysconfdir} | ||
17 | install -d ${D}${sysconfdir}/init.d | ||
18 | install usb-gether ${D}${sysconfdir}/init.d | ||
19 | } | ||
20 | |||
21 | inherit update-rc.d allarch | ||
22 | |||
23 | INITSCRIPT_NAME = "usb-gether" | ||
24 | INITSCRIPT_PARAMS = "start 99 5 2 . stop 20 0 1 6 ." | ||
diff --git a/meta/recipes-bsp/usbinit/usbinit/COPYING.GPL b/meta/recipes-bsp/usbinit/usbinit/COPYING.GPL deleted file mode 100644 index d511905c16..0000000000 --- a/meta/recipes-bsp/usbinit/usbinit/COPYING.GPL +++ /dev/null | |||
@@ -1,339 +0,0 @@ | |||
1 | GNU GENERAL PUBLIC LICENSE | ||
2 | Version 2, June 1991 | ||
3 | |||
4 | Copyright (C) 1989, 1991 Free Software Foundation, Inc., | ||
5 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA | ||
6 | Everyone is permitted to copy and distribute verbatim copies | ||
7 | of this license document, but changing it is not allowed. | ||
8 | |||
9 | Preamble | ||
10 | |||
11 | The licenses for most software are designed to take away your | ||
12 | freedom to share and change it. By contrast, the GNU General Public | ||
13 | License is intended to guarantee your freedom to share and change free | ||
14 | software--to make sure the software is free for all its users. This | ||
15 | General Public License applies to most of the Free Software | ||
16 | Foundation's software and to any other program whose authors commit to | ||
17 | using it. (Some other Free Software Foundation software is covered by | ||
18 | the GNU Lesser General Public License instead.) You can apply it to | ||
19 | your programs, too. | ||
20 | |||
21 | When we speak of free software, we are referring to freedom, not | ||
22 | price. Our General Public Licenses are designed to make sure that you | ||
23 | have the freedom to distribute copies of free software (and charge for | ||
24 | this service if you wish), that you receive source code or can get it | ||
25 | if you want it, that you can change the software or use pieces of it | ||
26 | in new free programs; and that you know you can do these things. | ||
27 | |||
28 | To protect your rights, we need to make restrictions that forbid | ||
29 | anyone to deny you these rights or to ask you to surrender the rights. | ||
30 | These restrictions translate to certain responsibilities for you if you | ||
31 | distribute copies of the software, or if you modify it. | ||
32 | |||
33 | For example, if you distribute copies of such a program, whether | ||
34 | gratis or for a fee, you must give the recipients all the rights that | ||
35 | you have. You must make sure that they, too, receive or can get the | ||
36 | source code. And you must show them these terms so they know their | ||
37 | rights. | ||
38 | |||
39 | We protect your rights with two steps: (1) copyright the software, and | ||
40 | (2) offer you this license which gives you legal permission to copy, | ||
41 | distribute and/or modify the software. | ||
42 | |||
43 | Also, for each author's protection and ours, we want to make certain | ||
44 | that everyone understands that there is no warranty for this free | ||
45 | software. If the software is modified by someone else and passed on, we | ||
46 | want its recipients to know that what they have is not the original, so | ||
47 | that any problems introduced by others will not reflect on the original | ||
48 | authors' reputations. | ||
49 | |||
50 | Finally, any free program is threatened constantly by software | ||
51 | patents. We wish to avoid the danger that redistributors of a free | ||
52 | program will individually obtain patent licenses, in effect making the | ||
53 | program proprietary. To prevent this, we have made it clear that any | ||
54 | patent must be licensed for everyone's free use or not licensed at all. | ||
55 | |||
56 | The precise terms and conditions for copying, distribution and | ||
57 | modification follow. | ||
58 | |||
59 | GNU GENERAL PUBLIC LICENSE | ||
60 | TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION | ||
61 | |||
62 | 0. This License applies to any program or other work which contains | ||
63 | a notice placed by the copyright holder saying it may be distributed | ||
64 | under the terms of this General Public License. The "Program", below, | ||
65 | refers to any such program or work, and a "work based on the Program" | ||
66 | means either the Program or any derivative work under copyright law: | ||
67 | that is to say, a work containing the Program or a portion of it, | ||
68 | either verbatim or with modifications and/or translated into another | ||
69 | language. (Hereinafter, translation is included without limitation in | ||
70 | the term "modification".) Each licensee is addressed as "you". | ||
71 | |||
72 | Activities other than copying, distribution and modification are not | ||
73 | covered by this License; they are outside its scope. The act of | ||
74 | running the Program is not restricted, and the output from the Program | ||
75 | is covered only if its contents constitute a work based on the | ||
76 | Program (independent of having been made by running the Program). | ||
77 | Whether that is true depends on what the Program does. | ||
78 | |||
79 | 1. You may copy and distribute verbatim copies of the Program's | ||
80 | source code as you receive it, in any medium, provided that you | ||
81 | conspicuously and appropriately publish on each copy an appropriate | ||
82 | copyright notice and disclaimer of warranty; keep intact all the | ||
83 | notices that refer to this License and to the absence of any warranty; | ||
84 | and give any other recipients of the Program a copy of this License | ||
85 | along with the Program. | ||
86 | |||
87 | You may charge a fee for the physical act of transferring a copy, and | ||
88 | you may at your option offer warranty protection in exchange for a fee. | ||
89 | |||
90 | 2. You may modify your copy or copies of the Program or any portion | ||
91 | of it, thus forming a work based on the Program, and copy and | ||
92 | distribute such modifications or work under the terms of Section 1 | ||
93 | above, provided that you also meet all of these conditions: | ||
94 | |||
95 | a) You must cause the modified files to carry prominent notices | ||
96 | stating that you changed the files and the date of any change. | ||
97 | |||
98 | b) You must cause any work that you distribute or publish, that in | ||
99 | whole or in part contains or is derived from the Program or any | ||
100 | part thereof, to be licensed as a whole at no charge to all third | ||
101 | parties under the terms of this License. | ||
102 | |||
103 | c) If the modified program normally reads commands interactively | ||
104 | when run, you must cause it, when started running for such | ||
105 | interactive use in the most ordinary way, to print or display an | ||
106 | announcement including an appropriate copyright notice and a | ||
107 | notice that there is no warranty (or else, saying that you provide | ||
108 | a warranty) and that users may redistribute the program under | ||
109 | these conditions, and telling the user how to view a copy of this | ||
110 | License. (Exception: if the Program itself is interactive but | ||
111 | does not normally print such an announcement, your work based on | ||
112 | the Program is not required to print an announcement.) | ||
113 | |||
114 | These requirements apply to the modified work as a whole. If | ||
115 | identifiable sections of that work are not derived from the Program, | ||
116 | and can be reasonably considered independent and separate works in | ||
117 | themselves, then this License, and its terms, do not apply to those | ||
118 | sections when you distribute them as separate works. But when you | ||
119 | distribute the same sections as part of a whole which is a work based | ||
120 | on the Program, the distribution of the whole must be on the terms of | ||
121 | this License, whose permissions for other licensees extend to the | ||
122 | entire whole, and thus to each and every part regardless of who wrote it. | ||
123 | |||
124 | Thus, it is not the intent of this section to claim rights or contest | ||
125 | your rights to work written entirely by you; rather, the intent is to | ||
126 | exercise the right to control the distribution of derivative or | ||
127 | collective works based on the Program. | ||
128 | |||
129 | In addition, mere aggregation of another work not based on the Program | ||
130 | with the Program (or with a work based on the Program) on a volume of | ||
131 | a storage or distribution medium does not bring the other work under | ||
132 | the scope of this License. | ||
133 | |||
134 | 3. You may copy and distribute the Program (or a work based on it, | ||
135 | under Section 2) in object code or executable form under the terms of | ||
136 | Sections 1 and 2 above provided that you also do one of the following: | ||
137 | |||
138 | a) Accompany it with the complete corresponding machine-readable | ||
139 | source code, which must be distributed under the terms of Sections | ||
140 | 1 and 2 above on a medium customarily used for software interchange; or, | ||
141 | |||
142 | b) Accompany it with a written offer, valid for at least three | ||
143 | years, to give any third party, for a charge no more than your | ||
144 | cost of physically performing source distribution, a complete | ||
145 | machine-readable copy of the corresponding source code, to be | ||
146 | distributed under the terms of Sections 1 and 2 above on a medium | ||
147 | customarily used for software interchange; or, | ||
148 | |||
149 | c) Accompany it with the information you received as to the offer | ||
150 | to distribute corresponding source code. (This alternative is | ||
151 | allowed only for noncommercial distribution and only if you | ||
152 | received the program in object code or executable form with such | ||
153 | an offer, in accord with Subsection b above.) | ||
154 | |||
155 | The source code for a work means the preferred form of the work for | ||
156 | making modifications to it. For an executable work, complete source | ||
157 | code means all the source code for all modules it contains, plus any | ||
158 | associated interface definition files, plus the scripts used to | ||
159 | control compilation and installation of the executable. However, as a | ||
160 | special exception, the source code distributed need not include | ||
161 | anything that is normally distributed (in either source or binary | ||
162 | form) with the major components (compiler, kernel, and so on) of the | ||
163 | operating system on which the executable runs, unless that component | ||
164 | itself accompanies the executable. | ||
165 | |||
166 | If distribution of executable or object code is made by offering | ||
167 | access to copy from a designated place, then offering equivalent | ||
168 | access to copy the source code from the same place counts as | ||
169 | distribution of the source code, even though third parties are not | ||
170 | compelled to copy the source along with the object code. | ||
171 | |||
172 | 4. You may not copy, modify, sublicense, or distribute the Program | ||
173 | except as expressly provided under this License. Any attempt | ||
174 | otherwise to copy, modify, sublicense or distribute the Program is | ||
175 | void, and will automatically terminate your rights under this License. | ||
176 | However, parties who have received copies, or rights, from you under | ||
177 | this License will not have their licenses terminated so long as such | ||
178 | parties remain in full compliance. | ||
179 | |||
180 | 5. You are not required to accept this License, since you have not | ||
181 | signed it. However, nothing else grants you permission to modify or | ||
182 | distribute the Program or its derivative works. These actions are | ||
183 | prohibited by law if you do not accept this License. Therefore, by | ||
184 | modifying or distributing the Program (or any work based on the | ||
185 | Program), you indicate your acceptance of this License to do so, and | ||
186 | all its terms and conditions for copying, distributing or modifying | ||
187 | the Program or works based on it. | ||
188 | |||
189 | 6. Each time you redistribute the Program (or any work based on the | ||
190 | Program), the recipient automatically receives a license from the | ||
191 | original licensor to copy, distribute or modify the Program subject to | ||
192 | these terms and conditions. You may not impose any further | ||
193 | restrictions on the recipients' exercise of the rights granted herein. | ||
194 | You are not responsible for enforcing compliance by third parties to | ||
195 | this License. | ||
196 | |||
197 | 7. If, as a consequence of a court judgment or allegation of patent | ||
198 | infringement or for any other reason (not limited to patent issues), | ||
199 | conditions are imposed on you (whether by court order, agreement or | ||
200 | otherwise) that contradict the conditions of this License, they do not | ||
201 | excuse you from the conditions of this License. If you cannot | ||
202 | distribute so as to satisfy simultaneously your obligations under this | ||
203 | License and any other pertinent obligations, then as a consequence you | ||
204 | may not distribute the Program at all. For example, if a patent | ||
205 | license would not permit royalty-free redistribution of the Program by | ||
206 | all those who receive copies directly or indirectly through you, then | ||
207 | the only way you could satisfy both it and this License would be to | ||
208 | refrain entirely from distribution of the Program. | ||
209 | |||
210 | If any portion of this section is held invalid or unenforceable under | ||
211 | any particular circumstance, the balance of the section is intended to | ||
212 | apply and the section as a whole is intended to apply in other | ||
213 | circumstances. | ||
214 | |||
215 | It is not the purpose of this section to induce you to infringe any | ||
216 | patents or other property right claims or to contest validity of any | ||
217 | such claims; this section has the sole purpose of protecting the | ||
218 | integrity of the free software distribution system, which is | ||
219 | implemented by public license practices. Many people have made | ||
220 | generous contributions to the wide range of software distributed | ||
221 | through that system in reliance on consistent application of that | ||
222 | system; it is up to the author/donor to decide if he or she is willing | ||
223 | to distribute software through any other system and a licensee cannot | ||
224 | impose that choice. | ||
225 | |||
226 | This section is intended to make thoroughly clear what is believed to | ||
227 | be a consequence of the rest of this License. | ||
228 | |||
229 | 8. If the distribution and/or use of the Program is restricted in | ||
230 | certain countries either by patents or by copyrighted interfaces, the | ||
231 | original copyright holder who places the Program under this License | ||
232 | may add an explicit geographical distribution limitation excluding | ||
233 | those countries, so that distribution is permitted only in or among | ||
234 | countries not thus excluded. In such case, this License incorporates | ||
235 | the limitation as if written in the body of this License. | ||
236 | |||
237 | 9. The Free Software Foundation may publish revised and/or new versions | ||
238 | of the General Public License from time to time. Such new versions will | ||
239 | be similar in spirit to the present version, but may differ in detail to | ||
240 | address new problems or concerns. | ||
241 | |||
242 | Each version is given a distinguishing version number. If the Program | ||
243 | specifies a version number of this License which applies to it and "any | ||
244 | later version", you have the option of following the terms and conditions | ||
245 | either of that version or of any later version published by the Free | ||
246 | Software Foundation. If the Program does not specify a version number of | ||
247 | this License, you may choose any version ever published by the Free Software | ||
248 | Foundation. | ||
249 | |||
250 | 10. If you wish to incorporate parts of the Program into other free | ||
251 | programs whose distribution conditions are different, write to the author | ||
252 | to ask for permission. For software which is copyrighted by the Free | ||
253 | Software Foundation, write to the Free Software Foundation; we sometimes | ||
254 | make exceptions for this. Our decision will be guided by the two goals | ||
255 | of preserving the free status of all derivatives of our free software and | ||
256 | of promoting the sharing and reuse of software generally. | ||
257 | |||
258 | NO WARRANTY | ||
259 | |||
260 | 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY | ||
261 | FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN | ||
262 | OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES | ||
263 | PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED | ||
264 | OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF | ||
265 | MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS | ||
266 | TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE | ||
267 | PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, | ||
268 | REPAIR OR CORRECTION. | ||
269 | |||
270 | 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING | ||
271 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR | ||
272 | REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, | ||
273 | INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING | ||
274 | OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED | ||
275 | TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY | ||
276 | YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER | ||
277 | PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE | ||
278 | POSSIBILITY OF SUCH DAMAGES. | ||
279 | |||
280 | END OF TERMS AND CONDITIONS | ||
281 | |||
282 | How to Apply These Terms to Your New Programs | ||
283 | |||
284 | If you develop a new program, and you want it to be of the greatest | ||
285 | possible use to the public, the best way to achieve this is to make it | ||
286 | free software which everyone can redistribute and change under these terms. | ||
287 | |||
288 | To do so, attach the following notices to the program. It is safest | ||
289 | to attach them to the start of each source file to most effectively | ||
290 | convey the exclusion of warranty; and each file should have at least | ||
291 | the "copyright" line and a pointer to where the full notice is found. | ||
292 | |||
293 | <one line to give the program's name and a brief idea of what it does.> | ||
294 | Copyright (C) <year> <name of author> | ||
295 | |||
296 | This program is free software; you can redistribute it and/or modify | ||
297 | it under the terms of the GNU General Public License as published by | ||
298 | the Free Software Foundation; either version 2 of the License, or | ||
299 | (at your option) any later version. | ||
300 | |||
301 | This program is distributed in the hope that it will be useful, | ||
302 | but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
303 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
304 | GNU General Public License for more details. | ||
305 | |||
306 | You should have received a copy of the GNU General Public License along | ||
307 | with this program; if not, write to the Free Software Foundation, Inc., | ||
308 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
309 | |||
310 | Also add information on how to contact you by electronic and paper mail. | ||
311 | |||
312 | If the program is interactive, make it output a short notice like this | ||
313 | when it starts in an interactive mode: | ||
314 | |||
315 | Gnomovision version 69, Copyright (C) year name of author | ||
316 | Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. | ||
317 | This is free software, and you are welcome to redistribute it | ||
318 | under certain conditions; type `show c' for details. | ||
319 | |||
320 | The hypothetical commands `show w' and `show c' should show the appropriate | ||
321 | parts of the General Public License. Of course, the commands you use may | ||
322 | be called something other than `show w' and `show c'; they could even be | ||
323 | mouse-clicks or menu items--whatever suits your program. | ||
324 | |||
325 | You should also get your employer (if you work as a programmer) or your | ||
326 | school, if any, to sign a "copyright disclaimer" for the program, if | ||
327 | necessary. Here is a sample; alter the names: | ||
328 | |||
329 | Yoyodyne, Inc., hereby disclaims all copyright interest in the program | ||
330 | `Gnomovision' (which makes passes at compilers) written by James Hacker. | ||
331 | |||
332 | <signature of Ty Coon>, 1 April 1989 | ||
333 | Ty Coon, President of Vice | ||
334 | |||
335 | This General Public License does not permit incorporating your program into | ||
336 | proprietary programs. If your program is a subroutine library, you may | ||
337 | consider it more useful to permit linking proprietary applications with the | ||
338 | library. If this is what you want to do, use the GNU Lesser General | ||
339 | Public License instead of this License. | ||
diff --git a/meta/recipes-bsp/usbinit/usbinit/usb-gether b/meta/recipes-bsp/usbinit/usbinit/usb-gether deleted file mode 100755 index e80a0bb30e..0000000000 --- a/meta/recipes-bsp/usbinit/usbinit/usb-gether +++ /dev/null | |||
@@ -1,23 +0,0 @@ | |||
1 | #! /bin/sh | ||
2 | # | ||
3 | # usb-ether Start up the gadget usb ethernet interface. | ||
4 | # | ||
5 | |||
6 | case "$1" in | ||
7 | start|"") | ||
8 | test "$VERBOSE" != no && echo "Initializing g_ether gadget..." | ||
9 | modprobe g_ether | ||
10 | ifup usb0 | ||
11 | ;; | ||
12 | stop) | ||
13 | test "$VERBOSE" != no && echo "Disabling g_ether..." | ||
14 | ifdown usb0 | ||
15 | rmmod g_ether | ||
16 | ;; | ||
17 | *) | ||
18 | echo "Usage: usb-ether {start|stop}" >&2 | ||
19 | exit 1 | ||
20 | ;; | ||
21 | esac | ||
22 | |||
23 | exit 0 | ||
diff --git a/meta/recipes-connectivity/bind/bind_9.18.26.bb b/meta/recipes-connectivity/bind/bind_9.18.27.bb index b99f92537c..98b8bb5513 100644 --- a/meta/recipes-connectivity/bind/bind_9.18.26.bb +++ b/meta/recipes-connectivity/bind/bind_9.18.27.bb | |||
@@ -20,7 +20,7 @@ SRC_URI = "https://ftp.isc.org/isc/bind9/${PV}/${BPN}-${PV}.tar.xz \ | |||
20 | file://0001-avoid-start-failure-with-bind-user.patch \ | 20 | file://0001-avoid-start-failure-with-bind-user.patch \ |
21 | " | 21 | " |
22 | 22 | ||
23 | SRC_URI[sha256sum] = "75ffee52731e9604c849b658df29e927f1c4f01d5a71ea3ebcbeb63702cb6651" | 23 | SRC_URI[sha256sum] = "ea3f3d8cfa2f6ae78c8722751d008f54bc17a3aed2be3f7399eb7bf5f4cda8f1" |
24 | 24 | ||
25 | UPSTREAM_CHECK_URI = "https://ftp.isc.org/isc/bind9/" | 25 | UPSTREAM_CHECK_URI = "https://ftp.isc.org/isc/bind9/" |
26 | # follow the ESV versions divisible by 2 | 26 | # follow the ESV versions divisible by 2 |
diff --git a/meta/recipes-connectivity/connman/connman-conf.bb b/meta/recipes-connectivity/connman/connman-conf.bb index a1a0e08faa..73c54cddae 100644 --- a/meta/recipes-connectivity/connman/connman-conf.bb +++ b/meta/recipes-connectivity/connman/connman-conf.bb | |||
@@ -8,7 +8,8 @@ LIC_FILES_CHKSUM = "file://${COREBASE}/meta/files/common-licenses/GPL-2.0-only;m | |||
8 | SRC_URI = "file://main.conf \ | 8 | SRC_URI = "file://main.conf \ |
9 | " | 9 | " |
10 | 10 | ||
11 | S = "${WORKDIR}" | 11 | S = "${WORKDIR}/sources" |
12 | UNPACKDIR = "${S}" | ||
12 | 13 | ||
13 | PACKAGE_ARCH = "${MACHINE_ARCH}" | 14 | PACKAGE_ARCH = "${MACHINE_ARCH}" |
14 | 15 | ||
diff --git a/meta/recipes-connectivity/connman/connman-gnome_0.7.bb b/meta/recipes-connectivity/connman/connman-gnome_0.7.bb index f6150d98ec..46b3f854c5 100644 --- a/meta/recipes-connectivity/connman/connman-gnome_0.7.bb +++ b/meta/recipes-connectivity/connman/connman-gnome_0.7.bb | |||
@@ -28,3 +28,7 @@ RDEPENDS:${PN} = "connman" | |||
28 | do_install:append() { | 28 | do_install:append() { |
29 | install -m 0644 ${UNPACKDIR}/images/* ${D}/usr/share/icons/hicolor/22x22/apps/ | 29 | install -m 0644 ${UNPACKDIR}/images/* ${D}/usr/share/icons/hicolor/22x22/apps/ |
30 | } | 30 | } |
31 | |||
32 | # http://errors.yoctoproject.org/Errors/Details/766926/ | ||
33 | # connman-client.c:200:15: error: assignment to 'GtkTreeModel *' {aka 'struct _GtkTreeModel *'} from incompatible pointer type 'GtkTreeStore *' {aka 'struct _GtkTreeStore *'} [-Wincompatible-pointer-types] | ||
34 | CFLAGS += "-Wno-error=incompatible-pointer-types" | ||
diff --git a/meta/recipes-connectivity/iproute2/iproute2_6.8.0.bb b/meta/recipes-connectivity/iproute2/iproute2_6.9.0.bb index 68f7611943..d21885cd73 100644 --- a/meta/recipes-connectivity/iproute2/iproute2_6.8.0.bb +++ b/meta/recipes-connectivity/iproute2/iproute2_6.9.0.bb | |||
@@ -13,7 +13,7 @@ DEPENDS = "flex-native bison-native iptables libcap" | |||
13 | 13 | ||
14 | SRC_URI = "${KERNELORG_MIRROR}/linux/utils/net/${BPN}/${BP}.tar.xz" | 14 | SRC_URI = "${KERNELORG_MIRROR}/linux/utils/net/${BPN}/${BP}.tar.xz" |
15 | 15 | ||
16 | SRC_URI[sha256sum] = "03a6cca3d71a908d1f15f7b495be2b8fe851f941458dc4664900d7f45fcf68ce" | 16 | SRC_URI[sha256sum] = "2f643d09ea11a4a2a043c92e2b469b5f73228cbf241ae806760296ed0ec413d0" |
17 | 17 | ||
18 | inherit update-alternatives bash-completion pkgconfig | 18 | inherit update-alternatives bash-completion pkgconfig |
19 | 19 | ||
@@ -26,6 +26,8 @@ PACKAGECONFIG[selinux] = ",,libselinux" | |||
26 | 26 | ||
27 | IPROUTE2_MAKE_SUBDIRS = "lib tc ip bridge misc genl ${@bb.utils.filter('PACKAGECONFIG', 'devlink tipc rdma', d)}" | 27 | IPROUTE2_MAKE_SUBDIRS = "lib tc ip bridge misc genl ${@bb.utils.filter('PACKAGECONFIG', 'devlink tipc rdma', d)}" |
28 | 28 | ||
29 | # This is needed with GCC-14 and musl | ||
30 | CFLAGS += "-Wno-error=incompatible-pointer-types" | ||
29 | # CFLAGS are computed in Makefile and reference CCOPTS | 31 | # CFLAGS are computed in Makefile and reference CCOPTS |
30 | # | 32 | # |
31 | EXTRA_OEMAKE = "\ | 33 | EXTRA_OEMAKE = "\ |
diff --git a/meta/recipes-connectivity/iw/iw_6.7.bb b/meta/recipes-connectivity/iw/iw_6.9.bb index b46b54bc93..dc570d1124 100644 --- a/meta/recipes-connectivity/iw/iw_6.7.bb +++ b/meta/recipes-connectivity/iw/iw_6.9.bb | |||
@@ -14,7 +14,7 @@ SRC_URI = "http://www.kernel.org/pub/software/network/iw/${BP}.tar.gz \ | |||
14 | file://separate-objdir.patch \ | 14 | file://separate-objdir.patch \ |
15 | " | 15 | " |
16 | 16 | ||
17 | SRC_URI[sha256sum] = "b3ef3fa85fa1177b11d3e97d6d38cdfe10ee250ca31482b581f3bd0fc79cb015" | 17 | SRC_URI[sha256sum] = "4c3194778b175d58442907d51d1977e7270fce5cbebff0eab11c45c1da287a4b" |
18 | 18 | ||
19 | inherit pkgconfig | 19 | inherit pkgconfig |
20 | 20 | ||
diff --git a/meta/recipes-connectivity/mobile-broadband-provider-info/mobile-broadband-provider-info_git.bb b/meta/recipes-connectivity/mobile-broadband-provider-info/mobile-broadband-provider-info_git.bb index a4030b7b32..06ded45934 100644 --- a/meta/recipes-connectivity/mobile-broadband-provider-info/mobile-broadband-provider-info_git.bb +++ b/meta/recipes-connectivity/mobile-broadband-provider-info/mobile-broadband-provider-info_git.bb | |||
@@ -5,13 +5,13 @@ SECTION = "network" | |||
5 | LICENSE = "PD" | 5 | LICENSE = "PD" |
6 | LIC_FILES_CHKSUM = "file://COPYING;md5=87964579b2a8ece4bc6744d2dc9a8b04" | 6 | LIC_FILES_CHKSUM = "file://COPYING;md5=87964579b2a8ece4bc6744d2dc9a8b04" |
7 | 7 | ||
8 | SRCREV = "aae7c68671d225e6d35224613d5b98192b9b2ffe" | 8 | SRCREV = "55ba955d53305df96123534488fd160ea882b4dd" |
9 | PV = "20230416" | 9 | PV = "20240407" |
10 | PE = "1" | 10 | PE = "1" |
11 | 11 | ||
12 | SRC_URI = "git://gitlab.gnome.org/GNOME/mobile-broadband-provider-info.git;protocol=https;branch=main" | 12 | SRC_URI = "git://gitlab.gnome.org/GNOME/mobile-broadband-provider-info.git;protocol=https;branch=main" |
13 | S = "${WORKDIR}/git" | 13 | S = "${WORKDIR}/git" |
14 | 14 | ||
15 | inherit autotools | 15 | inherit meson |
16 | 16 | ||
17 | DEPENDS += "libxslt-native" | 17 | DEPENDS += "libxslt-native" |
diff --git a/meta/recipes-connectivity/ofono/ofono/0001-mbim-add-an-optional-TEMP_FAILURE_RETRY-macro-copy.patch b/meta/recipes-connectivity/ofono/ofono/0001-mbim-add-an-optional-TEMP_FAILURE_RETRY-macro-copy.patch deleted file mode 100644 index 8a5a300adc..0000000000 --- a/meta/recipes-connectivity/ofono/ofono/0001-mbim-add-an-optional-TEMP_FAILURE_RETRY-macro-copy.patch +++ /dev/null | |||
@@ -1,36 +0,0 @@ | |||
1 | From 22b52db4842611ac31a356f023fc09595384e2ad Mon Sep 17 00:00:00 2001 | ||
2 | From: Khem Raj <raj.khem@gmail.com> | ||
3 | Date: Thu, 23 May 2019 18:11:22 -0700 | ||
4 | Subject: [PATCH] mbim: add an optional TEMP_FAILURE_RETRY macro copy | ||
5 | |||
6 | Fixes build on musl which does not provide this macro | ||
7 | |||
8 | Upstream-Status: Submitted [https://lists.ofono.org/pipermail/ofono/2019-May/019370.html] | ||
9 | Signed-off-by: Khem Raj <raj.khem@gmail.com> | ||
10 | --- | ||
11 | drivers/mbimmodem/mbim-private.h | 9 +++++++++ | ||
12 | 1 file changed, 9 insertions(+) | ||
13 | |||
14 | diff --git a/drivers/mbimmodem/mbim-private.h b/drivers/mbimmodem/mbim-private.h | ||
15 | index e159235..51693ea 100644 | ||
16 | --- a/drivers/mbimmodem/mbim-private.h | ||
17 | +++ b/drivers/mbimmodem/mbim-private.h | ||
18 | @@ -21,6 +21,15 @@ | ||
19 | |||
20 | #define align_len(len, boundary) (((len)+(boundary)-1) & ~((boundary)-1)) | ||
21 | |||
22 | +#ifndef TEMP_FAILURE_RETRY | ||
23 | +#define TEMP_FAILURE_RETRY(expression) ({ \ | ||
24 | + __typeof(expression) __result; \ | ||
25 | + do { \ | ||
26 | + __result = (expression); \ | ||
27 | + } while (__result == -1 && errno == EINTR); \ | ||
28 | + __result; }) | ||
29 | +#endif | ||
30 | + | ||
31 | enum mbim_control_message { | ||
32 | MBIM_OPEN_MSG = 0x1, | ||
33 | MBIM_CLOSE_MSG = 0x2, | ||
34 | -- | ||
35 | 2.21.0 | ||
36 | |||
diff --git a/meta/recipes-connectivity/ofono/ofono/0002-mbim-Fix-build-with-ell-0.39-by-restoring-unlikely-m.patch b/meta/recipes-connectivity/ofono/ofono/0002-mbim-Fix-build-with-ell-0.39-by-restoring-unlikely-m.patch deleted file mode 100644 index 3655b3fd66..0000000000 --- a/meta/recipes-connectivity/ofono/ofono/0002-mbim-Fix-build-with-ell-0.39-by-restoring-unlikely-m.patch +++ /dev/null | |||
@@ -1,28 +0,0 @@ | |||
1 | From 76e4054801350ebd4a44057379431a33d460ad0f Mon Sep 17 00:00:00 2001 | ||
2 | From: Martin Jansa <Martin.Jansa@gmail.com> | ||
3 | Date: Wed, 21 Apr 2021 11:01:34 +0000 | ||
4 | Subject: [PATCH] mbim: Fix build with ell-0.39 by restoring unlikely macro | ||
5 | from ell/util.h | ||
6 | |||
7 | Upstream-Status: Pending | ||
8 | |||
9 | Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com> | ||
10 | --- | ||
11 | drivers/mbimmodem/mbim-private.h | 4 ++++ | ||
12 | 1 file changed, 4 insertions(+) | ||
13 | |||
14 | diff --git a/drivers/mbimmodem/mbim-private.h b/drivers/mbimmodem/mbim-private.h | ||
15 | index 51693eae..d917312c 100644 | ||
16 | --- a/drivers/mbimmodem/mbim-private.h | ||
17 | +++ b/drivers/mbimmodem/mbim-private.h | ||
18 | @@ -30,6 +30,10 @@ | ||
19 | __result; }) | ||
20 | #endif | ||
21 | |||
22 | +/* used to be part of ell/util.h before 0.39: | ||
23 | + https://git.kernel.org/pub/scm/libs/ell/ell.git/commit/?id=2a682421b06e41c45098217a686157f576847021 */ | ||
24 | +#define unlikely(x) __builtin_expect(!!(x), 0) | ||
25 | + | ||
26 | enum mbim_control_message { | ||
27 | MBIM_OPEN_MSG = 0x1, | ||
28 | MBIM_CLOSE_MSG = 0x2, | ||
diff --git a/meta/recipes-connectivity/ofono/ofono_2.4.bb b/meta/recipes-connectivity/ofono/ofono_2.7.bb index 5e1e5f3b6a..bc1f3efd04 100644 --- a/meta/recipes-connectivity/ofono/ofono_2.4.bb +++ b/meta/recipes-connectivity/ofono/ofono_2.7.bb | |||
@@ -10,10 +10,8 @@ DEPENDS = "dbus glib-2.0 udev mobile-broadband-provider-info ell" | |||
10 | SRC_URI = "\ | 10 | SRC_URI = "\ |
11 | ${KERNELORG_MIRROR}/linux/network/${BPN}/${BP}.tar.xz \ | 11 | ${KERNELORG_MIRROR}/linux/network/${BPN}/${BP}.tar.xz \ |
12 | file://ofono \ | 12 | file://ofono \ |
13 | file://0001-mbim-add-an-optional-TEMP_FAILURE_RETRY-macro-copy.patch \ | ||
14 | file://0002-mbim-Fix-build-with-ell-0.39-by-restoring-unlikely-m.patch \ | ||
15 | " | 13 | " |
16 | SRC_URI[sha256sum] = "93580adc1afd1890dc516efb069de0c5cdfef014415256ddfb28ab172df2d11d" | 14 | SRC_URI[sha256sum] = "dabf6ef06b94beaad65253200abe3887046a4e722f4fe373c4264f357ae47ad3" |
17 | 15 | ||
18 | inherit autotools pkgconfig update-rc.d systemd gobject-introspection-data | 16 | inherit autotools pkgconfig update-rc.d systemd gobject-introspection-data |
19 | 17 | ||
@@ -30,11 +28,6 @@ PACKAGECONFIG[bluez] = "--enable-bluetooth, --disable-bluetooth, bluez5" | |||
30 | 28 | ||
31 | EXTRA_OECONF += "--enable-test --enable-external-ell" | 29 | EXTRA_OECONF += "--enable-test --enable-external-ell" |
32 | 30 | ||
33 | do_configure:prepend() { | ||
34 | bbnote "Removing bundled ell from ${S}/ell to prevent including it" | ||
35 | rm -rf ${S}/ell | ||
36 | } | ||
37 | |||
38 | do_install:append() { | 31 | do_install:append() { |
39 | install -d ${D}${sysconfdir}/init.d/ | 32 | install -d ${D}${sysconfdir}/init.d/ |
40 | install -m 0755 ${UNPACKDIR}/ofono ${D}${sysconfdir}/init.d/ofono | 33 | install -m 0755 ${UNPACKDIR}/ofono ${D}${sysconfdir}/init.d/ofono |
diff --git a/meta/recipes-connectivity/openssh/openssh_9.7p1.bb b/meta/recipes-connectivity/openssh/openssh_9.7p1.bb index 36ffa49398..82da92f63f 100644 --- a/meta/recipes-connectivity/openssh/openssh_9.7p1.bb +++ b/meta/recipes-connectivity/openssh/openssh_9.7p1.bb | |||
@@ -112,7 +112,7 @@ do_compile_ptest() { | |||
112 | 112 | ||
113 | do_install:append () { | 113 | do_install:append () { |
114 | if [ "${@bb.utils.filter('DISTRO_FEATURES', 'pam', d)}" ]; then | 114 | if [ "${@bb.utils.filter('DISTRO_FEATURES', 'pam', d)}" ]; then |
115 | install -D -m 0644 ${WORKDIR}/sshd ${D}${sysconfdir}/pam.d/sshd | 115 | install -D -m 0644 ${UNPACKDIR}/sshd ${D}${sysconfdir}/pam.d/sshd |
116 | sed -i -e 's:#UsePAM no:UsePAM yes:' ${D}${sysconfdir}/ssh/sshd_config | 116 | sed -i -e 's:#UsePAM no:UsePAM yes:' ${D}${sysconfdir}/ssh/sshd_config |
117 | fi | 117 | fi |
118 | 118 | ||
@@ -121,11 +121,11 @@ do_install:append () { | |||
121 | fi | 121 | fi |
122 | 122 | ||
123 | install -d ${D}${sysconfdir}/init.d | 123 | install -d ${D}${sysconfdir}/init.d |
124 | install -m 0755 ${WORKDIR}/init ${D}${sysconfdir}/init.d/sshd | 124 | install -m 0755 ${UNPACKDIR}/init ${D}${sysconfdir}/init.d/sshd |
125 | rm -f ${D}${bindir}/slogin ${D}${datadir}/Ssh.bin | 125 | rm -f ${D}${bindir}/slogin ${D}${datadir}/Ssh.bin |
126 | rmdir ${D}${localstatedir}/run/sshd ${D}${localstatedir}/run ${D}${localstatedir} | 126 | rmdir ${D}${localstatedir}/run/sshd ${D}${localstatedir}/run ${D}${localstatedir} |
127 | install -d ${D}/${sysconfdir}/default/volatiles | 127 | install -d ${D}/${sysconfdir}/default/volatiles |
128 | install -m 644 ${WORKDIR}/volatiles.99_sshd ${D}/${sysconfdir}/default/volatiles/99_sshd | 128 | install -m 644 ${UNPACKDIR}/volatiles.99_sshd ${D}/${sysconfdir}/default/volatiles/99_sshd |
129 | install -m 0755 ${S}/contrib/ssh-copy-id ${D}${bindir} | 129 | install -m 0755 ${S}/contrib/ssh-copy-id ${D}${bindir} |
130 | 130 | ||
131 | # Create config files for read-only rootfs | 131 | # Create config files for read-only rootfs |
@@ -138,8 +138,8 @@ do_install:append () { | |||
138 | 138 | ||
139 | install -d ${D}${systemd_system_unitdir} | 139 | install -d ${D}${systemd_system_unitdir} |
140 | if ${@bb.utils.contains('PACKAGECONFIG','systemd-sshd-socket-mode','true','false',d)}; then | 140 | if ${@bb.utils.contains('PACKAGECONFIG','systemd-sshd-socket-mode','true','false',d)}; then |
141 | install -c -m 0644 ${WORKDIR}/sshd.socket ${D}${systemd_system_unitdir} | 141 | install -c -m 0644 ${UNPACKDIR}/sshd.socket ${D}${systemd_system_unitdir} |
142 | install -c -m 0644 ${WORKDIR}/sshd@.service ${D}${systemd_system_unitdir} | 142 | install -c -m 0644 ${UNPACKDIR}/sshd@.service ${D}${systemd_system_unitdir} |
143 | sed -i -e 's,@BASE_BINDIR@,${base_bindir},g' \ | 143 | sed -i -e 's,@BASE_BINDIR@,${base_bindir},g' \ |
144 | -e 's,@SBINDIR@,${sbindir},g' \ | 144 | -e 's,@SBINDIR@,${sbindir},g' \ |
145 | -e 's,@BINDIR@,${bindir},g' \ | 145 | -e 's,@BINDIR@,${bindir},g' \ |
@@ -147,9 +147,9 @@ do_install:append () { | |||
147 | ${D}${systemd_system_unitdir}/sshd.socket | 147 | ${D}${systemd_system_unitdir}/sshd.socket |
148 | fi | 148 | fi |
149 | if ${@bb.utils.contains('PACKAGECONFIG','systemd-sshd-service-mode','true','false',d)}; then | 149 | if ${@bb.utils.contains('PACKAGECONFIG','systemd-sshd-service-mode','true','false',d)}; then |
150 | install -c -m 0644 ${WORKDIR}/sshd.service ${D}${systemd_system_unitdir} | 150 | install -c -m 0644 ${UNPACKDIR}/sshd.service ${D}${systemd_system_unitdir} |
151 | fi | 151 | fi |
152 | install -c -m 0644 ${WORKDIR}/sshdgenkeys.service ${D}${systemd_system_unitdir} | 152 | install -c -m 0644 ${UNPACKDIR}/sshdgenkeys.service ${D}${systemd_system_unitdir} |
153 | sed -i -e 's,@BASE_BINDIR@,${base_bindir},g' \ | 153 | sed -i -e 's,@BASE_BINDIR@,${base_bindir},g' \ |
154 | -e 's,@SBINDIR@,${sbindir},g' \ | 154 | -e 's,@SBINDIR@,${sbindir},g' \ |
155 | -e 's,@BINDIR@,${bindir},g' \ | 155 | -e 's,@BINDIR@,${bindir},g' \ |
@@ -159,7 +159,7 @@ do_install:append () { | |||
159 | sed -i -e 's,@LIBEXECDIR@,${libexecdir}/${BPN},g' \ | 159 | sed -i -e 's,@LIBEXECDIR@,${libexecdir}/${BPN},g' \ |
160 | ${D}${sysconfdir}/init.d/sshd | 160 | ${D}${sysconfdir}/init.d/sshd |
161 | 161 | ||
162 | install -D -m 0755 ${WORKDIR}/sshd_check_keys ${D}${libexecdir}/${BPN}/sshd_check_keys | 162 | install -D -m 0755 ${UNPACKDIR}/sshd_check_keys ${D}${libexecdir}/${BPN}/sshd_check_keys |
163 | } | 163 | } |
164 | 164 | ||
165 | do_install_ptest () { | 165 | do_install_ptest () { |
diff --git a/meta/recipes-connectivity/openssl/openssl/0001-Implement-riscv_vlen_asm-for-riscv32.patch b/meta/recipes-connectivity/openssl/openssl/0001-Implement-riscv_vlen_asm-for-riscv32.patch new file mode 100644 index 0000000000..e398d1074a --- /dev/null +++ b/meta/recipes-connectivity/openssl/openssl/0001-Implement-riscv_vlen_asm-for-riscv32.patch | |||
@@ -0,0 +1,43 @@ | |||
1 | From 725b1530456545e8511adc9cbdd265309dffad53 Mon Sep 17 00:00:00 2001 | ||
2 | From: Hongren Zheng <i@zenithal.me> | ||
3 | Date: Fri, 26 Apr 2024 06:03:43 +0000 | ||
4 | Subject: [PATCH] Implement riscv_vlen_asm for riscv32 | ||
5 | |||
6 | riscvcap.c: undefined reference to 'riscv_vlen_asm' | ||
7 | |||
8 | Upstream-Status: Backport [https://github.com/openssl/openssl/pull/24270] | ||
9 | Signed-off-by: Khem Raj <raj.khem@gmail.com> | ||
10 | --- | ||
11 | crypto/riscv32cpuid.pl | 17 +++++++++++++++++ | ||
12 | 1 file changed, 17 insertions(+) | ||
13 | |||
14 | diff --git a/crypto/riscv32cpuid.pl b/crypto/riscv32cpuid.pl | ||
15 | index 20694e7..ac1c043 100644 | ||
16 | --- a/crypto/riscv32cpuid.pl | ||
17 | +++ b/crypto/riscv32cpuid.pl | ||
18 | @@ -84,5 +84,22 @@ OPENSSL_cleanse: | ||
19 | ___ | ||
20 | } | ||
21 | |||
22 | +{ | ||
23 | +my ($ret) = ('a0'); | ||
24 | +$code .= <<___; | ||
25 | +################################################################################ | ||
26 | +# size_t riscv_vlen_asm(void) | ||
27 | +# Return VLEN (i.e. the length of a vector register in bits). | ||
28 | +.p2align 3 | ||
29 | +.globl riscv_vlen_asm | ||
30 | +.type riscv_vlen_asm,\@function | ||
31 | +riscv_vlen_asm: | ||
32 | + csrr $ret, vlenb | ||
33 | + slli $ret, $ret, 3 | ||
34 | + ret | ||
35 | +.size riscv_vlen_asm,.-riscv_vlen_asm | ||
36 | +___ | ||
37 | +} | ||
38 | + | ||
39 | print $code; | ||
40 | close STDOUT or die "error closing STDOUT: $!"; | ||
41 | -- | ||
42 | 2.45.0 | ||
43 | |||
diff --git a/meta/recipes-connectivity/openssl/openssl/CVE-2024-4603.patch b/meta/recipes-connectivity/openssl/openssl/CVE-2024-4603.patch new file mode 100644 index 0000000000..cdc3d0d503 --- /dev/null +++ b/meta/recipes-connectivity/openssl/openssl/CVE-2024-4603.patch | |||
@@ -0,0 +1,179 @@ | |||
1 | From 53ea06486d296b890d565fb971b2764fcd826e7e Mon Sep 17 00:00:00 2001 | ||
2 | From: Tomas Mraz <tomas@openssl.org> | ||
3 | Date: Wed, 8 May 2024 15:23:45 +0200 | ||
4 | Subject: [PATCH] Check DSA parameters for excessive sizes before validating | ||
5 | |||
6 | This avoids overly long computation of various validation | ||
7 | checks. | ||
8 | |||
9 | Fixes CVE-2024-4603 | ||
10 | |||
11 | Reviewed-by: Paul Dale <ppzgs1@gmail.com> | ||
12 | Reviewed-by: Matt Caswell <matt@openssl.org> | ||
13 | Reviewed-by: Neil Horman <nhorman@openssl.org> | ||
14 | Reviewed-by: Shane Lontis <shane.lontis@oracle.com> | ||
15 | (Merged from https://github.com/openssl/openssl/pull/24346) | ||
16 | |||
17 | (cherry picked from commit 85ccbab216da245cf9a6503dd327072f21950d9b) | ||
18 | |||
19 | <dropped CHANGES.md modifications as it would need backport of all previous changes> | ||
20 | |||
21 | CVE: CVE-2024-4603 | ||
22 | Upstream-Status: Backport [https://github.com/openssl/openssl/commit/53ea06486d296b890d565fb971b2764fcd826e7e] | ||
23 | Signed-off-by: Peter Marko <peter.marko@siemens.com> | ||
24 | --- | ||
25 | crypto/dsa/dsa_check.c | 44 ++++++++++++-- | ||
26 | .../invalid/p10240_q256_too_big.pem | 57 +++++++++++++++++++ | ||
27 | 2 files changed, 97 insertions(+), 4 deletions(-) | ||
28 | |||
29 | diff --git a/crypto/dsa/dsa_check.c b/crypto/dsa/dsa_check.c | ||
30 | index 7b6d7df88f..e1375dfad9 100644 | ||
31 | --- a/crypto/dsa/dsa_check.c | ||
32 | +++ b/crypto/dsa/dsa_check.c | ||
33 | @@ -19,8 +19,34 @@ | ||
34 | #include "dsa_local.h" | ||
35 | #include "crypto/dsa.h" | ||
36 | |||
37 | +static int dsa_precheck_params(const DSA *dsa, int *ret) | ||
38 | +{ | ||
39 | + if (dsa->params.p == NULL || dsa->params.q == NULL) { | ||
40 | + ERR_raise(ERR_LIB_DSA, DSA_R_BAD_FFC_PARAMETERS); | ||
41 | + *ret = FFC_CHECK_INVALID_PQ; | ||
42 | + return 0; | ||
43 | + } | ||
44 | + | ||
45 | + if (BN_num_bits(dsa->params.p) > OPENSSL_DSA_MAX_MODULUS_BITS) { | ||
46 | + ERR_raise(ERR_LIB_DSA, DSA_R_MODULUS_TOO_LARGE); | ||
47 | + *ret = FFC_CHECK_INVALID_PQ; | ||
48 | + return 0; | ||
49 | + } | ||
50 | + | ||
51 | + if (BN_num_bits(dsa->params.q) >= BN_num_bits(dsa->params.p)) { | ||
52 | + ERR_raise(ERR_LIB_DSA, DSA_R_BAD_Q_VALUE); | ||
53 | + *ret = FFC_CHECK_INVALID_PQ; | ||
54 | + return 0; | ||
55 | + } | ||
56 | + | ||
57 | + return 1; | ||
58 | +} | ||
59 | + | ||
60 | int ossl_dsa_check_params(const DSA *dsa, int checktype, int *ret) | ||
61 | { | ||
62 | + if (!dsa_precheck_params(dsa, ret)) | ||
63 | + return 0; | ||
64 | + | ||
65 | if (checktype == OSSL_KEYMGMT_VALIDATE_QUICK_CHECK) | ||
66 | return ossl_ffc_params_simple_validate(dsa->libctx, &dsa->params, | ||
67 | FFC_PARAM_TYPE_DSA, ret); | ||
68 | @@ -39,6 +65,9 @@ int ossl_dsa_check_params(const DSA *dsa, int checktype, int *ret) | ||
69 | */ | ||
70 | int ossl_dsa_check_pub_key(const DSA *dsa, const BIGNUM *pub_key, int *ret) | ||
71 | { | ||
72 | + if (!dsa_precheck_params(dsa, ret)) | ||
73 | + return 0; | ||
74 | + | ||
75 | return ossl_ffc_validate_public_key(&dsa->params, pub_key, ret) | ||
76 | && *ret == 0; | ||
77 | } | ||
78 | @@ -50,6 +79,9 @@ int ossl_dsa_check_pub_key(const DSA *dsa, const BIGNUM *pub_key, int *ret) | ||
79 | */ | ||
80 | int ossl_dsa_check_pub_key_partial(const DSA *dsa, const BIGNUM *pub_key, int *ret) | ||
81 | { | ||
82 | + if (!dsa_precheck_params(dsa, ret)) | ||
83 | + return 0; | ||
84 | + | ||
85 | return ossl_ffc_validate_public_key_partial(&dsa->params, pub_key, ret) | ||
86 | && *ret == 0; | ||
87 | } | ||
88 | @@ -58,8 +90,10 @@ int ossl_dsa_check_priv_key(const DSA *dsa, const BIGNUM *priv_key, int *ret) | ||
89 | { | ||
90 | *ret = 0; | ||
91 | |||
92 | - return (dsa->params.q != NULL | ||
93 | - && ossl_ffc_validate_private_key(dsa->params.q, priv_key, ret)); | ||
94 | + if (!dsa_precheck_params(dsa, ret)) | ||
95 | + return 0; | ||
96 | + | ||
97 | + return ossl_ffc_validate_private_key(dsa->params.q, priv_key, ret); | ||
98 | } | ||
99 | |||
100 | /* | ||
101 | @@ -72,8 +106,10 @@ int ossl_dsa_check_pairwise(const DSA *dsa) | ||
102 | BN_CTX *ctx = NULL; | ||
103 | BIGNUM *pub_key = NULL; | ||
104 | |||
105 | - if (dsa->params.p == NULL | ||
106 | - || dsa->params.g == NULL | ||
107 | + if (!dsa_precheck_params(dsa, &ret)) | ||
108 | + return 0; | ||
109 | + | ||
110 | + if (dsa->params.g == NULL | ||
111 | || dsa->priv_key == NULL | ||
112 | || dsa->pub_key == NULL) | ||
113 | return 0; | ||
114 | diff --git a/test/recipes/15-test_dsaparam_data/invalid/p10240_q256_too_big.pem b/test/recipes/15-test_dsaparam_data/invalid/p10240_q256_too_big.pem | ||
115 | new file mode 100644 | ||
116 | index 0000000000..e85e2953b7 | ||
117 | --- /dev/null | ||
118 | +++ b/test/recipes/15-test_dsaparam_data/invalid/p10240_q256_too_big.pem | ||
119 | @@ -0,0 +1,57 @@ | ||
120 | +-----BEGIN DSA PARAMETERS----- | ||
121 | +MIIKLAKCBQEAym47LzPFZdbz16WvjczLKuzLtsP8yRk/exxL4bBthJhP1qOwctja | ||
122 | +p1586SF7gDxCMn7yWVEYdfRbFefGoq0gj1XOE917XqlbnkmZhMgxut2KbNJo/xil | ||
123 | +XNFUjGvKs3F413U9rAodC8f07cWHP1iTcWL+vPe6u2yilKWYYfnLWHQH+Z6aPrrF | ||
124 | +x/R08LI6DZ6nEsIo+hxaQnEtx+iqNTJC6Q1RIjWDqxQkFVTkJ0Y7miRDXmRdneWk | ||
125 | +oLrMZRpaXr5l5tSjEghh1pBgJcdyOv0lh4dlDy/alAiqE2Qlb667yHl6A9dDPlpW | ||
126 | +dAntpffy4LwOxfbuEhISvKjjQoBwIvYE4TBPqL0Q6bC6HgQ4+tqd9b44pQjdIQjb | ||
127 | +Xcjc6azheITSnPEex3OdKtKoQeRq01qCeLBpMXu1c+CTf4ApKArZvT3vZSg0hM1O | ||
128 | +pR71bRZrEEegDj0LH2HCgI5W6H3blOS9A0kUTddCoQXr2lsVdiPtRbPKH1gcd9FQ | ||
129 | +P8cGrvbakpTiC0dCczOMDaCteM1QNILlkM7ZoV6VghsKvDnFPxFsiIr5GgjasXP5 | ||
130 | +hhbn3g7sDoq1LiTEo+IKQY28pBWx7etSOSRuXW/spnvCkivZla7lSEGljoy9QlQ2 | ||
131 | +UZmsEQI9G3YyzgpxHvKZBK1CiZVTywdYKTZ4TYCxvqzhYhjv2bqbpjI12HRFLojB | ||
132 | +koyEmMSp53lldCzp158PrIanqSp2rksMR8SmmCL3FwfAp2OjqFMEglG9DT8x0WaN | ||
133 | +TLSkjGC6t2csMte7WyU1ekNoFDKfMjDSAz0+xIx21DEmZtYqFOg1DNPK1xYLS0pl | ||
134 | +RSMRRkJVN2mk/G7/1oxlB8Wb9wgi3GKUqqCYT11SnBjzq0NdoJ3E4GMedp5Lx3AZ | ||
135 | +4mFuRPUd4iV86tE0XDSHSFE7Y3ZkrOjD7Q/26/L53L/UH5z4HW6CHP5os7QERJjg | ||
136 | +c1S3x87wXWo9QXbB9b2xmf+c+aWwAAr1cviw38tru58jF3/IGyduj9H8claKQqBG | ||
137 | +cIOUF4aNe1hK2K3ArAOApUxr4KE+tCvrltRfiTmVFip0g9Jt1CPY3Zu7Bd4Z2ZkE | ||
138 | +DtSztpwa49HrWF5E9xpquvBL2U8jQ68E7Xd8Wp4orI/TIChriamBmdkgRz3H2LvN | ||
139 | +Ozb6+hsnEGrz3sp2RVAToSqA9ysa6nHZdfufPNtMEbQdO/k1ehmGRb0ljBRsO6b2 | ||
140 | +rsG2eYuC8tg8eCrIkua0TGRI7g6a4K32AJdzaX6NsISaaIW+OYJuoDSscvD3oOg8 | ||
141 | +PPEhU+zM7xJskTA+jxvPlikKx8V7MNHOCQECldJlUBwzJvqp40JvwfnDsF+8VYwd | ||
142 | +UaiieR3pzMzyTjpReXRmZbnRPusRcsVzxb2OhB79wmuy4UPjjQBX+7eD0rs8xxvW | ||
143 | +5a5q1Cjq4AvbwmmcA/wDrHDOjcbD/zodad2O1QtBWa/R4xyWea4zKsflgACE1zY9 | ||
144 | +wW2br7+YQFekcrXkkkEzgxd6zxv8KVEDpXRZjmAM1cI5LvkoN64To4GedN8Qe/G7 | ||
145 | +R9SZh9gnS17PTP64hK+aYqhFafMdu87q/+qLfxaSux727qE5hiW01u4nnWhACf9s | ||
146 | +xuOozowKqxZxkolMIyZv6Lddwy1Zv5qjCyd0DvM/1skpXWkb9kfabYC+OhjsjVhs | ||
147 | +0Ktfs6a5B3eixiw5x94hhIcTEcS4hmvhGUL72FiTca6ZeSERTKmNBy8CIQC9/ZUN | ||
148 | +uU/V5JTcnYyUGHzm7+XcZBjyGBagBj9rCmW3SQKCBQAJ/k9rb39f1cO+/3XDEMjy | ||
149 | +9bIEXSuS48g5RAc1UGd5nrrBQwuDxGWFyz0yvAY7LgyidZuJS21+MAp9EY7AOMmx | ||
150 | +TDttifNaBJYt4GZ8of166PcqTKkHQwq5uBpxeSDv/ZE8YbYfaCtLTcUC8KlO+l36 | ||
151 | +gjJHSkdkflSsGy1yObSNDQDfVAAwQs//TjDMnuEtvlNXZllsTvFFBceXVETn10K2 | ||
152 | +ZMmdSIJNfLnjReUKEN6PfeGqv7F4xoyGwUybEfRE4u5RmXrqCODaIjY3SNMrOq8B | ||
153 | +R3Ata/cCozsM1jIdIW2z+OybDJH+BYsYm2nkSZQjZS6javTYClLrntEKG/hAQwL8 | ||
154 | +F16YLOQXpHhgiAaWnTZzANtLppB2+5qCVy5ElzKongOwT8JTjTFXOaRnqe/ngm9W | ||
155 | +SSbrxfDaoWUOyK9XD8Cydzpv3n4Y8nWNGayi7/yAFCU36Ri040ufgv/TZLuKacnl | ||
156 | ++3ga3ZUpRlSigzx0kb1+KjTSWeQ8vE/psdWjvBukVEbzdUauMLyRLo/6znSVvvPX | ||
157 | +UGhviThE5uhrsUg+wEPFINriSHfF7JDKVhDcJnLBdaXvfN52pkF/naLBF5Rt3Gvq | ||
158 | +fjCxjx0Sy9Lag1hDN4dor7dzuO7wmwOS01DJW1PtNLuuH0Bbqh1kYSaQkmyXBZWX | ||
159 | +qo8K3nkoDM0niOtJJubOhTNrGmSaZpNXkK3Mcy9rBbdvEs5O0Jmqaax/eOdU0Yot | ||
160 | +B3lX+3ddOseT2ZEFjzObqTtkWuFBeBxuYNcRTsu3qMdIBsEb8URQdsTtjoIja2fK | ||
161 | +hreVgjK36GW70KXEl8V/vq5qjQulmqkBEjmilcDuiREKqQuyeagUOnhQaBplqVco | ||
162 | +4xznh5DMBMRbpGb5lHxKv4cPNi+uNAJ5i98zWUM1JRt6aXnRCuWcll1z8fRZ+5kD | ||
163 | +vK9FaZU3VRMK/eknEG49cGr8OuJ6ZRSaC+tKwV1y+amkSZpKPWnk2bUnQI3ApJv3 | ||
164 | +k1e1EToeECpMUkLMDgNbpKBoz4nqMEvAAlYgw9xKNbLlQlahqTVEAmaJHh4yDMDy | ||
165 | +i7IZ9Wrn47IGoR7s3cvhDHUpRPeW4nsmgzj+tf5EAxemI61STZJTTWo0iaPGJxct | ||
166 | +9nhOOhw1I38Mvm4vkAbFH7YJ0B6QrjjYL2MbOTp5JiIh4vdOeWwNo9/y4ffyaN5+ | ||
167 | +ADpxuuIAmcbdr6GPOhkOFFixRJa0B2eP1i032HESlLs8RB9oYtdTXdXQotnIgJGd | ||
168 | +Y8tSKOa1zjzeLHn3AVpRZTUW++/BxmApV3GKIeG8fsUjg/df0QRrBcdC/1uccdaG | ||
169 | +KKlAOwlywVn5jUlwHkTmDiTM9w5AqVVGHZ2b+4ZgQW8jnPKN0SrKf6U555D+zp7E | ||
170 | +x4uXoE8ojN9y8m8UKf0cTLnujH2XgZorjPfuMOt5VZEhQFMS2QaljSeni5CJJ8gk | ||
171 | +XtztNqfBlAtWR4V5iAHeQOfIB2YaOy8GESda89tyKraKeaez41VblpTVHTeq9IIF | ||
172 | +YB4cQA2PfuNaGVRGLMAgT3Dvl+mxxxeJyxnGAiUcETU/jJJt9QombiuszBlYGQ5d | ||
173 | +ELOSm/eQSRARV9zNSt5jaQlMSjMBqenIEM09BzYqa7jDwqoztFxNdO8bcuQPuKwa | ||
174 | +4z3bBZ1yYm63WFdNbQqqGEwc0OYmqg1raJ0zltgHyjFyw8IGu4g/wETs+nVQcH7D | ||
175 | +vKuje86bePD6kD/LH3wmkA== | ||
176 | +-----END DSA PARAMETERS----- | ||
177 | -- | ||
178 | 2.30.2 | ||
179 | |||
diff --git a/meta/recipes-connectivity/openssl/openssl_3.3.0.bb b/meta/recipes-connectivity/openssl/openssl_3.3.0.bb index 113ed4bf95..a361185b65 100644 --- a/meta/recipes-connectivity/openssl/openssl_3.3.0.bb +++ b/meta/recipes-connectivity/openssl/openssl_3.3.0.bb | |||
@@ -12,7 +12,9 @@ SRC_URI = "http://www.openssl.org/source/openssl-${PV}.tar.gz \ | |||
12 | file://0001-buildinfo-strip-sysroot-and-debug-prefix-map-from-co.patch \ | 12 | file://0001-buildinfo-strip-sysroot-and-debug-prefix-map-from-co.patch \ |
13 | file://0001-Configure-do-not-tweak-mips-cflags.patch \ | 13 | file://0001-Configure-do-not-tweak-mips-cflags.patch \ |
14 | file://0001-Added-handshake-history-reporting-when-test-fails.patch \ | 14 | file://0001-Added-handshake-history-reporting-when-test-fails.patch \ |
15 | file://0001-Implement-riscv_vlen_asm-for-riscv32.patch \ | ||
15 | file://bti.patch \ | 16 | file://bti.patch \ |
17 | file://CVE-2024-4603.patch \ | ||
16 | " | 18 | " |
17 | 19 | ||
18 | SRC_URI:append:class-nativesdk = " \ | 20 | SRC_URI:append:class-nativesdk = " \ |
diff --git a/meta/recipes-connectivity/ppp-dialin/ppp-dialin_0.1.bb b/meta/recipes-connectivity/ppp-dialin/ppp-dialin_0.1.bb index 0c3085d3a8..0ee47d47c2 100644 --- a/meta/recipes-connectivity/ppp-dialin/ppp-dialin_0.1.bb +++ b/meta/recipes-connectivity/ppp-dialin/ppp-dialin_0.1.bb | |||
@@ -11,7 +11,8 @@ SRC_URI = "file://host-peer \ | |||
11 | 11 | ||
12 | inherit allarch useradd | 12 | inherit allarch useradd |
13 | 13 | ||
14 | S = "${WORKDIR}" | 14 | S = "${WORKDIR}/sources" |
15 | UNPACKDIR = "${S}" | ||
15 | 16 | ||
16 | do_install() { | 17 | do_install() { |
17 | install -d ${D}${sysconfdir}/ppp/peers | 18 | install -d ${D}${sysconfdir}/ppp/peers |
diff --git a/meta/recipes-connectivity/slirp/libslirp_git.bb b/meta/recipes-connectivity/slirp/libslirp_git.bb index 334b786b9b..05830ce833 100644 --- a/meta/recipes-connectivity/slirp/libslirp_git.bb +++ b/meta/recipes-connectivity/slirp/libslirp_git.bb | |||
@@ -5,8 +5,8 @@ LICENSE = "BSD-3-Clause & MIT" | |||
5 | LIC_FILES_CHKSUM = "file://COPYRIGHT;md5=bca0186b14e6b05e338e729f106db727" | 5 | LIC_FILES_CHKSUM = "file://COPYRIGHT;md5=bca0186b14e6b05e338e729f106db727" |
6 | 6 | ||
7 | SRC_URI = "git://gitlab.freedesktop.org/slirp/libslirp.git;protocol=https;branch=master" | 7 | SRC_URI = "git://gitlab.freedesktop.org/slirp/libslirp.git;protocol=https;branch=master" |
8 | SRCREV = "3ad1710a96678fe79066b1469cead4058713a1d9" | 8 | SRCREV = "ce314e39458223c2c42245fe536fbe1bcd94e9b1" |
9 | PV = "4.7.0" | 9 | PV = "4.8.0" |
10 | S = "${WORKDIR}/git" | 10 | S = "${WORKDIR}/git" |
11 | 11 | ||
12 | DEPENDS = " \ | 12 | DEPENDS = " \ |
diff --git a/meta/recipes-connectivity/ssh-pregen-hostkeys/ssh-pregen-hostkeys_1.0.bb b/meta/recipes-connectivity/ssh-pregen-hostkeys/ssh-pregen-hostkeys_1.0.bb index ede18a0031..db5a0eb8ce 100644 --- a/meta/recipes-connectivity/ssh-pregen-hostkeys/ssh-pregen-hostkeys_1.0.bb +++ b/meta/recipes-connectivity/ssh-pregen-hostkeys/ssh-pregen-hostkeys_1.0.bb | |||
@@ -6,8 +6,13 @@ SRC_URI = "file://dropbear_rsa_host_key \ | |||
6 | LICENSE = "MIT" | 6 | LICENSE = "MIT" |
7 | LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302" | 7 | LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302" |
8 | 8 | ||
9 | S = "${WORKDIR}/sources" | ||
10 | UNPACKDIR = "${S}" | ||
11 | |||
9 | INHIBIT_DEFAULT_DEPS = "1" | 12 | INHIBIT_DEFAULT_DEPS = "1" |
10 | 13 | ||
14 | COMPATIBLE_MACHINE = "^qemu.*$" | ||
15 | |||
11 | do_install () { | 16 | do_install () { |
12 | install -d ${D}${sysconfdir}/dropbear | 17 | install -d ${D}${sysconfdir}/dropbear |
13 | install ${UNPACKDIR}/dropbear_rsa_host_key -m 0600 ${D}${sysconfdir}/dropbear/ | 18 | install ${UNPACKDIR}/dropbear_rsa_host_key -m 0600 ${D}${sysconfdir}/dropbear/ |
@@ -16,4 +21,4 @@ do_install () { | |||
16 | install ${UNPACKDIR}/openssh/* ${D}${sysconfdir}/ssh/ | 21 | install ${UNPACKDIR}/openssh/* ${D}${sysconfdir}/ssh/ |
17 | chmod 0600 ${D}${sysconfdir}/ssh/* | 22 | chmod 0600 ${D}${sysconfdir}/ssh/* |
18 | chmod 0644 ${D}${sysconfdir}/ssh/*.pub | 23 | chmod 0644 ${D}${sysconfdir}/ssh/*.pub |
19 | } \ No newline at end of file | 24 | } |
diff --git a/meta/recipes-core/base-files/base-files/profile b/meta/recipes-core/base-files/base-files/profile index bded3757cc..5e8393c91c 100644 --- a/meta/recipes-core/base-files/base-files/profile +++ b/meta/recipes-core/base-files/base-files/profile | |||
@@ -58,7 +58,7 @@ resize() { | |||
58 | fi | 58 | fi |
59 | # only do this for /dev/tty[A-z] which are typically | 59 | # only do this for /dev/tty[A-z] which are typically |
60 | # serial ports | 60 | # serial ports |
61 | if [ $FIRSTTIMESETUP -eq 1 -a $SHLVL -eq 1 ] ; then | 61 | if [ $FIRSTTIMESETUP -eq 1 -a ${SHLVL:-1} -eq 1 ] ; then |
62 | case $(tty 2>/dev/null) in | 62 | case $(tty 2>/dev/null) in |
63 | /dev/tty[A-z]*) resize >/dev/null;; | 63 | /dev/tty[A-z]*) resize >/dev/null;; |
64 | esac | 64 | esac |
diff --git a/meta/recipes-core/base-files/base-files_3.0.14.bb b/meta/recipes-core/base-files/base-files_3.0.14.bb index 42a8026e63..1b6e9671e4 100644 --- a/meta/recipes-core/base-files/base-files_3.0.14.bb +++ b/meta/recipes-core/base-files/base-files_3.0.14.bb | |||
@@ -25,7 +25,8 @@ SRC_URI = "file://rotation \ | |||
25 | " | 25 | " |
26 | SRC_URI:append:libc-glibc = "${@bb.utils.contains('DISTRO_FEATURES', 'systemd systemd-resolved', ' file://0001-add-nss-resolve-to-nsswitch.patch', '', d)}" | 26 | SRC_URI:append:libc-glibc = "${@bb.utils.contains('DISTRO_FEATURES', 'systemd systemd-resolved', ' file://0001-add-nss-resolve-to-nsswitch.patch', '', d)}" |
27 | 27 | ||
28 | S = "${WORKDIR}" | 28 | S = "${WORKDIR}/sources" |
29 | UNPACKDIR = "${S}" | ||
29 | 30 | ||
30 | INHIBIT_DEFAULT_DEPS = "1" | 31 | INHIBIT_DEFAULT_DEPS = "1" |
31 | 32 | ||
diff --git a/meta/recipes-core/busybox/busybox-inittab_1.36.1.bb b/meta/recipes-core/busybox/busybox-inittab_1.36.1.bb index 1f179d8560..4ffc44c808 100644 --- a/meta/recipes-core/busybox/busybox-inittab_1.36.1.bb +++ b/meta/recipes-core/busybox/busybox-inittab_1.36.1.bb | |||
@@ -4,7 +4,8 @@ LIC_FILES_CHKSUM = "file://${COREBASE}/meta/files/common-licenses/GPL-2.0-only;m | |||
4 | 4 | ||
5 | SRC_URI = "file://inittab" | 5 | SRC_URI = "file://inittab" |
6 | 6 | ||
7 | S = "${WORKDIR}" | 7 | S = "${WORKDIR}/sources" |
8 | UNPACKDIR = "${S}" | ||
8 | 9 | ||
9 | INHIBIT_DEFAULT_DEPS = "1" | 10 | INHIBIT_DEFAULT_DEPS = "1" |
10 | 11 | ||
diff --git a/meta/recipes-core/dropbear/dropbear/0001-urandom-xauth-changes-to-options.h.patch b/meta/recipes-core/dropbear/dropbear/0001-urandom-xauth-changes-to-options.h.patch index c74f09e484..9c1dd3f606 100644 --- a/meta/recipes-core/dropbear/dropbear/0001-urandom-xauth-changes-to-options.h.patch +++ b/meta/recipes-core/dropbear/dropbear/0001-urandom-xauth-changes-to-options.h.patch | |||
@@ -1,4 +1,7 @@ | |||
1 | Subject: [PATCH 1/6] urandom-xauth-changes-to-options.h | 1 | From cdc6a4a57a86d8116a92a5d905993e65cf723556 Mon Sep 17 00:00:00 2001 |
2 | From: Richard Purdie <richard@openedhand.com> | ||
3 | Date: Wed, 31 Aug 2005 10:45:47 +0000 | ||
4 | Subject: [PATCH] urandom-xauth-changes-to-options.h | ||
2 | 5 | ||
3 | Upstream-Status: Inappropriate [configuration] | 6 | Upstream-Status: Inappropriate [configuration] |
4 | --- | 7 | --- |
@@ -18,6 +21,3 @@ index 6e970bb..ccc8b47 100644 | |||
18 | 21 | ||
19 | 22 | ||
20 | /* If you want to enable running an sftp server (such as the one included with | 23 | /* If you want to enable running an sftp server (such as the one included with |
21 | -- | ||
22 | 2.34.1 | ||
23 | |||
diff --git a/meta/recipes-core/dropbear/dropbear/0005-dropbear-enable-pam.patch b/meta/recipes-core/dropbear/dropbear/0005-dropbear-enable-pam.patch index fe667ddc25..6743f506e9 100644 --- a/meta/recipes-core/dropbear/dropbear/0005-dropbear-enable-pam.patch +++ b/meta/recipes-core/dropbear/dropbear/0005-dropbear-enable-pam.patch | |||
@@ -1,7 +1,7 @@ | |||
1 | From b8cece92ba19aa77ac013ea161bfe4c7147747c9 Mon Sep 17 00:00:00 2001 | 1 | From 253ca01f0fc50dbaeb2ff8bcece0c34256eba94f Mon Sep 17 00:00:00 2001 |
2 | From: Jussi Kukkonen <jussi.kukkonen@intel.com> | 2 | From: Jussi Kukkonen <jussi.kukkonen@intel.com> |
3 | Date: Wed, 2 Dec 2015 11:36:02 +0200 | 3 | Date: Wed, 2 Dec 2015 11:36:02 +0200 |
4 | Subject: Enable pam | 4 | Subject: [PATCH] Enable pam |
5 | 5 | ||
6 | We need modify file default_options.h besides enabling pam in | 6 | We need modify file default_options.h besides enabling pam in |
7 | configure if we want dropbear to support pam. | 7 | configure if we want dropbear to support pam. |
@@ -15,10 +15,10 @@ Signed-off-by: Jussi Kukkonen <jussi.kukkonen@intel.com> | |||
15 | 1 file changed, 2 insertions(+), 2 deletions(-) | 15 | 1 file changed, 2 insertions(+), 2 deletions(-) |
16 | 16 | ||
17 | diff --git a/src/default_options.h b/src/default_options.h | 17 | diff --git a/src/default_options.h b/src/default_options.h |
18 | index 0e3d027..349338c 100644 | 18 | index ccc8b47..12768d1 100644 |
19 | --- a/src/default_options.h | 19 | --- a/src/default_options.h |
20 | +++ b/src/default_options.h | 20 | +++ b/src/default_options.h |
21 | @@ -210,7 +210,7 @@ group1 in Dropbear server too */ | 21 | @@ -228,7 +228,7 @@ group1 in Dropbear server too */ |
22 | 22 | ||
23 | /* Authentication Types - at least one required. | 23 | /* Authentication Types - at least one required. |
24 | RFC Draft requires pubkey auth, and recommends password */ | 24 | RFC Draft requires pubkey auth, and recommends password */ |
@@ -27,7 +27,7 @@ index 0e3d027..349338c 100644 | |||
27 | 27 | ||
28 | /* Note: PAM auth is quite simple and only works for PAM modules which just do | 28 | /* Note: PAM auth is quite simple and only works for PAM modules which just do |
29 | * a simple "Login: " "Password: " (you can edit the strings in svr-authpam.c). | 29 | * a simple "Login: " "Password: " (you can edit the strings in svr-authpam.c). |
30 | @@ -218,7 +218,7 @@ group1 in Dropbear server too */ | 30 | @@ -236,7 +236,7 @@ group1 in Dropbear server too */ |
31 | * but there's an interface via a PAM module. It won't work for more complex | 31 | * but there's an interface via a PAM module. It won't work for more complex |
32 | * PAM challenge/response. | 32 | * PAM challenge/response. |
33 | * You can't enable both PASSWORD and PAM. */ | 33 | * You can't enable both PASSWORD and PAM. */ |
@@ -36,6 +36,3 @@ index 0e3d027..349338c 100644 | |||
36 | 36 | ||
37 | /* ~/.ssh/authorized_keys authentication. | 37 | /* ~/.ssh/authorized_keys authentication. |
38 | * You must define DROPBEAR_SVR_PUBKEY_AUTH in order to use plugins. */ | 38 | * You must define DROPBEAR_SVR_PUBKEY_AUTH in order to use plugins. */ |
39 | -- | ||
40 | 2.25.1 | ||
41 | |||
diff --git a/meta/recipes-core/dropbear/dropbear/0006-dropbear-configuration-file.patch b/meta/recipes-core/dropbear/dropbear/0006-dropbear-configuration-file.patch index f54f634a4e..44861088cc 100644 --- a/meta/recipes-core/dropbear/dropbear/0006-dropbear-configuration-file.patch +++ b/meta/recipes-core/dropbear/dropbear/0006-dropbear-configuration-file.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From e3a5db1b6d3f6382a15b2266458c26c645a10f18 Mon Sep 17 00:00:00 2001 | 1 | From 16b147f97f0938cddb55ec1c90bc919c13f26fc0 Mon Sep 17 00:00:00 2001 |
2 | From: Mingli Yu <Mingli.Yu@windriver.com> | 2 | From: Mingli Yu <Mingli.Yu@windriver.com> |
3 | Date: Thu, 6 Sep 2018 15:54:00 +0800 | 3 | Date: Thu, 6 Sep 2018 15:54:00 +0800 |
4 | Subject: [PATCH] dropbear configuration file | 4 | Subject: [PATCH] dropbear configuration file |
@@ -15,11 +15,11 @@ Signed-off-by: Mingli Yu <Mingli.Yu@windriver.com> | |||
15 | src/svr-authpam.c | 2 +- | 15 | src/svr-authpam.c | 2 +- |
16 | 1 file changed, 1 insertion(+), 1 deletion(-) | 16 | 1 file changed, 1 insertion(+), 1 deletion(-) |
17 | 17 | ||
18 | diff --git a/srec/svr-authpam.c b/src/svr-authpam.c | 18 | diff --git a/src/svr-authpam.c b/src/svr-authpam.c |
19 | index d201bc9..165ec5c 100644 | 19 | index ec14632..026102f 100644 |
20 | --- a/src/svr-authpam.c | 20 | --- a/src/svr-authpam.c |
21 | +++ b/src/svr-authpam.c | 21 | +++ b/src/svr-authpam.c |
22 | @@ -223,7 +223,7 @@ void svr_auth_pam(int valid_user) { | 22 | @@ -224,7 +224,7 @@ void svr_auth_pam(int valid_user) { |
23 | } | 23 | } |
24 | 24 | ||
25 | /* Init pam */ | 25 | /* Init pam */ |
@@ -28,6 +28,3 @@ index d201bc9..165ec5c 100644 | |||
28 | dropbear_log(LOG_WARNING, "pam_start() failed, rc=%d, %s", | 28 | dropbear_log(LOG_WARNING, "pam_start() failed, rc=%d, %s", |
29 | rc, pam_strerror(pamHandlep, rc)); | 29 | rc, pam_strerror(pamHandlep, rc)); |
30 | goto cleanup; | 30 | goto cleanup; |
31 | -- | ||
32 | 2.7.4 | ||
33 | |||
diff --git a/meta/recipes-core/dropbear/dropbear/dropbear-disable-weak-ciphers.patch b/meta/recipes-core/dropbear/dropbear/dropbear-disable-weak-ciphers.patch index f998caa255..a20781d31d 100644 --- a/meta/recipes-core/dropbear/dropbear/dropbear-disable-weak-ciphers.patch +++ b/meta/recipes-core/dropbear/dropbear/dropbear-disable-weak-ciphers.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From c347ece05a7fdbf50d76cb136b9ed45caed333f6 Mon Sep 17 00:00:00 2001 | 1 | From c8a0c8e87b772576f3a431c3b4cacaf5aa001dcc Mon Sep 17 00:00:00 2001 |
2 | From: Joseph Reynolds <joseph.reynolds1@ibm.com> | 2 | From: Joseph Reynolds <joseph.reynolds1@ibm.com> |
3 | Date: Thu, 20 Jun 2019 16:29:15 -0500 | 3 | Date: Thu, 20 Jun 2019 16:29:15 -0500 |
4 | Subject: [PATCH] dropbear: new feature: disable-weak-ciphers | 4 | Subject: [PATCH] dropbear: new feature: disable-weak-ciphers |
@@ -14,10 +14,10 @@ Signed-off-by: Joseph Reynolds <joseph.reynolds1@ibm.com> | |||
14 | 1 file changed, 1 insertion(+), 1 deletion(-) | 14 | 1 file changed, 1 insertion(+), 1 deletion(-) |
15 | 15 | ||
16 | diff --git a/src/default_options.h b/src/default_options.h | 16 | diff --git a/src/default_options.h b/src/default_options.h |
17 | index d417588..bc5200f 100644 | 17 | index 12768d1..2b07497 100644 |
18 | --- a/src/default_options.h | 18 | --- a/src/default_options.h |
19 | +++ b/src/default_options.h | 19 | +++ b/src/default_options.h |
20 | @@ -180,7 +180,7 @@ IMPORTANT: Some options will require "make clean" after changes */ | 20 | @@ -197,7 +197,7 @@ IMPORTANT: Some options will require "make clean" after changes */ |
21 | * Small systems should generally include either curve25519 or ecdh for performance. | 21 | * Small systems should generally include either curve25519 or ecdh for performance. |
22 | * curve25519 is less widely supported but is faster | 22 | * curve25519 is less widely supported but is faster |
23 | */ | 23 | */ |
@@ -26,6 +26,3 @@ index d417588..bc5200f 100644 | |||
26 | #define DROPBEAR_DH_GROUP14_SHA256 1 | 26 | #define DROPBEAR_DH_GROUP14_SHA256 1 |
27 | #define DROPBEAR_DH_GROUP16 0 | 27 | #define DROPBEAR_DH_GROUP16 0 |
28 | #define DROPBEAR_CURVE25519 1 | 28 | #define DROPBEAR_CURVE25519 1 |
29 | -- | ||
30 | 2.25.1 | ||
31 | |||
diff --git a/meta/recipes-core/dropbear/dropbear_2024.84.bb b/meta/recipes-core/dropbear/dropbear_2024.85.bb index 3ea64b13d0..54001414d0 100644 --- a/meta/recipes-core/dropbear/dropbear_2024.84.bb +++ b/meta/recipes-core/dropbear/dropbear_2024.85.bb | |||
@@ -23,7 +23,7 @@ SRC_URI = "http://matt.ucc.asn.au/dropbear/releases/dropbear-${PV}.tar.bz2 \ | |||
23 | ${@bb.utils.contains('PACKAGECONFIG', 'disable-weak-ciphers', 'file://dropbear-disable-weak-ciphers.patch', '', d)} \ | 23 | ${@bb.utils.contains('PACKAGECONFIG', 'disable-weak-ciphers', 'file://dropbear-disable-weak-ciphers.patch', '', d)} \ |
24 | " | 24 | " |
25 | 25 | ||
26 | SRC_URI[sha256sum] = "16e22b66b333d6b7e504c43679d04ed6ca30f2838db40a21f935c850dfc01009" | 26 | SRC_URI[sha256sum] = "86b036c433a69d89ce51ebae335d65c47738ccf90d13e5eb0fea832e556da502" |
27 | 27 | ||
28 | PAM_SRC_URI = "file://0005-dropbear-enable-pam.patch \ | 28 | PAM_SRC_URI = "file://0005-dropbear-enable-pam.patch \ |
29 | file://0006-dropbear-configuration-file.patch \ | 29 | file://0006-dropbear-configuration-file.patch \ |
diff --git a/meta/recipes-core/ell/ell_0.64.bb b/meta/recipes-core/ell/ell_0.65.bb index c4f16905fd..9b559caf8a 100644 --- a/meta/recipes-core/ell/ell_0.64.bb +++ b/meta/recipes-core/ell/ell_0.65.bb | |||
@@ -15,8 +15,4 @@ DEPENDS = "dbus" | |||
15 | inherit autotools pkgconfig | 15 | inherit autotools pkgconfig |
16 | 16 | ||
17 | SRC_URI = "https://mirrors.edge.kernel.org/pub/linux/libs/${BPN}/${BPN}-${PV}.tar.xz" | 17 | SRC_URI = "https://mirrors.edge.kernel.org/pub/linux/libs/${BPN}/${BPN}-${PV}.tar.xz" |
18 | SRC_URI[sha256sum] = "760f3901078409f66cadf1bb24c8bdc60f13d53f6dd66b88631221d2494f8405" | 18 | SRC_URI[sha256sum] = "9ee7ac57b188d391cead705d3596a6d3240341786475149db297782a52269aa5" |
19 | |||
20 | do_configure:prepend () { | ||
21 | mkdir -p ${S}/build-aux | ||
22 | } | ||
diff --git a/meta/recipes-core/gettext/gettext-minimal-native_0.22.5.bb b/meta/recipes-core/gettext/gettext-minimal-native_0.22.5.bb index db1bbb7e68..96acf9bb9f 100644 --- a/meta/recipes-core/gettext/gettext-minimal-native_0.22.5.bb +++ b/meta/recipes-core/gettext/gettext-minimal-native_0.22.5.bb | |||
@@ -13,7 +13,7 @@ INHIBIT_DEFAULT_DEPS = "1" | |||
13 | INHIBIT_AUTOTOOLS_DEPS = "1" | 13 | INHIBIT_AUTOTOOLS_DEPS = "1" |
14 | 14 | ||
15 | LICENSE = "FSF-Unlimited" | 15 | LICENSE = "FSF-Unlimited" |
16 | LIC_FILES_CHKSUM = "file://../COPYING;md5=4bd090a20bfcd1a18f1f79837b5e3e91" | 16 | LIC_FILES_CHKSUM = "file://${UNPACKDIR}/COPYING;md5=4bd090a20bfcd1a18f1f79837b5e3e91" |
17 | 17 | ||
18 | inherit native | 18 | inherit native |
19 | 19 | ||
diff --git a/meta/recipes-core/gettext/gettext/0001-intl-Fix-build-failure-with-make-j.patch b/meta/recipes-core/gettext/gettext/0001-intl-Fix-build-failure-with-make-j.patch new file mode 100644 index 0000000000..144259dd3f --- /dev/null +++ b/meta/recipes-core/gettext/gettext/0001-intl-Fix-build-failure-with-make-j.patch | |||
@@ -0,0 +1,35 @@ | |||
1 | From 97a6a63ad61949663283f5fad68c9d5fb9be1f15 Mon Sep 17 00:00:00 2001 | ||
2 | From: Bruno Haible <bruno@clisp.org> | ||
3 | Date: Tue, 12 Sep 2023 11:33:41 +0200 | ||
4 | Subject: [PATCH] intl: Fix build failure with "make -j". | ||
5 | |||
6 | Reported by Christian Weisgerber <naddy@mips.inka.de> at | ||
7 | <https://lists.gnu.org/archive/html/bug-gettext/2023-09/msg00005.html>. | ||
8 | |||
9 | * gettext-runtime/intl/Makefile.am (langprefs.lo, log.lo): Depend on gettextP.h | ||
10 | and its subordinate includes. | ||
11 | |||
12 | Upstream-Status: Backport [https://git.savannah.gnu.org/gitweb/?p=gettext.git;a=commit;h=97a6a63ad61949663283f5fad68c9d5fb9be1f15] | ||
13 | Signed-off-by: Changqing Li <changqing.li@windriver.com> | ||
14 | --- | ||
15 | gettext-runtime/intl/Makefile.am | 4 ++-- | ||
16 | 1 file changed, 2 insertions(+), 2 deletions(-) | ||
17 | |||
18 | diff --git a/gettext-runtime/intl/Makefile.am b/gettext-runtime/intl/Makefile.am | ||
19 | index da7abb758..9e56978bc 100644 | ||
20 | --- a/gettext-runtime/intl/Makefile.am | ||
21 | +++ b/gettext-runtime/intl/Makefile.am | ||
22 | @@ -387,8 +387,8 @@ dngettext.lo: ../config.h $(srcdir)/gettextP.h libgnuintl.h $(srcdir)/gmo | ||
23 | ngettext.lo: ../config.h $(srcdir)/gettextP.h libgnuintl.h $(srcdir)/gmo.h $(srcdir)/loadinfo.h | ||
24 | plural.lo: ../config.h $(srcdir)/plural-exp.h $(PLURAL_DEPS) | ||
25 | plural-exp.lo: ../config.h $(srcdir)/plural-exp.h | ||
26 | -langprefs.lo: ../config.h | ||
27 | -log.lo: ../config.h | ||
28 | +langprefs.lo: ../config.h $(srcdir)/gettextP.h libgnuintl.h $(srcdir)/gmo.h $(srcdir)/loadinfo.h | ||
29 | +log.lo: ../config.h $(srcdir)/gettextP.h libgnuintl.h $(srcdir)/gmo.h $(srcdir)/loadinfo.h | ||
30 | printf.lo: ../config.h | ||
31 | setlocale.lo: ../config.h $(srcdir)/gettextP.h libgnuintl.h $(srcdir)/gmo.h $(srcdir)/loadinfo.h | ||
32 | version.lo: ../config.h libgnuintl.h | ||
33 | -- | ||
34 | 2.25.1 | ||
35 | |||
diff --git a/meta/recipes-core/gettext/gettext_0.22.5.bb b/meta/recipes-core/gettext/gettext_0.22.5.bb index 1a66d37916..7eeb1a86fd 100644 --- a/meta/recipes-core/gettext/gettext_0.22.5.bb +++ b/meta/recipes-core/gettext/gettext_0.22.5.bb | |||
@@ -28,6 +28,7 @@ SRC_URI += " \ | |||
28 | file://serial-tests-config.patch \ | 28 | file://serial-tests-config.patch \ |
29 | file://0001-tests-autopoint-3-unset-MAKEFLAGS.patch \ | 29 | file://0001-tests-autopoint-3-unset-MAKEFLAGS.patch \ |
30 | file://0001-init-env.in-do-not-add-C-CXX-parameters.patch \ | 30 | file://0001-init-env.in-do-not-add-C-CXX-parameters.patch \ |
31 | file://0001-intl-Fix-build-failure-with-make-j.patch \ | ||
31 | " | 32 | " |
32 | 33 | ||
33 | inherit autotools texinfo pkgconfig ptest | 34 | inherit autotools texinfo pkgconfig ptest |
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/0001-Do-not-write-bindir-into-pkg-config-files.patch b/meta/recipes-core/glib-2.0/files/0001-Do-not-write-bindir-into-pkg-config-files.patch index 8e6598fbef..10568b7c9f 100644 --- a/meta/recipes-core/glib-2.0/glib-2.0/0001-Do-not-write-bindir-into-pkg-config-files.patch +++ b/meta/recipes-core/glib-2.0/files/0001-Do-not-write-bindir-into-pkg-config-files.patch | |||
@@ -1,26 +1,30 @@ | |||
1 | From e7077aa23bfcd31a8e72e39dc93ce4f854678376 Mon Sep 17 00:00:00 2001 | 1 | From 10b08af6c7dcb03f954da29b6c4f9636b8796f30 Mon Sep 17 00:00:00 2001 |
2 | From: Alexander Kanavin <alex.kanavin@gmail.com> | 2 | From: Alexander Kanavin <alex.kanavin@gmail.com> |
3 | Date: Fri, 15 Feb 2019 11:17:27 +0100 | 3 | Date: Fri, 15 Feb 2019 11:17:27 +0100 |
4 | Subject: [PATCH] Do not write $bindir into pkg-config files | 4 | Subject: [PATCH] Do not prefix executables with $bindir in pkg-config files |
5 | 5 | ||
6 | This would otherwise break when using the files to build other target | 6 | This would otherwise break when using the executables to build other target |
7 | components (we need to rely on PATH containing the paths to utilities, | 7 | components (we need to rely on PATH containing the paths to utilities, |
8 | rather than use target paths). | 8 | rather than use target paths). |
9 | 9 | ||
10 | Upstream-Status: Inappropriate [upstream wants the paths in .pc files] | 10 | Upstream-Status: Inappropriate [upstream wants the paths in .pc files] |
11 | Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com> | 11 | Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com> |
12 | --- | 12 | --- |
13 | gio/meson.build | 16 ++++++++-------- | 13 | gio/meson.build | 17 +++++++++-------- |
14 | glib/meson.build | 6 +++--- | 14 | glib/meson.build | 7 ++++--- |
15 | 2 files changed, 11 insertions(+), 11 deletions(-) | 15 | 2 files changed, 13 insertions(+), 11 deletions(-) |
16 | 16 | ||
17 | diff --git a/gio/meson.build b/gio/meson.build | 17 | diff --git a/gio/meson.build b/gio/meson.build |
18 | index 5f91586..1a95f4f 100644 | 18 | index 77cba7b..25d709e 100644 |
19 | --- a/gio/meson.build | 19 | --- a/gio/meson.build |
20 | +++ b/gio/meson.build | 20 | +++ b/gio/meson.build |
21 | @@ -884,14 +884,14 @@ pkg.generate(libgio, | 21 | @@ -883,17 +883,18 @@ libgio_dep = declare_dependency(link_with : libgio, |
22 | pkg.generate(libgio, | ||
23 | requires : ['glib-2.0', 'gobject-2.0'], | ||
24 | variables : [ | ||
25 | + 'bindir=' + '${prefix}' / get_option('bindir'), | ||
26 | 'schemasdir=' + '${datadir}' / schemas_subdir, | ||
22 | 'dtdsdir=' + '${datadir}' / dtds_subdir, | 27 | 'dtdsdir=' + '${datadir}' / dtds_subdir, |
23 | 'bindir=' + '${prefix}' / get_option('bindir'), | ||
24 | 'giomoduledir=' + pkgconfig_giomodulesdir, | 28 | 'giomoduledir=' + pkgconfig_giomodulesdir, |
25 | - 'gio=' + '${bindir}' / 'gio', | 29 | - 'gio=' + '${bindir}' / 'gio', |
26 | - 'gio_querymodules=' + pkgconfig_multiarch_bindir / 'gio-querymodules', | 30 | - 'gio_querymodules=' + pkgconfig_multiarch_bindir / 'gio-querymodules', |
@@ -42,16 +46,17 @@ index 5f91586..1a95f4f 100644 | |||
42 | version : glib_version, | 46 | version : glib_version, |
43 | install_dir : glib_pkgconfigreldir, | 47 | install_dir : glib_pkgconfigreldir, |
44 | diff --git a/glib/meson.build b/glib/meson.build | 48 | diff --git a/glib/meson.build b/glib/meson.build |
45 | index c26a35e..1d8ca6b 100644 | 49 | index d2efeba..5f5841d 100644 |
46 | --- a/glib/meson.build | 50 | --- a/glib/meson.build |
47 | +++ b/glib/meson.build | 51 | +++ b/glib/meson.build |
48 | @@ -447,9 +447,9 @@ pkg.generate(libglib, | 52 | @@ -447,9 +447,10 @@ pkg.generate(libglib, |
53 | subdirs : ['glib-2.0'], | ||
54 | extra_cflags : ['-I${libdir}/glib-2.0/include'] + win32_cflags, | ||
49 | variables : [ | 55 | variables : [ |
50 | 'bindir=' + '${prefix}' / get_option('bindir'), | ||
51 | 'datadir=' + '${prefix}' / get_option('datadir'), | ||
52 | - 'glib_genmarshal=' + '${bindir}' / 'glib-genmarshal', | 56 | - 'glib_genmarshal=' + '${bindir}' / 'glib-genmarshal', |
53 | - 'gobject_query=' + '${bindir}' / 'gobject-query', | 57 | - 'gobject_query=' + '${bindir}' / 'gobject-query', |
54 | - 'glib_mkenums=' + '${bindir}' / 'glib-mkenums', | 58 | - 'glib_mkenums=' + '${bindir}' / 'glib-mkenums', |
59 | + 'bindir=' + '${prefix}' / get_option('bindir'), | ||
55 | + 'glib_genmarshal=glib-genmarshal', | 60 | + 'glib_genmarshal=glib-genmarshal', |
56 | + 'gobject_query=gobject-query', | 61 | + 'gobject_query=gobject-query', |
57 | + 'glib_mkenums=glib-mkenums', | 62 | + 'glib_mkenums=glib-mkenums', |
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/0001-Fix-DATADIRNAME-on-uclibc-Linux.patch b/meta/recipes-core/glib-2.0/files/0001-Fix-DATADIRNAME-on-uclibc-Linux.patch index eb9dfdbcf9..b9c9706fc4 100644 --- a/meta/recipes-core/glib-2.0/glib-2.0/0001-Fix-DATADIRNAME-on-uclibc-Linux.patch +++ b/meta/recipes-core/glib-2.0/files/0001-Fix-DATADIRNAME-on-uclibc-Linux.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 9a5d4bf65b658d744d610ee27ecd2ae65b14b158 Mon Sep 17 00:00:00 2001 | 1 | From 55c49c51d8db5af15132653003d2b65a5215eebf Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Sat, 15 Mar 2014 22:42:29 -0700 | 3 | Date: Sat, 15 Mar 2014 22:42:29 -0700 |
4 | Subject: [PATCH] Fix DATADIRNAME on uclibc/Linux | 4 | Subject: [PATCH] Fix DATADIRNAME on uclibc/Linux |
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/0001-Install-gio-querymodules-as-libexec_PROGRAM.patch b/meta/recipes-core/glib-2.0/files/0001-Install-gio-querymodules-as-libexec_PROGRAM.patch index ad69f7ec65..bc539fe3e8 100644 --- a/meta/recipes-core/glib-2.0/glib-2.0/0001-Install-gio-querymodules-as-libexec_PROGRAM.patch +++ b/meta/recipes-core/glib-2.0/files/0001-Install-gio-querymodules-as-libexec_PROGRAM.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 4933aef791857a5aac650b60af800778658b875b Mon Sep 17 00:00:00 2001 | 1 | From 53333cf3ec787cb7e60585237327390e2ca89f4c Mon Sep 17 00:00:00 2001 |
2 | From: Jussi Kukkonen <jussi.kukkonen@intel.com> | 2 | From: Jussi Kukkonen <jussi.kukkonen@intel.com> |
3 | Date: Tue, 22 Mar 2016 15:14:58 +0200 | 3 | Date: Tue, 22 Mar 2016 15:14:58 +0200 |
4 | Subject: [PATCH] Install gio-querymodules as libexec_PROGRAM | 4 | Subject: [PATCH] Install gio-querymodules as libexec_PROGRAM |
@@ -13,10 +13,10 @@ Upstream-Status: Inappropriate [OE specific] | |||
13 | 1 file changed, 1 insertion(+) | 13 | 1 file changed, 1 insertion(+) |
14 | 14 | ||
15 | diff --git a/gio/meson.build b/gio/meson.build | 15 | diff --git a/gio/meson.build b/gio/meson.build |
16 | index f9fdf6e..5f91586 100644 | 16 | index 59c2b0f..77cba7b 100644 |
17 | --- a/gio/meson.build | 17 | --- a/gio/meson.build |
18 | +++ b/gio/meson.build | 18 | +++ b/gio/meson.build |
19 | @@ -1005,6 +1005,7 @@ gio_querymodules = executable('gio-querymodules', 'gio-querymodules.c', 'giomodu | 19 | @@ -1007,6 +1007,7 @@ gio_querymodules = executable('gio-querymodules', 'gio-querymodules.c', 'giomodu |
20 | c_args : gio_c_args, | 20 | c_args : gio_c_args, |
21 | # intl.lib is not compatible with SAFESEH | 21 | # intl.lib is not compatible with SAFESEH |
22 | link_args : noseh_link_args, | 22 | link_args : noseh_link_args, |
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/0001-Remove-the-warning-about-deprecated-paths-in-schemas.patch b/meta/recipes-core/glib-2.0/files/0001-Remove-the-warning-about-deprecated-paths-in-schemas.patch index 0e3a62af6a..5e543339d8 100644 --- a/meta/recipes-core/glib-2.0/glib-2.0/0001-Remove-the-warning-about-deprecated-paths-in-schemas.patch +++ b/meta/recipes-core/glib-2.0/files/0001-Remove-the-warning-about-deprecated-paths-in-schemas.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 8ae2e9c2a04e089306693a021149dc6b7d1bd679 Mon Sep 17 00:00:00 2001 | 1 | From 3db055ce8029372096be534c5cfc385f068bab17 Mon Sep 17 00:00:00 2001 |
2 | From: Alexander Kanavin <alex.kanavin@gmail.com> | 2 | From: Alexander Kanavin <alex.kanavin@gmail.com> |
3 | Date: Fri, 12 Jun 2015 17:08:46 +0300 | 3 | Date: Fri, 12 Jun 2015 17:08:46 +0300 |
4 | Subject: [PATCH] Remove the warning about deprecated paths in schemas | 4 | Subject: [PATCH] Remove the warning about deprecated paths in schemas |
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/0001-gio-tests-resources.c-comment-out-a-build-host-only-.patch b/meta/recipes-core/glib-2.0/files/0001-gio-tests-resources.c-comment-out-a-build-host-only-.patch index aee2986033..aa7127b65b 100644 --- a/meta/recipes-core/glib-2.0/glib-2.0/0001-gio-tests-resources.c-comment-out-a-build-host-only-.patch +++ b/meta/recipes-core/glib-2.0/files/0001-gio-tests-resources.c-comment-out-a-build-host-only-.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 878e51f82100c698236fda0e069e14ea9249350c Mon Sep 17 00:00:00 2001 | 1 | From 97b4f18c65c52c9e6412ecf8affc22f6f42d3465 Mon Sep 17 00:00:00 2001 |
2 | From: Alexander Kanavin <alex.kanavin@gmail.com> | 2 | From: Alexander Kanavin <alex.kanavin@gmail.com> |
3 | Date: Wed, 8 Jan 2020 18:22:46 +0100 | 3 | Date: Wed, 8 Jan 2020 18:22:46 +0100 |
4 | Subject: [PATCH] gio/tests/resources.c: comment out a build host-only test | 4 | Subject: [PATCH] gio/tests/resources.c: comment out a build host-only test |
@@ -13,7 +13,7 @@ Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com> | |||
13 | 1 file changed, 1 insertion(+), 1 deletion(-) | 13 | 1 file changed, 1 insertion(+), 1 deletion(-) |
14 | 14 | ||
15 | diff --git a/gio/tests/resources.c b/gio/tests/resources.c | 15 | diff --git a/gio/tests/resources.c b/gio/tests/resources.c |
16 | index f567914..b21b616 100644 | 16 | index f7dc039..f708876 100644 |
17 | --- a/gio/tests/resources.c | 17 | --- a/gio/tests/resources.c |
18 | +++ b/gio/tests/resources.c | 18 | +++ b/gio/tests/resources.c |
19 | @@ -1068,7 +1068,7 @@ main (int argc, | 19 | @@ -1068,7 +1068,7 @@ main (int argc, |
diff --git a/meta/recipes-core/glib-2.0/files/0001-girepository-introspection-correctly-install-.gir-fi.patch b/meta/recipes-core/glib-2.0/files/0001-girepository-introspection-correctly-install-.gir-fi.patch new file mode 100644 index 0000000000..7be9cd8b84 --- /dev/null +++ b/meta/recipes-core/glib-2.0/files/0001-girepository-introspection-correctly-install-.gir-fi.patch | |||
@@ -0,0 +1,88 @@ | |||
1 | From f1beef5c2d09fae3a5e5a314f984cb3f20abe732 Mon Sep 17 00:00:00 2001 | ||
2 | From: Alexander Kanavin <alex@linutronix.de> | ||
3 | Date: Tue, 23 Apr 2024 11:24:34 +0200 | ||
4 | Subject: [PATCH] girepository/introspection: correctly install .gir files into | ||
5 | custom locations provided via meson option | ||
6 | |||
7 | Upstream-Status: Submitted [https://gitlab.gnome.org/GNOME/glib/-/merge_requests/4020] | ||
8 | Signed-off-by: Alexander Kanavin <alex@linutronix.de> | ||
9 | --- | ||
10 | girepository/introspection/meson.build | 9 +++++++++ | ||
11 | 1 file changed, 9 insertions(+) | ||
12 | |||
13 | diff --git a/girepository/introspection/meson.build b/girepository/introspection/meson.build | ||
14 | index 9405686..862ca61 100644 | ||
15 | --- a/girepository/introspection/meson.build | ||
16 | +++ b/girepository/introspection/meson.build | ||
17 | @@ -49,6 +49,7 @@ glib_gir = gnome.generate_gir(libglib, | ||
18 | export_packages: 'glib-2.0', | ||
19 | header: 'glib.h', | ||
20 | install: true, | ||
21 | + install_dir_gir: glib_girdir, | ||
22 | dependencies: [ | ||
23 | libgobject_dep, | ||
24 | ], | ||
25 | @@ -76,6 +77,7 @@ if host_system == 'windows' | ||
26 | header: 'glib.h', | ||
27 | includes: [ glib_gir[0] ], | ||
28 | install: true, | ||
29 | + install_dir_gir: glib_girdir, | ||
30 | dependencies: [ | ||
31 | libgobject_dep, | ||
32 | ], | ||
33 | @@ -104,6 +106,7 @@ else | ||
34 | header: 'glib.h', | ||
35 | includes: [ glib_gir[0] ], | ||
36 | install: true, | ||
37 | + install_dir_gir: glib_girdir, | ||
38 | dependencies: [ | ||
39 | libgobject_dep, | ||
40 | ], | ||
41 | @@ -137,6 +140,7 @@ gobject_gir = gnome.generate_gir(libgobject, | ||
42 | header: 'glib-object.h', | ||
43 | includes: [ glib_gir[0] ], | ||
44 | install: true, | ||
45 | + install_dir_gir: glib_girdir, | ||
46 | env: gi_gen_env_variables, | ||
47 | extra_args: gir_args + [ | ||
48 | '-DGOBJECT_COMPILATION', | ||
49 | @@ -162,6 +166,7 @@ gmodule_gir = gnome.generate_gir(libgmodule, | ||
50 | header: 'gmodule.h', | ||
51 | includes: [ glib_gir[0] ], | ||
52 | install: true, | ||
53 | + install_dir_gir: glib_girdir, | ||
54 | dependencies: [ | ||
55 | libglib_dep, | ||
56 | ], | ||
57 | @@ -224,6 +229,7 @@ gio_gir = gnome.generate_gir(libgio, | ||
58 | header: 'gio/gio.h', | ||
59 | includes: [ glib_gir[0], gmodule_gir[0], gobject_gir[0] ], | ||
60 | install: true, | ||
61 | + install_dir_gir: glib_girdir, | ||
62 | dependencies: [ | ||
63 | libglib_dep, | ||
64 | libgobject_dep, | ||
65 | @@ -249,6 +255,7 @@ if host_system == 'windows' | ||
66 | header: 'gio/gio.h', | ||
67 | includes: [ glib_gir[0], gmodule_gir[0], gobject_gir[0], gio_gir[0] ], | ||
68 | install: true, | ||
69 | + install_dir_gir: glib_girdir, | ||
70 | dependencies: [ | ||
71 | libglib_dep, | ||
72 | libgobject_dep, | ||
73 | @@ -277,6 +284,7 @@ else | ||
74 | header: 'gio/gio.h', | ||
75 | includes: [ glib_gir[0], gmodule_gir[0], gobject_gir[0], gio_gir[0] ], | ||
76 | install: true, | ||
77 | + install_dir_gir: glib_girdir, | ||
78 | dependencies: [ | ||
79 | libglib_dep, | ||
80 | libgobject_dep, | ||
81 | @@ -314,6 +322,7 @@ girepository_gir = gnome.generate_gir(libgirepository, | ||
82 | header: 'girepository/girepository.h', | ||
83 | includes: [ glib_gir[0], gmodule_gir[0], gobject_gir[0], gio_gir[0] ], | ||
84 | install: true, | ||
85 | + install_dir_gir: glib_girdir, | ||
86 | dependencies: [ libglib_dep, libgobject_dep, libgmodule_dep, libgio_dep ], | ||
87 | extra_args: gir_args + libgirepository_gir_args, | ||
88 | ) | ||
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/0001-meson-Run-atomics-test-on-clang-as-well.patch b/meta/recipes-core/glib-2.0/files/0001-meson-Run-atomics-test-on-clang-as-well.patch index 0b10269114..3a8d3596b0 100644 --- a/meta/recipes-core/glib-2.0/glib-2.0/0001-meson-Run-atomics-test-on-clang-as-well.patch +++ b/meta/recipes-core/glib-2.0/files/0001-meson-Run-atomics-test-on-clang-as-well.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From b4b523160ef550a53705fcc45ac6e10d086ce491 Mon Sep 17 00:00:00 2001 | 1 | From f04ea29f9f96892ada81bd0acfcef70183138229 Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Sat, 12 Oct 2019 17:46:26 -0700 | 3 | Date: Sat, 12 Oct 2019 17:46:26 -0700 |
4 | Subject: [PATCH] meson: Run atomics test on clang as well | 4 | Subject: [PATCH] meson: Run atomics test on clang as well |
@@ -14,10 +14,10 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com> | |||
14 | 1 file changed, 1 insertion(+), 1 deletion(-) | 14 | 1 file changed, 1 insertion(+), 1 deletion(-) |
15 | 15 | ||
16 | diff --git a/meson.build b/meson.build | 16 | diff --git a/meson.build b/meson.build |
17 | index 6ee775e..8bc5fa7 100644 | 17 | index 257afb5..5caa6e6 100644 |
18 | --- a/meson.build | 18 | --- a/meson.build |
19 | +++ b/meson.build | 19 | +++ b/meson.build |
20 | @@ -1938,7 +1938,7 @@ atomicdefine = ''' | 20 | @@ -2024,7 +2024,7 @@ atomicdefine = ''' |
21 | # We know that we can always use real ("lock free") atomic operations with MSVC | 21 | # We know that we can always use real ("lock free") atomic operations with MSVC |
22 | if cc.get_id() == 'msvc' or cc.get_id() == 'clang-cl' or cc.links(atomictest, name : 'atomic ops') | 22 | if cc.get_id() == 'msvc' or cc.get_id() == 'clang-cl' or cc.links(atomictest, name : 'atomic ops') |
23 | have_atomic_lock_free = true | 23 | have_atomic_lock_free = true |
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/0001-meson.build-do-not-enable-pidfd-features-on-native-g.patch b/meta/recipes-core/glib-2.0/files/0001-meson.build-do-not-enable-pidfd-features-on-native-g.patch index 14dcb278e0..9be04960aa 100644 --- a/meta/recipes-core/glib-2.0/glib-2.0/0001-meson.build-do-not-enable-pidfd-features-on-native-g.patch +++ b/meta/recipes-core/glib-2.0/files/0001-meson.build-do-not-enable-pidfd-features-on-native-g.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 294f3e6e9a0a9f4733e85ed6810d1b743055370b Mon Sep 17 00:00:00 2001 | 1 | From a63c865aa9a24f3230e8e2bcb5bce88f179c7e2b Mon Sep 17 00:00:00 2001 |
2 | From: Alexander Kanavin <alex@linutronix.de> | 2 | From: Alexander Kanavin <alex@linutronix.de> |
3 | Date: Sat, 16 Sep 2023 22:28:27 +0200 | 3 | Date: Sat, 16 Sep 2023 22:28:27 +0200 |
4 | Subject: [PATCH] meson.build: do not enable pidfd features on native glib | 4 | Subject: [PATCH] meson.build: do not enable pidfd features on native glib |
@@ -14,10 +14,10 @@ Signed-off-by: Alexander Kanavin <alex@linutronix.de> | |||
14 | 1 file changed, 2 insertions(+), 1 deletion(-) | 14 | 1 file changed, 2 insertions(+), 1 deletion(-) |
15 | 15 | ||
16 | diff --git a/meson.build b/meson.build | 16 | diff --git a/meson.build b/meson.build |
17 | index 8bc5fa7..df1fa60 100644 | 17 | index 5caa6e6..688f214 100644 |
18 | --- a/meson.build | 18 | --- a/meson.build |
19 | +++ b/meson.build | 19 | +++ b/meson.build |
20 | @@ -981,7 +981,8 @@ if cc.links('''#include <sys/syscall.h> | 20 | @@ -1022,7 +1022,8 @@ if cc.links('''#include <sys/syscall.h> |
21 | waitid (P_PIDFD, 0, &child_info, WEXITED | WNOHANG); | 21 | waitid (P_PIDFD, 0, &child_info, WEXITED | WNOHANG); |
22 | return 0; | 22 | return 0; |
23 | }''', name : 'pidfd_open(2) system call') | 23 | }''', name : 'pidfd_open(2) system call') |
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/0010-Do-not-hardcode-python-path-into-various-tools.patch b/meta/recipes-core/glib-2.0/files/0010-Do-not-hardcode-python-path-into-various-tools.patch index 6dff5179c7..4dec252c1f 100644 --- a/meta/recipes-core/glib-2.0/glib-2.0/0010-Do-not-hardcode-python-path-into-various-tools.patch +++ b/meta/recipes-core/glib-2.0/files/0010-Do-not-hardcode-python-path-into-various-tools.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 50636758c73e5e61212a8f801c6c602b8aab5ba7 Mon Sep 17 00:00:00 2001 | 1 | From dc7f09c4f52638a70768c528d186da6f83dedc97 Mon Sep 17 00:00:00 2001 |
2 | From: Alexander Kanavin <alex.kanavin@gmail.com> | 2 | From: Alexander Kanavin <alex.kanavin@gmail.com> |
3 | Date: Tue, 3 Oct 2017 10:45:55 +0300 | 3 | Date: Tue, 3 Oct 2017 10:45:55 +0300 |
4 | Subject: [PATCH] Do not hardcode python path into various tools | 4 | Subject: [PATCH] Do not hardcode python path into various tools |
@@ -32,7 +32,7 @@ index aa5af43..56e8e2e 100755 | |||
32 | # pylint: disable=too-many-lines, missing-docstring, invalid-name | 32 | # pylint: disable=too-many-lines, missing-docstring, invalid-name |
33 | 33 | ||
34 | diff --git a/gobject/glib-mkenums.in b/gobject/glib-mkenums.in | 34 | diff --git a/gobject/glib-mkenums.in b/gobject/glib-mkenums.in |
35 | index 353e53a..8ed6c39 100755 | 35 | index e10b910..4b619f8 100755 |
36 | --- a/gobject/glib-mkenums.in | 36 | --- a/gobject/glib-mkenums.in |
37 | +++ b/gobject/glib-mkenums.in | 37 | +++ b/gobject/glib-mkenums.in |
38 | @@ -1,4 +1,4 @@ | 38 | @@ -1,4 +1,4 @@ |
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/meson.cross.d/common b/meta/recipes-core/glib-2.0/files/meson.cross.d/common index 0d7c5fa3f8..0d7c5fa3f8 100644 --- a/meta/recipes-core/glib-2.0/glib-2.0/meson.cross.d/common +++ b/meta/recipes-core/glib-2.0/files/meson.cross.d/common | |||
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/meson.cross.d/common-glibc b/meta/recipes-core/glib-2.0/files/meson.cross.d/common-glibc index 3049e5116e..3049e5116e 100644 --- a/meta/recipes-core/glib-2.0/glib-2.0/meson.cross.d/common-glibc +++ b/meta/recipes-core/glib-2.0/files/meson.cross.d/common-glibc | |||
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/meson.cross.d/common-linux b/meta/recipes-core/glib-2.0/files/meson.cross.d/common-linux index adad7e62ee..adad7e62ee 100644 --- a/meta/recipes-core/glib-2.0/glib-2.0/meson.cross.d/common-linux +++ b/meta/recipes-core/glib-2.0/files/meson.cross.d/common-linux | |||
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/meson.cross.d/common-mingw b/meta/recipes-core/glib-2.0/files/meson.cross.d/common-mingw index 75f911ba1e..75f911ba1e 100644 --- a/meta/recipes-core/glib-2.0/glib-2.0/meson.cross.d/common-mingw +++ b/meta/recipes-core/glib-2.0/files/meson.cross.d/common-mingw | |||
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/meson.cross.d/common-musl b/meta/recipes-core/glib-2.0/files/meson.cross.d/common-musl index 3049e5116e..3049e5116e 100644 --- a/meta/recipes-core/glib-2.0/glib-2.0/meson.cross.d/common-musl +++ b/meta/recipes-core/glib-2.0/files/meson.cross.d/common-musl | |||
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/relocate-modules.patch b/meta/recipes-core/glib-2.0/files/relocate-modules.patch index 3e79bbf679..a1ff198aa6 100644 --- a/meta/recipes-core/glib-2.0/glib-2.0/relocate-modules.patch +++ b/meta/recipes-core/glib-2.0/files/relocate-modules.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From f40e89b3852df37959606ee13b1a14ade81fa886 Mon Sep 17 00:00:00 2001 | 1 | From 0133aeabd37a9137722abd86039d7d0797b5896f Mon Sep 17 00:00:00 2001 |
2 | From: Ross Burton <ross.burton@intel.com> | 2 | From: Ross Burton <ross.burton@intel.com> |
3 | Date: Fri, 11 Mar 2016 15:35:55 +0000 | 3 | Date: Fri, 11 Mar 2016 15:35:55 +0000 |
4 | Subject: [PATCH] glib-2.0: relocate the GIO module directory for native builds | 4 | Subject: [PATCH] glib-2.0: relocate the GIO module directory for native builds |
@@ -18,10 +18,10 @@ Signed-off-by: Jussi Kukkonen <jussi.kukkonen@intel.com> | |||
18 | 1 file changed, 7 deletions(-) | 18 | 1 file changed, 7 deletions(-) |
19 | 19 | ||
20 | diff --git a/gio/giomodule.c b/gio/giomodule.c | 20 | diff --git a/gio/giomodule.c b/gio/giomodule.c |
21 | index 17fabe6..8021208 100644 | 21 | index 1e14955..3c09bb5 100644 |
22 | --- a/gio/giomodule.c | 22 | --- a/gio/giomodule.c |
23 | +++ b/gio/giomodule.c | 23 | +++ b/gio/giomodule.c |
24 | @@ -1271,11 +1271,6 @@ get_gio_module_dir (void) | 24 | @@ -1259,11 +1259,6 @@ get_gio_module_dir (void) |
25 | g_free (install_dir); | 25 | g_free (install_dir); |
26 | #else | 26 | #else |
27 | module_dir = g_strdup (GIO_MODULE_DIR); | 27 | module_dir = g_strdup (GIO_MODULE_DIR); |
@@ -33,7 +33,7 @@ index 17fabe6..8021208 100644 | |||
33 | #include <dlfcn.h> | 33 | #include <dlfcn.h> |
34 | { | 34 | { |
35 | g_autofree gchar *path = NULL; | 35 | g_autofree gchar *path = NULL; |
36 | @@ -1294,8 +1289,6 @@ get_gio_module_dir (void) | 36 | @@ -1282,8 +1277,6 @@ get_gio_module_dir (void) |
37 | } | 37 | } |
38 | } | 38 | } |
39 | } | 39 | } |
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/run-ptest b/meta/recipes-core/glib-2.0/files/run-ptest index 831bc3b91f..831bc3b91f 100644 --- a/meta/recipes-core/glib-2.0/glib-2.0/run-ptest +++ b/meta/recipes-core/glib-2.0/files/run-ptest | |||
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/skip-timeout.patch b/meta/recipes-core/glib-2.0/files/skip-timeout.patch index cd5ac287c3..0f8b51294b 100644 --- a/meta/recipes-core/glib-2.0/glib-2.0/skip-timeout.patch +++ b/meta/recipes-core/glib-2.0/files/skip-timeout.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From bb11d1a4ae77d93ec0743e54077cf0f990243fa6 Mon Sep 17 00:00:00 2001 | 1 | From ac75f5d9206e52eec64fef0db4cd91b58a764c99 Mon Sep 17 00:00:00 2001 |
2 | From: Ross Burton <ross.burton@arm.com> | 2 | From: Ross Burton <ross.burton@arm.com> |
3 | Date: Thu, 28 Mar 2024 16:27:09 +0000 | 3 | Date: Thu, 28 Mar 2024 16:27:09 +0000 |
4 | Subject: [PATCH] Skip /timeout/rounding test | 4 | Subject: [PATCH] Skip /timeout/rounding test |
@@ -16,7 +16,7 @@ Signed-off-by: Ross Burton <ross.burton@arm.com> | |||
16 | 1 file changed, 1 deletion(-) | 16 | 1 file changed, 1 deletion(-) |
17 | 17 | ||
18 | diff --git a/glib/tests/timeout.c b/glib/tests/timeout.c | 18 | diff --git a/glib/tests/timeout.c b/glib/tests/timeout.c |
19 | index 1ae3f3a34..85a715b0f 100644 | 19 | index 1ae3f3a..85a715b 100644 |
20 | --- a/glib/tests/timeout.c | 20 | --- a/glib/tests/timeout.c |
21 | +++ b/glib/tests/timeout.c | 21 | +++ b/glib/tests/timeout.c |
22 | @@ -214,7 +214,6 @@ main (int argc, char *argv[]) | 22 | @@ -214,7 +214,6 @@ main (int argc, char *argv[]) |
@@ -27,6 +27,3 @@ index 1ae3f3a34..85a715b0f 100644 | |||
27 | 27 | ||
28 | return g_test_run (); | 28 | return g_test_run (); |
29 | } | 29 | } |
30 | -- | ||
31 | 2.34.1 | ||
32 | |||
diff --git a/meta/recipes-core/glib-2.0/glib-2.0-initial_2.80.2.bb b/meta/recipes-core/glib-2.0/glib-2.0-initial_2.80.2.bb new file mode 100644 index 0000000000..7da2838265 --- /dev/null +++ b/meta/recipes-core/glib-2.0/glib-2.0-initial_2.80.2.bb | |||
@@ -0,0 +1,5 @@ | |||
1 | require glib.inc | ||
2 | PACKAGES = "" | ||
3 | PACKAGECONFIG = "" | ||
4 | |||
5 | |||
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/0001-Set-host_machine-correctly-when-building-with-mingw3.patch b/meta/recipes-core/glib-2.0/glib-2.0/0001-Set-host_machine-correctly-when-building-with-mingw3.patch deleted file mode 100644 index 32b4cea409..0000000000 --- a/meta/recipes-core/glib-2.0/glib-2.0/0001-Set-host_machine-correctly-when-building-with-mingw3.patch +++ /dev/null | |||
@@ -1,80 +0,0 @@ | |||
1 | From c0733f7a91dfe13152abc60c5a3064456b3e9d63 Mon Sep 17 00:00:00 2001 | ||
2 | From: Alexander Kanavin <alex.kanavin@gmail.com> | ||
3 | Date: Wed, 13 Feb 2019 15:32:05 +0100 | ||
4 | Subject: [PATCH] Set host_machine correctly when building with mingw32 | ||
5 | |||
6 | Upstream-Status: Inappropriate [oe-core specific] | ||
7 | Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com> | ||
8 | --- | ||
9 | gio/tests/meson.build | 8 ++++---- | ||
10 | glib/tests/meson.build | 2 +- | ||
11 | meson.build | 3 +++ | ||
12 | 3 files changed, 8 insertions(+), 5 deletions(-) | ||
13 | |||
14 | diff --git a/gio/tests/meson.build b/gio/tests/meson.build | ||
15 | index 4ef3343..e498e7e 100644 | ||
16 | --- a/gio/tests/meson.build | ||
17 | +++ b/gio/tests/meson.build | ||
18 | @@ -29,7 +29,7 @@ endif | ||
19 | |||
20 | test_cpp_args = test_c_args | ||
21 | |||
22 | -if host_machine.system() == 'windows' | ||
23 | +if host_system == 'windows' | ||
24 | common_gio_tests_deps += [iphlpapi_dep, winsock2, cc.find_library ('secur32')] | ||
25 | endif | ||
26 | |||
27 | @@ -230,7 +230,7 @@ if have_dbus_daemon | ||
28 | endif | ||
29 | |||
30 | # Test programs buildable on UNIX only | ||
31 | -if host_machine.system() != 'windows' | ||
32 | +if host_system != 'windows' | ||
33 | gio_tests += { | ||
34 | 'file' : {}, | ||
35 | 'gdbus-peer-object-manager' : {}, | ||
36 | @@ -562,7 +562,7 @@ if host_machine.system() != 'windows' | ||
37 | endif # unix | ||
38 | |||
39 | # Test programs buildable on Windows only | ||
40 | -if host_machine.system() == 'windows' | ||
41 | +if host_system == 'windows' | ||
42 | gio_tests += {'win32-streams' : {}} | ||
43 | endif | ||
44 | |||
45 | @@ -632,7 +632,7 @@ if cc.get_id() != 'msvc' and cc.get_id() != 'clang-cl' | ||
46 | } | ||
47 | endif | ||
48 | |||
49 | -if host_machine.system() != 'windows' | ||
50 | +if host_system != 'windows' | ||
51 | test_extra_programs += { | ||
52 | 'gdbus-example-unix-fd-client' : { | ||
53 | 'install' : false, | ||
54 | diff --git a/glib/tests/meson.build b/glib/tests/meson.build | ||
55 | index d80c86e..5329cda 100644 | ||
56 | --- a/glib/tests/meson.build | ||
57 | +++ b/glib/tests/meson.build | ||
58 | @@ -216,7 +216,7 @@ if glib_conf.has('HAVE_EVENTFD') | ||
59 | } | ||
60 | endif | ||
61 | |||
62 | -if host_machine.system() == 'windows' | ||
63 | +if host_system == 'windows' | ||
64 | if winsock2.found() | ||
65 | glib_tests += { | ||
66 | 'gpoll' : { | ||
67 | diff --git a/meson.build b/meson.build | ||
68 | index 813c9b7..6ee775e 100644 | ||
69 | --- a/meson.build | ||
70 | +++ b/meson.build | ||
71 | @@ -54,6 +54,9 @@ else | ||
72 | endif | ||
73 | |||
74 | host_system = host_machine.system() | ||
75 | +if host_system == 'mingw32' | ||
76 | + host_system = 'windows' | ||
77 | +endif | ||
78 | |||
79 | if host_system == 'darwin' | ||
80 | ios_test_code = '''#include <TargetConditionals.h> | ||
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/0001-Switch-from-the-deprecated-distutils-module-to-the-p.patch b/meta/recipes-core/glib-2.0/glib-2.0/0001-Switch-from-the-deprecated-distutils-module-to-the-p.patch deleted file mode 100644 index b11c283e6d..0000000000 --- a/meta/recipes-core/glib-2.0/glib-2.0/0001-Switch-from-the-deprecated-distutils-module-to-the-p.patch +++ /dev/null | |||
@@ -1,34 +0,0 @@ | |||
1 | From a8eb944a10353403241608a084787f6efcbb2eb7 Mon Sep 17 00:00:00 2001 | ||
2 | From: Jordan Williams <jordan@jwillikers.com> | ||
3 | Date: Fri, 1 Dec 2023 09:53:50 -0600 | ||
4 | Subject: [PATCH] Switch from the deprecated distutils module to the packaging | ||
5 | module | ||
6 | |||
7 | The distutils module was removed in Python 3.12. | ||
8 | |||
9 | Upstream-Status: Backport [https://gitlab.gnome.org/GNOME/glib/-/commit/6ef967a0f930ce37a8c9b5aff969693b34714291] | ||
10 | |||
11 | Signed-off-by: Martin Jansa <martin.jansa@gmail.com> | ||
12 | --- | ||
13 | gio/gdbus-2.0/codegen/utils.py | 4 ++-- | ||
14 | 1 file changed, 2 insertions(+), 2 deletions(-) | ||
15 | |||
16 | diff --git a/gio/gdbus-2.0/codegen/utils.py b/gio/gdbus-2.0/codegen/utils.py | ||
17 | index 0204610..08f1ba9 100644 | ||
18 | --- a/gio/gdbus-2.0/codegen/utils.py | ||
19 | +++ b/gio/gdbus-2.0/codegen/utils.py | ||
20 | @@ -19,7 +19,7 @@ | ||
21 | # | ||
22 | # Author: David Zeuthen <davidz@redhat.com> | ||
23 | |||
24 | -import distutils.version | ||
25 | +import packaging.version | ||
26 | import os | ||
27 | import sys | ||
28 | |||
29 | @@ -166,4 +166,4 @@ def version_cmp_key(key): | ||
30 | v = str(key[0]) | ||
31 | else: | ||
32 | v = "0" | ||
33 | - return (distutils.version.LooseVersion(v), key[1]) | ||
34 | + return (packaging.version.Version(v), key[1]) | ||
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/fix-regex.patch b/meta/recipes-core/glib-2.0/glib-2.0/fix-regex.patch deleted file mode 100644 index bdfbd55899..0000000000 --- a/meta/recipes-core/glib-2.0/glib-2.0/fix-regex.patch +++ /dev/null | |||
@@ -1,54 +0,0 @@ | |||
1 | From cce3ae98a2c1966719daabff5a4ec6cf94a846f6 Mon Sep 17 00:00:00 2001 | ||
2 | From: Philip Withnall <pwithnall@gnome.org> | ||
3 | Date: Mon, 26 Feb 2024 16:55:44 +0000 | ||
4 | Subject: [PATCH] tests: Remove variable-length lookbehind tests for GRegex | ||
5 | MIME-Version: 1.0 | ||
6 | Content-Type: text/plain; charset=UTF-8 | ||
7 | Content-Transfer-Encoding: 8bit | ||
8 | |||
9 | PCRE2 10.43 has now introduced support for variable-length lookbehind, | ||
10 | so these tests now fail if GLib is built against PCRE2 10.43 or higher. | ||
11 | |||
12 | See | ||
13 | https://github.com/PCRE2Project/pcre2/blob/e8db6fa7137f4c6f66cb87e0a3c9467252ec1ef7/ChangeLog#L94. | ||
14 | |||
15 | Rather than making the tests conditional on the version of PCRE2 in use, | ||
16 | just remove them. They are mostly testing the PCRE2 code rather than | ||
17 | any code in GLib, so don’t have much value. | ||
18 | |||
19 | This should fix CI runs on msys2-mingw32, which updated to PCRE2 10.43 2 | ||
20 | days ago. | ||
21 | |||
22 | Signed-off-by: Philip Withnall <pwithnall@gnome.org> | ||
23 | |||
24 | Upstream-Status: Backport [https://gitlab.gnome.org/GNOME/glib/-/commit/cce3ae98a2c1966719daabff5a4ec6cf94a846f6] | ||
25 | Signed-off-by: Alexander Kanavin <alex@linutronix.de> | ||
26 | --- | ||
27 | glib/tests/regex.c | 10 ---------- | ||
28 | 1 file changed, 10 deletions(-) | ||
29 | |||
30 | diff --git a/glib/tests/regex.c b/glib/tests/regex.c | ||
31 | index 1082526292..d7a698ec67 100644 | ||
32 | --- a/glib/tests/regex.c | ||
33 | +++ b/glib/tests/regex.c | ||
34 | @@ -1885,16 +1885,6 @@ test_lookbehind (void) | ||
35 | g_match_info_free (match); | ||
36 | g_regex_unref (regex); | ||
37 | |||
38 | - regex = g_regex_new ("(?<!dogs?|cats?) x", G_REGEX_OPTIMIZE, G_REGEX_MATCH_DEFAULT, &error); | ||
39 | - g_assert (regex == NULL); | ||
40 | - g_assert_error (error, G_REGEX_ERROR, G_REGEX_ERROR_VARIABLE_LENGTH_LOOKBEHIND); | ||
41 | - g_clear_error (&error); | ||
42 | - | ||
43 | - regex = g_regex_new ("(?<=ab(c|de)) foo", G_REGEX_OPTIMIZE, G_REGEX_MATCH_DEFAULT, &error); | ||
44 | - g_assert (regex == NULL); | ||
45 | - g_assert_error (error, G_REGEX_ERROR, G_REGEX_ERROR_VARIABLE_LENGTH_LOOKBEHIND); | ||
46 | - g_clear_error (&error); | ||
47 | - | ||
48 | regex = g_regex_new ("(?<=abc|abde)foo", G_REGEX_OPTIMIZE, G_REGEX_MATCH_DEFAULT, &error); | ||
49 | g_assert (regex); | ||
50 | g_assert_no_error (error); | ||
51 | -- | ||
52 | GitLab | ||
53 | |||
54 | |||
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/memory-monitor.patch b/meta/recipes-core/glib-2.0/glib-2.0/memory-monitor.patch deleted file mode 100644 index 4f38509da6..0000000000 --- a/meta/recipes-core/glib-2.0/glib-2.0/memory-monitor.patch +++ /dev/null | |||
@@ -1,361 +0,0 @@ | |||
1 | From ce840b6b111e1e109e511f6833d6aa419e2b723a Mon Sep 17 00:00:00 2001 | ||
2 | From: Philip Withnall <philip@tecnocode.co.uk> | ||
3 | Date: Tue, 23 Jan 2024 11:16:52 +0000 | ||
4 | Subject: [PATCH] Merge branch '2887-memory-monitor-tests' into 'main' | ||
5 | |||
6 | tests: Fix race condition in memory-monitor-dbus.test | ||
7 | |||
8 | Closes #2887 | ||
9 | |||
10 | See merge request GNOME/glib!3844 | ||
11 | |||
12 | Hopefully these commits fix the occasional failures we've been seeing: | ||
13 | https://bugzilla.yoctoproject.org/show_bug.cgi?id=15362 | ||
14 | |||
15 | Upstream-Status: Backport | ||
16 | Signed-off-by: Ross Burton <ross.burton@arm.com> | ||
17 | --- | ||
18 | gio/tests/memory-monitor-dbus.py.in | 64 +++++++++++++------- | ||
19 | gio/tests/memory-monitor-portal.py.in | 54 ++++++++++------- | ||
20 | gio/tests/power-profile-monitor-dbus.py.in | 35 ++++++----- | ||
21 | gio/tests/power-profile-monitor-portal.py.in | 34 ++++++----- | ||
22 | 4 files changed, 113 insertions(+), 74 deletions(-) | ||
23 | |||
24 | diff --git a/gio/tests/memory-monitor-dbus.py.in b/gio/tests/memory-monitor-dbus.py.in | ||
25 | index bf32918..7aae01e 100755 | ||
26 | --- a/gio/tests/memory-monitor-dbus.py.in | ||
27 | +++ b/gio/tests/memory-monitor-dbus.py.in | ||
28 | @@ -16,7 +16,6 @@ import sys | ||
29 | import subprocess | ||
30 | import fcntl | ||
31 | import os | ||
32 | -import time | ||
33 | |||
34 | import taptestrunner | ||
35 | |||
36 | @@ -57,53 +56,74 @@ try: | ||
37 | fcntl.fcntl(self.p_mock.stdout, fcntl.F_SETFL, flags | os.O_NONBLOCK) | ||
38 | self.last_warning = -1 | ||
39 | self.dbusmock = dbus.Interface(self.obj_lmm, dbusmock.MOCK_IFACE) | ||
40 | + | ||
41 | + try: | ||
42 | + self.wait_for_bus_object('org.freedesktop.LowMemoryMonitor', | ||
43 | + '/org/freedesktop/LowMemoryMonitor', | ||
44 | + system_bus=True) | ||
45 | + except: | ||
46 | + raise | ||
47 | + | ||
48 | self.memory_monitor = Gio.MemoryMonitor.dup_default() | ||
49 | + assert("GMemoryMonitorDBus" in str(self.memory_monitor)) | ||
50 | self.memory_monitor.connect("low-memory-warning", self.memory_warning_cb) | ||
51 | self.mainloop = GLib.MainLoop() | ||
52 | self.main_context = self.mainloop.get_context() | ||
53 | |||
54 | + # The LowMemoryMonitor API is stateless: it doesn’t expose any | ||
55 | + # properties, just a warning signal. Emit the signal in a loop until | ||
56 | + # the GMemoryMonitor instance has initialised and synchronised to | ||
57 | + # the right state. | ||
58 | + def emit_warning(level): | ||
59 | + self.dbusmock.EmitWarning(level) | ||
60 | + return GLib.SOURCE_CONTINUE | ||
61 | + | ||
62 | + idle_id = GLib.idle_add(emit_warning, 0) | ||
63 | + while self.last_warning != 0: | ||
64 | + self.main_context.iteration(True) | ||
65 | + GLib.source_remove(idle_id) | ||
66 | + | ||
67 | def tearDown(self): | ||
68 | self.p_mock.terminate() | ||
69 | self.p_mock.wait() | ||
70 | |||
71 | - def assertEventually(self, condition, message=None, timeout=50): | ||
72 | + def assertEventually(self, condition, message=None, timeout=5): | ||
73 | '''Assert that condition function eventually returns True. | ||
74 | |||
75 | - Timeout is in deciseconds, defaulting to 50 (5 seconds). message is | ||
76 | + Timeout is in seconds, defaulting to 5 seconds. message is | ||
77 | printed on failure. | ||
78 | ''' | ||
79 | - while timeout >= 0: | ||
80 | - context = GLib.MainContext.default() | ||
81 | - while context.iteration(False): | ||
82 | - pass | ||
83 | - if condition(): | ||
84 | - break | ||
85 | - timeout -= 1 | ||
86 | - time.sleep(0.1) | ||
87 | - else: | ||
88 | - self.fail(message or 'timed out waiting for ' + str(condition)) | ||
89 | + if not message: | ||
90 | + message = 'timed out waiting for ' + str(condition) | ||
91 | + | ||
92 | + def timed_out_cb(message): | ||
93 | + self.fail(message) | ||
94 | + return GLib.SOURCE_REMOVE | ||
95 | + | ||
96 | + timeout_source = GLib.timeout_source_new_seconds(timeout) | ||
97 | + timeout_source.set_callback(timed_out_cb, message) | ||
98 | + timeout_source.attach(self.main_context) | ||
99 | + | ||
100 | + while not condition(): | ||
101 | + self.main_context.iteration(True) | ||
102 | + | ||
103 | + timeout_source.destroy() | ||
104 | |||
105 | def memory_warning_cb(self, monitor, level): | ||
106 | + print("Received memory warning signal, level", level) | ||
107 | self.last_warning = level | ||
108 | self.main_context.wakeup() | ||
109 | |||
110 | def test_low_memory_warning_signal(self): | ||
111 | '''LowMemoryWarning signal''' | ||
112 | |||
113 | - # Wait 2 seconds | ||
114 | - timeout = 2 | ||
115 | - while timeout > 0: | ||
116 | - time.sleep(0.5) | ||
117 | - timeout -= 0.5 | ||
118 | - self.main_context.iteration(False) | ||
119 | - | ||
120 | self.dbusmock.EmitWarning(100) | ||
121 | # Wait 2 seconds or until warning | ||
122 | - self.assertEventually(lambda: self.last_warning == 100, "'100' low-memory warning not received", 20) | ||
123 | + self.assertEventually(lambda: self.last_warning == 100, "'100' low-memory warning not received", 2) | ||
124 | |||
125 | self.dbusmock.EmitWarning(255) | ||
126 | # Wait 2 seconds or until warning | ||
127 | - self.assertEventually(lambda: self.last_warning == 255, "'255' low-memory warning not received", 20) | ||
128 | + self.assertEventually(lambda: self.last_warning == 255, "'255' low-memory warning not received", 2) | ||
129 | |||
130 | except ImportError as e: | ||
131 | @unittest.skip("Cannot import %s" % e.name) | ||
132 | diff --git a/gio/tests/memory-monitor-portal.py.in b/gio/tests/memory-monitor-portal.py.in | ||
133 | index 748cee8..f570508 100755 | ||
134 | --- a/gio/tests/memory-monitor-portal.py.in | ||
135 | +++ b/gio/tests/memory-monitor-portal.py.in | ||
136 | @@ -16,7 +16,6 @@ import sys | ||
137 | import subprocess | ||
138 | import fcntl | ||
139 | import os | ||
140 | -import time | ||
141 | |||
142 | import taptestrunner | ||
143 | |||
144 | @@ -80,26 +79,44 @@ try: | ||
145 | self.mainloop = GLib.MainLoop() | ||
146 | self.main_context = self.mainloop.get_context() | ||
147 | |||
148 | + # The LowMemoryMonitor API is stateless: it doesn’t expose any | ||
149 | + # properties, just a warning signal. Emit the signal in a loop until | ||
150 | + # the GMemoryMonitor instance has initialised and synchronised to | ||
151 | + # the right state. | ||
152 | + def emit_warning(level): | ||
153 | + self.dbusmock.EmitWarning(level) | ||
154 | + return GLib.SOURCE_CONTINUE | ||
155 | + | ||
156 | + idle_id = GLib.idle_add(self.emit_warning, 0) | ||
157 | + while self.last_warning != 0: | ||
158 | + self.main_context.iteration(True) | ||
159 | + GLib.source_remove(idle_id) | ||
160 | + | ||
161 | def tearDown(self): | ||
162 | self.p_mock.terminate() | ||
163 | self.p_mock.wait() | ||
164 | |||
165 | - def assertEventually(self, condition, message=None, timeout=50): | ||
166 | + def assertEventually(self, condition, message=None, timeout=5): | ||
167 | '''Assert that condition function eventually returns True. | ||
168 | |||
169 | - Timeout is in deciseconds, defaulting to 50 (5 seconds). message is | ||
170 | + Timeout is in seconds, defaulting to 5 seconds. message is | ||
171 | printed on failure. | ||
172 | ''' | ||
173 | - while timeout >= 0: | ||
174 | - context = GLib.MainContext.default() | ||
175 | - while context.iteration(False): | ||
176 | - pass | ||
177 | - if condition(): | ||
178 | - break | ||
179 | - timeout -= 1 | ||
180 | - time.sleep(0.1) | ||
181 | - else: | ||
182 | - self.fail(message or 'timed out waiting for ' + str(condition)) | ||
183 | + if not message: | ||
184 | + message = 'timed out waiting for ' + str(condition) | ||
185 | + | ||
186 | + def timed_out_cb(message): | ||
187 | + self.fail(message) | ||
188 | + return GLib.SOURCE_REMOVE | ||
189 | + | ||
190 | + timeout_source = GLib.timeout_source_new_seconds(timeout) | ||
191 | + timeout_source.set_callback(timed_out_cb, message) | ||
192 | + timeout_source.attach(self.main_context) | ||
193 | + | ||
194 | + while not condition(): | ||
195 | + self.main_context.iteration(True) | ||
196 | + | ||
197 | + timeout_source.destroy() | ||
198 | |||
199 | def portal_memory_warning_cb(self, monitor, level): | ||
200 | self.last_warning = level | ||
201 | @@ -108,20 +125,13 @@ try: | ||
202 | def test_low_memory_warning_portal_signal(self): | ||
203 | '''LowMemoryWarning signal''' | ||
204 | |||
205 | - # Wait 2 seconds | ||
206 | - timeout = 2 | ||
207 | - while timeout > 0: | ||
208 | - time.sleep(0.5) | ||
209 | - timeout -= 0.5 | ||
210 | - self.main_context.iteration(False) | ||
211 | - | ||
212 | self.dbusmock.EmitWarning(100) | ||
213 | # Wait 2 seconds or until warning | ||
214 | - self.assertEventually(lambda: self.last_warning == 100, "'100' low-memory warning not received", 20) | ||
215 | + self.assertEventually(lambda: self.last_warning == 100, "'100' low-memory warning not received", 2) | ||
216 | |||
217 | self.dbusmock.EmitWarning(255) | ||
218 | # Wait 2 seconds or until warning | ||
219 | - self.assertEventually(lambda: self.last_warning == 255, "'255' low-memory warning not received", 20) | ||
220 | + self.assertEventually(lambda: self.last_warning == 255, "'255' low-memory warning not received", 2) | ||
221 | |||
222 | except ImportError as e: | ||
223 | @unittest.skip("Cannot import %s" % e.name) | ||
224 | diff --git a/gio/tests/power-profile-monitor-dbus.py.in b/gio/tests/power-profile-monitor-dbus.py.in | ||
225 | index 06e594f..f955afc 100755 | ||
226 | --- a/gio/tests/power-profile-monitor-dbus.py.in | ||
227 | +++ b/gio/tests/power-profile-monitor-dbus.py.in | ||
228 | @@ -16,7 +16,6 @@ import sys | ||
229 | import subprocess | ||
230 | import fcntl | ||
231 | import os | ||
232 | -import time | ||
233 | |||
234 | import taptestrunner | ||
235 | |||
236 | @@ -58,6 +57,7 @@ try: | ||
237 | self.power_saver_enabled = False | ||
238 | self.dbus_props = dbus.Interface(self.obj_ppd, dbus.PROPERTIES_IFACE) | ||
239 | self.power_profile_monitor = Gio.PowerProfileMonitor.dup_default() | ||
240 | + assert("GPowerProfileMonitorDBus" in str(self.power_profile_monitor)) | ||
241 | self.power_profile_monitor.connect("notify::power-saver-enabled", self.power_saver_enabled_cb) | ||
242 | self.mainloop = GLib.MainLoop() | ||
243 | self.main_context = self.mainloop.get_context() | ||
244 | @@ -66,22 +66,27 @@ try: | ||
245 | self.p_mock.terminate() | ||
246 | self.p_mock.wait() | ||
247 | |||
248 | - def assertEventually(self, condition, message=None, timeout=50): | ||
249 | + def assertEventually(self, condition, message=None, timeout=5): | ||
250 | '''Assert that condition function eventually returns True. | ||
251 | |||
252 | - Timeout is in deciseconds, defaulting to 50 (5 seconds). message is | ||
253 | + Timeout is in seconds, defaulting to 5 seconds. message is | ||
254 | printed on failure. | ||
255 | ''' | ||
256 | - while timeout >= 0: | ||
257 | - context = GLib.MainContext.default() | ||
258 | - while context.iteration(False): | ||
259 | - pass | ||
260 | - if condition(): | ||
261 | - break | ||
262 | - timeout -= 1 | ||
263 | - time.sleep(0.1) | ||
264 | - else: | ||
265 | - self.fail(message or 'timed out waiting for ' + str(condition)) | ||
266 | + if not message: | ||
267 | + message = 'timed out waiting for ' + str(condition) | ||
268 | + | ||
269 | + def timed_out_cb(message): | ||
270 | + self.fail(message) | ||
271 | + return GLib.SOURCE_REMOVE | ||
272 | + | ||
273 | + timeout_source = GLib.timeout_source_new_seconds(timeout) | ||
274 | + timeout_source.set_callback(timed_out_cb, message) | ||
275 | + timeout_source.attach(self.main_context) | ||
276 | + | ||
277 | + while not condition(): | ||
278 | + self.main_context.iteration(True) | ||
279 | + | ||
280 | + timeout_source.destroy() | ||
281 | |||
282 | def power_saver_enabled_cb(self, spec, data): | ||
283 | self.power_saver_enabled = self.power_profile_monitor.get_power_saver_enabled() | ||
284 | @@ -92,10 +97,10 @@ try: | ||
285 | |||
286 | self.assertEqual(self.power_profile_monitor.get_power_saver_enabled(), False) | ||
287 | self.dbus_props.Set('net.hadess.PowerProfiles', 'ActiveProfile', dbus.String('power-saver', variant_level=1)) | ||
288 | - self.assertEventually(lambda: self.power_saver_enabled == True, "power-saver didn't become enabled", 10) | ||
289 | + self.assertEventually(lambda: self.power_saver_enabled == True, "power-saver didn't become enabled", 1) | ||
290 | |||
291 | self.dbus_props.Set('net.hadess.PowerProfiles', 'ActiveProfile', dbus.String('balanced', variant_level=1)) | ||
292 | - self.assertEventually(lambda: self.power_saver_enabled == False, "power-saver didn't become disabled", 10) | ||
293 | + self.assertEventually(lambda: self.power_saver_enabled == False, "power-saver didn't become disabled", 1) | ||
294 | |||
295 | except ImportError as e: | ||
296 | @unittest.skip("Cannot import %s" % e.name) | ||
297 | diff --git a/gio/tests/power-profile-monitor-portal.py.in b/gio/tests/power-profile-monitor-portal.py.in | ||
298 | index 09e9a45..ad2abf6 100755 | ||
299 | --- a/gio/tests/power-profile-monitor-portal.py.in | ||
300 | +++ b/gio/tests/power-profile-monitor-portal.py.in | ||
301 | @@ -16,7 +16,6 @@ import sys | ||
302 | import subprocess | ||
303 | import fcntl | ||
304 | import os | ||
305 | -import time | ||
306 | |||
307 | import taptestrunner | ||
308 | |||
309 | @@ -90,22 +89,27 @@ try: | ||
310 | self.p_mock.terminate() | ||
311 | self.p_mock.wait() | ||
312 | |||
313 | - def assertEventually(self, condition, message=None, timeout=50): | ||
314 | + def assertEventually(self, condition, message=None, timeout=5): | ||
315 | '''Assert that condition function eventually returns True. | ||
316 | |||
317 | - Timeout is in deciseconds, defaulting to 50 (5 seconds). message is | ||
318 | + Timeout is in seconds, defaulting to 5 seconds. message is | ||
319 | printed on failure. | ||
320 | ''' | ||
321 | - while timeout >= 0: | ||
322 | - context = GLib.MainContext.default() | ||
323 | - while context.iteration(False): | ||
324 | - pass | ||
325 | - if condition(): | ||
326 | - break | ||
327 | - timeout -= 1 | ||
328 | - time.sleep(0.1) | ||
329 | - else: | ||
330 | - self.fail(message or 'timed out waiting for ' + str(condition)) | ||
331 | + if not message: | ||
332 | + message = 'timed out waiting for ' + str(condition) | ||
333 | + | ||
334 | + def timed_out_cb(message): | ||
335 | + self.fail(message) | ||
336 | + return GLib.SOURCE_REMOVE | ||
337 | + | ||
338 | + timeout_source = GLib.timeout_source_new_seconds(timeout) | ||
339 | + timeout_source.set_callback(timed_out_cb, message) | ||
340 | + timeout_source.attach(self.main_context) | ||
341 | + | ||
342 | + while not condition(): | ||
343 | + self.main_context.iteration(True) | ||
344 | + | ||
345 | + timeout_source.destroy() | ||
346 | |||
347 | def power_saver_enabled_cb(self, spec, data): | ||
348 | self.power_saver_enabled = self.power_profile_monitor.get_power_saver_enabled() | ||
349 | @@ -116,10 +120,10 @@ try: | ||
350 | |||
351 | self.assertEqual(self.power_profile_monitor.get_power_saver_enabled(), False) | ||
352 | self.dbus_props.Set('net.hadess.PowerProfiles', 'ActiveProfile', dbus.String('power-saver', variant_level=1)) | ||
353 | - self.assertEventually(lambda: self.power_saver_enabled == True, "power-saver didn't become enabled", 10) | ||
354 | + self.assertEventually(lambda: self.power_saver_enabled == True, "power-saver didn't become enabled", 1) | ||
355 | |||
356 | self.dbus_props.Set('net.hadess.PowerProfiles', 'ActiveProfile', dbus.String('balanced', variant_level=1)) | ||
357 | - self.assertEventually(lambda: self.power_saver_enabled == False, "power-saver didn't become disabled", 10) | ||
358 | + self.assertEventually(lambda: self.power_saver_enabled == False, "power-saver didn't become disabled", 1) | ||
359 | |||
360 | def test_power_profile_power_saver_enabled_portal_default(self): | ||
361 | '''power-saver-enabled property default value''' | ||
diff --git a/meta/recipes-core/glib-2.0/glib-2.0_2.78.4.bb b/meta/recipes-core/glib-2.0/glib-2.0_2.78.4.bb deleted file mode 100644 index b1669ead75..0000000000 --- a/meta/recipes-core/glib-2.0/glib-2.0_2.78.4.bb +++ /dev/null | |||
@@ -1,57 +0,0 @@ | |||
1 | require glib.inc | ||
2 | |||
3 | PE = "1" | ||
4 | |||
5 | SHRT_VER = "${@oe.utils.trim_version("${PV}", 2)}" | ||
6 | |||
7 | SRC_URI = "${GNOME_MIRROR}/glib/${SHRT_VER}/glib-${PV}.tar.xz \ | ||
8 | file://run-ptest \ | ||
9 | file://0001-Fix-DATADIRNAME-on-uclibc-Linux.patch \ | ||
10 | file://0001-Remove-the-warning-about-deprecated-paths-in-schemas.patch \ | ||
11 | file://0001-Install-gio-querymodules-as-libexec_PROGRAM.patch \ | ||
12 | file://0010-Do-not-hardcode-python-path-into-various-tools.patch \ | ||
13 | file://0001-Set-host_machine-correctly-when-building-with-mingw3.patch \ | ||
14 | file://0001-Do-not-write-bindir-into-pkg-config-files.patch \ | ||
15 | file://0001-meson-Run-atomics-test-on-clang-as-well.patch \ | ||
16 | file://0001-gio-tests-resources.c-comment-out-a-build-host-only-.patch \ | ||
17 | file://0001-Switch-from-the-deprecated-distutils-module-to-the-p.patch \ | ||
18 | file://memory-monitor.patch \ | ||
19 | file://fix-regex.patch \ | ||
20 | file://skip-timeout.patch \ | ||
21 | " | ||
22 | SRC_URI:append:class-native = " file://relocate-modules.patch \ | ||
23 | file://0001-meson.build-do-not-enable-pidfd-features-on-native-g.patch \ | ||
24 | " | ||
25 | |||
26 | SRC_URI[sha256sum] = "24b8e0672dca120cc32d394bccb85844e732e04fe75d18bb0573b2dbc7548f63" | ||
27 | |||
28 | # Find any meson cross files in FILESPATH that are relevant for the current | ||
29 | # build (using siteinfo) and add them to EXTRA_OEMESON. | ||
30 | inherit siteinfo | ||
31 | def find_meson_cross_files(d): | ||
32 | if bb.data.inherits_class('native', d): | ||
33 | return "" | ||
34 | |||
35 | thisdir = os.path.normpath(d.getVar("THISDIR")) | ||
36 | import collections | ||
37 | sitedata = siteinfo_data(d) | ||
38 | # filename -> found | ||
39 | files = collections.OrderedDict() | ||
40 | for path in d.getVar("FILESPATH").split(":"): | ||
41 | for element in sitedata: | ||
42 | filename = os.path.normpath(os.path.join(path, "meson.cross.d", element)) | ||
43 | sanitized_path = filename.replace(thisdir, "${THISDIR}") | ||
44 | if sanitized_path == filename: | ||
45 | if os.path.exists(filename): | ||
46 | bb.error("Cannot add '%s' to --cross-file, because it's not relative to THISDIR '%s' and sstate signature would contain this full path" % (filename, thisdir)) | ||
47 | continue | ||
48 | files[filename.replace(thisdir, "${THISDIR}")] = os.path.exists(filename) | ||
49 | |||
50 | items = ["--cross-file=" + k for k,v in files.items() if v] | ||
51 | d.appendVar("EXTRA_OEMESON", " " + " ".join(items)) | ||
52 | items = ["%s:%s" % (k, "True" if v else "False") for k,v in files.items()] | ||
53 | d.appendVarFlag("do_configure", "file-checksums", " " + " ".join(items)) | ||
54 | |||
55 | python () { | ||
56 | find_meson_cross_files(d) | ||
57 | } | ||
diff --git a/meta/recipes-core/glib-2.0/glib-2.0_2.80.2.bb b/meta/recipes-core/glib-2.0/glib-2.0_2.80.2.bb new file mode 100644 index 0000000000..ef80cc47cf --- /dev/null +++ b/meta/recipes-core/glib-2.0/glib-2.0_2.80.2.bb | |||
@@ -0,0 +1,2 @@ | |||
1 | require glib.inc | ||
2 | |||
diff --git a/meta/recipes-core/glib-2.0/glib.inc b/meta/recipes-core/glib-2.0/glib.inc index fac8875d84..690d1c162c 100644 --- a/meta/recipes-core/glib-2.0/glib.inc +++ b/meta/recipes-core/glib-2.0/glib.inc | |||
@@ -28,21 +28,26 @@ PACKAGES += "${PN}-codegen ${PN}-utils" | |||
28 | 28 | ||
29 | LEAD_SONAME = "libglib-2.0.*" | 29 | LEAD_SONAME = "libglib-2.0.*" |
30 | 30 | ||
31 | inherit meson gettext gtk-doc pkgconfig ptest-gnome upstream-version-is-even bash-completion gio-module-cache manpages gobject-introspection-data | 31 | inherit meson gettext gi-docgen pkgconfig ptest-gnome upstream-version-is-even bash-completion gio-module-cache manpages gobject-introspection-data |
32 | |||
33 | GTKDOC_MESON_OPTION = "gtk_doc" | ||
34 | 32 | ||
35 | S = "${WORKDIR}/glib-${PV}" | 33 | S = "${WORKDIR}/glib-${PV}" |
36 | 34 | ||
35 | GIDOCGEN_MESON_OPTION = "documentation" | ||
36 | |||
37 | PACKAGECONFIG ??= "libmount \ | 37 | PACKAGECONFIG ??= "libmount \ |
38 | ${@bb.utils.contains('GI_DATA_ENABLED', 'True', 'introspection', '', d)} \ | ||
38 | ${@bb.utils.contains('PTEST_ENABLED', '1', 'tests', '', d)}" | 39 | ${@bb.utils.contains('PTEST_ENABLED', '1', 'tests', '', d)}" |
40 | PACKAGECONFIG:class-native = "" | ||
41 | |||
39 | PACKAGECONFIG[libmount] = "-Dlibmount=enabled,-Dlibmount=disabled,util-linux" | 42 | PACKAGECONFIG[libmount] = "-Dlibmount=enabled,-Dlibmount=disabled,util-linux" |
40 | PACKAGECONFIG[manpages] = "-Dman=true, -Dman=false, libxslt-native xmlto-native" | 43 | PACKAGECONFIG[manpages] = "-Dman-pages=enabled, -Dman-pages=disabled, python3-docutils-native" |
41 | PACKAGECONFIG[libelf] = "-Dlibelf=enabled,-Dlibelf=disabled,elfutils" | 44 | PACKAGECONFIG[libelf] = "-Dlibelf=enabled,-Dlibelf=disabled,elfutils" |
42 | PACKAGECONFIG[tests] = "-Dinstalled_tests=true,-Dinstalled_tests=false," | 45 | PACKAGECONFIG[tests] = "-Dinstalled_tests=true,-Dinstalled_tests=false," |
43 | PACKAGECONFIG[selinux] = "-Dselinux=enabled,-Dselinux=disabled,libselinux" | 46 | PACKAGECONFIG[selinux] = "-Dselinux=enabled,-Dselinux=disabled,libselinux" |
47 | PACKAGECONFIG[introspection] = "-Dintrospection=enabled,-Dintrospection=disabled,gobject-introspection-native gobject-introspection glib-2.0-initial" | ||
44 | 48 | ||
45 | EXTRA_OEMESON = "-Ddtrace=false -Dsystemtap=false" | 49 | EXTRA_OEMESON = "-Ddtrace=false -Dsystemtap=false" |
50 | EXTRA_OEMESON:append:class-target = " ${@'-Dgir_dir_prefix=${libdir}' if d.getVar('MULTILIBS') else ''}" | ||
46 | 51 | ||
47 | do_configure:prepend() { | 52 | do_configure:prepend() { |
48 | sed -i -e '1s,#!.*,#!${USRBINPATH}/env python3,' ${S}/gio/gdbus-2.0/codegen/gdbus-codegen.in | 53 | sed -i -e '1s,#!.*,#!${USRBINPATH}/env python3,' ${S}/gio/gdbus-2.0/codegen/gdbus-codegen.in |
@@ -53,14 +58,16 @@ FILES:${PN} = "${libdir}/lib*${SOLIBS} \ | |||
53 | ${libexecdir}/*gio-querymodules \ | 58 | ${libexecdir}/*gio-querymodules \ |
54 | ${libexecdir}/*gio-launch-desktop \ | 59 | ${libexecdir}/*gio-launch-desktop \ |
55 | ${datadir}/glib-2.0/dtds \ | 60 | ${datadir}/glib-2.0/dtds \ |
56 | ${datadir}/glib-2.0/schemas" | 61 | ${datadir}/glib-2.0/schemas \ |
57 | 62 | ${libdir}/girepository-*/*.typelib \ | |
63 | " | ||
58 | FILES:${PN}-dev += "${libdir}/glib-2.0/include \ | 64 | FILES:${PN}-dev += "${libdir}/glib-2.0/include \ |
59 | ${libdir}/gio/modules/lib*${SOLIBSDEV} \ | 65 | ${libdir}/gio/modules/lib*${SOLIBSDEV} \ |
60 | ${libdir}/gio/modules/*.la \ | 66 | ${libdir}/gio/modules/*.la \ |
61 | ${datadir}/glib-2.0/gettext/po/Makefile.in.in \ | 67 | ${datadir}/glib-2.0/gettext/po/Makefile.in.in \ |
62 | ${datadir}/glib-2.0/schemas/gschema.dtd \ | 68 | ${datadir}/glib-2.0/schemas/gschema.dtd \ |
63 | ${datadir}/glib-2.0/valgrind/glib.supp \ | 69 | ${datadir}/glib-2.0/valgrind/glib.supp \ |
70 | ${datadir}/gir-*/*.gir ${libdir}/gir-*/*.gir \ | ||
64 | ${datadir}/gettext/its" | 71 | ${datadir}/gettext/its" |
65 | FILES:${PN}-dbg += "${datadir}/glib-2.0/gdb ${datadir}/gdb" | 72 | FILES:${PN}-dbg += "${datadir}/glib-2.0/gdb ${datadir}/gdb" |
66 | FILES:${PN}-codegen = "${datadir}/glib-2.0/codegen/*.py \ | 73 | FILES:${PN}-codegen = "${datadir}/glib-2.0/codegen/*.py \ |
@@ -204,3 +211,56 @@ RDEPENDS:${PN}-ptest:append:libc-glibc = "\ | |||
204 | locale-base-pl-pl.iso-8859-2 \ | 211 | locale-base-pl-pl.iso-8859-2 \ |
205 | locale-base-tr-tr \ | 212 | locale-base-tr-tr \ |
206 | " | 213 | " |
214 | |||
215 | PE = "1" | ||
216 | |||
217 | SHRT_VER = "${@oe.utils.trim_version("${PV}", 2)}" | ||
218 | |||
219 | SRC_URI = "${GNOME_MIRROR}/glib/${SHRT_VER}/glib-${PV}.tar.xz \ | ||
220 | file://run-ptest \ | ||
221 | file://0001-Fix-DATADIRNAME-on-uclibc-Linux.patch \ | ||
222 | file://0001-Remove-the-warning-about-deprecated-paths-in-schemas.patch \ | ||
223 | file://0001-Install-gio-querymodules-as-libexec_PROGRAM.patch \ | ||
224 | file://0010-Do-not-hardcode-python-path-into-various-tools.patch \ | ||
225 | file://0001-Do-not-write-bindir-into-pkg-config-files.patch \ | ||
226 | file://0001-meson-Run-atomics-test-on-clang-as-well.patch \ | ||
227 | file://0001-gio-tests-resources.c-comment-out-a-build-host-only-.patch \ | ||
228 | file://skip-timeout.patch \ | ||
229 | file://0001-girepository-introspection-correctly-install-.gir-fi.patch \ | ||
230 | " | ||
231 | SRC_URI:append:class-native = " file://relocate-modules.patch \ | ||
232 | file://0001-meson.build-do-not-enable-pidfd-features-on-native-g.patch \ | ||
233 | " | ||
234 | |||
235 | SRC_URI[sha256sum] = "b9cfb6f7a5bd5b31238fd5d56df226b2dda5ea37611475bf89f6a0f9400fe8bd" | ||
236 | |||
237 | # Find any meson cross files in FILESPATH that are relevant for the current | ||
238 | # build (using siteinfo) and add them to EXTRA_OEMESON. | ||
239 | inherit siteinfo | ||
240 | def find_meson_cross_files(d): | ||
241 | if bb.data.inherits_class('native', d): | ||
242 | return "" | ||
243 | |||
244 | thisdir = os.path.normpath(d.getVar("THISDIR")) | ||
245 | import collections | ||
246 | sitedata = siteinfo_data(d) | ||
247 | # filename -> found | ||
248 | files = collections.OrderedDict() | ||
249 | for path in d.getVar("FILESPATH").split(":"): | ||
250 | for element in sitedata: | ||
251 | filename = os.path.normpath(os.path.join(path, "meson.cross.d", element)) | ||
252 | sanitized_path = filename.replace(thisdir, "${THISDIR}") | ||
253 | if sanitized_path == filename: | ||
254 | if os.path.exists(filename): | ||
255 | bb.error("Cannot add '%s' to --cross-file, because it's not relative to THISDIR '%s' and sstate signature would contain this full path" % (filename, thisdir)) | ||
256 | continue | ||
257 | files[filename.replace(thisdir, "${THISDIR}")] = os.path.exists(filename) | ||
258 | |||
259 | items = ["--cross-file=" + k for k,v in files.items() if v] | ||
260 | d.appendVar("EXTRA_OEMESON", " " + " ".join(items)) | ||
261 | items = ["%s:%s" % (k, "True" if v else "False") for k,v in files.items()] | ||
262 | d.appendVarFlag("do_configure", "file-checksums", " " + " ".join(items)) | ||
263 | |||
264 | python () { | ||
265 | find_meson_cross_files(d) | ||
266 | } | ||
diff --git a/meta/recipes-core/glibc/glibc-testsuite_2.39.bb b/meta/recipes-core/glibc/glibc-testsuite_2.39.bb index 2e076f4b0f..3a2764e40b 100644 --- a/meta/recipes-core/glibc/glibc-testsuite_2.39.bb +++ b/meta/recipes-core/glibc/glibc-testsuite_2.39.bb | |||
@@ -18,7 +18,7 @@ TOOLCHAIN_TEST_HOST_PORT ??= "2222" | |||
18 | do_check[nostamp] = "1" | 18 | do_check[nostamp] = "1" |
19 | do_check[network] = "1" | 19 | do_check[network] = "1" |
20 | do_check:append () { | 20 | do_check:append () { |
21 | chmod 0755 ${WORKDIR}/check-test-wrapper | 21 | chmod 0755 ${UNPACKDIR}/check-test-wrapper |
22 | 22 | ||
23 | oe_runmake -i \ | 23 | oe_runmake -i \ |
24 | QEMU_SYSROOT="${RECIPE_SYSROOT}" \ | 24 | QEMU_SYSROOT="${RECIPE_SYSROOT}" \ |
@@ -26,7 +26,7 @@ do_check:append () { | |||
26 | SSH_HOST="${TOOLCHAIN_TEST_HOST}" \ | 26 | SSH_HOST="${TOOLCHAIN_TEST_HOST}" \ |
27 | SSH_HOST_USER="${TOOLCHAIN_TEST_HOST_USER}" \ | 27 | SSH_HOST_USER="${TOOLCHAIN_TEST_HOST_USER}" \ |
28 | SSH_HOST_PORT="${TOOLCHAIN_TEST_HOST_PORT}" \ | 28 | SSH_HOST_PORT="${TOOLCHAIN_TEST_HOST_PORT}" \ |
29 | test-wrapper="${WORKDIR}/check-test-wrapper ${TOOLCHAIN_TEST_TARGET}" \ | 29 | test-wrapper="${UNPACKDIR}/check-test-wrapper ${TOOLCHAIN_TEST_TARGET}" \ |
30 | check | 30 | check |
31 | } | 31 | } |
32 | 32 | ||
diff --git a/meta/recipes-core/glibc/glibc-version.inc b/meta/recipes-core/glibc/glibc-version.inc index 1e4a323d64..20125e4339 100644 --- a/meta/recipes-core/glibc/glibc-version.inc +++ b/meta/recipes-core/glibc/glibc-version.inc | |||
@@ -1,6 +1,6 @@ | |||
1 | SRCBRANCH ?= "release/2.39/master" | 1 | SRCBRANCH ?= "release/2.39/master" |
2 | PV = "2.39+git" | 2 | PV = "2.39+git" |
3 | SRCREV_glibc ?= "273a835fe7c685cc54266bb8b502787bad5e9bae" | 3 | SRCREV_glibc ?= "c7c3f5bf80ae86b34501f473f1a9fc545c911b7f" |
4 | SRCREV_localedef ?= "fab74f31b3811df543e24b6de47efdf45b538abc" | 4 | SRCREV_localedef ?= "fab74f31b3811df543e24b6de47efdf45b538abc" |
5 | 5 | ||
6 | GLIBC_GIT_URI ?= "git://sourceware.org/git/glibc.git;protocol=https" | 6 | GLIBC_GIT_URI ?= "git://sourceware.org/git/glibc.git;protocol=https" |
diff --git a/meta/recipes-core/images/build-appliance-image_15.0.0.bb b/meta/recipes-core/images/build-appliance-image_15.0.0.bb index 4cf55519cc..e406968df2 100644 --- a/meta/recipes-core/images/build-appliance-image_15.0.0.bb +++ b/meta/recipes-core/images/build-appliance-image_15.0.0.bb | |||
@@ -44,10 +44,10 @@ IMAGE_CMD:ext4:append () { | |||
44 | fakeroot do_populate_poky_src () { | 44 | fakeroot do_populate_poky_src () { |
45 | # Because fetch2's git's unpack uses -s cloneflag, the unpacked git repo | 45 | # Because fetch2's git's unpack uses -s cloneflag, the unpacked git repo |
46 | # will become invalid in the target. | 46 | # will become invalid in the target. |
47 | rm -rf ${WORKDIR}/git/.git | 47 | rm -rf ${UNPACKDIR}/git/.git |
48 | rm -f ${WORKDIR}/git/.gitignore | 48 | rm -f ${UNPACKDIR}/git/.gitignore |
49 | 49 | ||
50 | cp -R ${WORKDIR}/git ${IMAGE_ROOTFS}/home/builder/poky | 50 | cp -R ${UNPACKDIR}/git ${IMAGE_ROOTFS}/home/builder/poky |
51 | 51 | ||
52 | mkdir -p ${IMAGE_ROOTFS}/home/builder/poky/build/conf | 52 | mkdir -p ${IMAGE_ROOTFS}/home/builder/poky/build/conf |
53 | mkdir -p ${IMAGE_ROOTFS}/home/builder/poky/build/downloads | 53 | mkdir -p ${IMAGE_ROOTFS}/home/builder/poky/build/downloads |
@@ -58,10 +58,10 @@ fakeroot do_populate_poky_src () { | |||
58 | fi | 58 | fi |
59 | 59 | ||
60 | # Place the README_VirtualBox_Guest_Additions file in builders home folder. | 60 | # Place the README_VirtualBox_Guest_Additions file in builders home folder. |
61 | cp ${WORKDIR}/README_VirtualBox_Guest_Additions.txt ${IMAGE_ROOTFS}/home/builder/ | 61 | cp ${UNPACKDIR}/README_VirtualBox_Guest_Additions.txt ${IMAGE_ROOTFS}/home/builder/ |
62 | 62 | ||
63 | # Place the README_VirtualBox_Toaster file in builders home folder. | 63 | # Place the README_VirtualBox_Toaster file in builders home folder. |
64 | cp ${WORKDIR}/README_VirtualBox_Toaster.txt ${IMAGE_ROOTFS}/home/builder/ | 64 | cp ${UNPACKDIR}/README_VirtualBox_Toaster.txt ${IMAGE_ROOTFS}/home/builder/ |
65 | 65 | ||
66 | echo "INHERIT += \"rm_work\"" >> ${IMAGE_ROOTFS}/home/builder/poky/build/conf/auto.conf | 66 | echo "INHERIT += \"rm_work\"" >> ${IMAGE_ROOTFS}/home/builder/poky/build/conf/auto.conf |
67 | echo "export LC_ALL=en_US.utf8" >> ${IMAGE_ROOTFS}/home/builder/.bashrc | 67 | echo "export LC_ALL=en_US.utf8" >> ${IMAGE_ROOTFS}/home/builder/.bashrc |
@@ -129,10 +129,15 @@ python () { | |||
129 | d.delVarFlag("do_unpack", "noexec") | 129 | d.delVarFlag("do_unpack", "noexec") |
130 | } | 130 | } |
131 | 131 | ||
132 | # ${S} doesn't exist for us | ||
133 | do_qa_unpack() { | ||
134 | return | ||
135 | } | ||
136 | |||
132 | create_bundle_files () { | 137 | create_bundle_files () { |
133 | cd ${WORKDIR} | 138 | cd ${WORKDIR} |
134 | mkdir -p Yocto_Build_Appliance | 139 | mkdir -p Yocto_Build_Appliance |
135 | cp *.vmx* Yocto_Build_Appliance | 140 | cp ${UNPACKDIR}/*.vmx* Yocto_Build_Appliance |
136 | ln -sf ${IMGDEPLOYDIR}/${IMAGE_NAME}.wic.vmdk Yocto_Build_Appliance/Yocto_Build_Appliance.vmdk | 141 | ln -sf ${IMGDEPLOYDIR}/${IMAGE_NAME}.wic.vmdk Yocto_Build_Appliance/Yocto_Build_Appliance.vmdk |
137 | ln -sf ${IMGDEPLOYDIR}/${IMAGE_NAME}.wic.vhdx Yocto_Build_Appliance/Yocto_Build_Appliance.vhdx | 142 | ln -sf ${IMGDEPLOYDIR}/${IMAGE_NAME}.wic.vhdx Yocto_Build_Appliance/Yocto_Build_Appliance.vhdx |
138 | ln -sf ${IMGDEPLOYDIR}/${IMAGE_NAME}.wic.vhd Yocto_Build_Appliance/Yocto_Build_Appliance.vhd | 143 | ln -sf ${IMGDEPLOYDIR}/${IMAGE_NAME}.wic.vhd Yocto_Build_Appliance/Yocto_Build_Appliance.vhd |
diff --git a/meta/recipes-core/init-ifupdown/init-ifupdown_1.0.bb b/meta/recipes-core/init-ifupdown/init-ifupdown_1.0.bb index 49c7fd71b1..ddf9d1b311 100644 --- a/meta/recipes-core/init-ifupdown/init-ifupdown_1.0.bb +++ b/meta/recipes-core/init-ifupdown/init-ifupdown_1.0.bb | |||
@@ -15,7 +15,8 @@ SRC_URI = "file://copyright \ | |||
15 | file://interfaces \ | 15 | file://interfaces \ |
16 | file://nfsroot" | 16 | file://nfsroot" |
17 | 17 | ||
18 | S = "${WORKDIR}" | 18 | S = "${WORKDIR}/sources" |
19 | UNPACKDIR = "${S}" | ||
19 | 20 | ||
20 | do_install () { | 21 | do_install () { |
21 | install -d ${D}${sysconfdir}/init.d \ | 22 | install -d ${D}${sysconfdir}/init.d \ |
diff --git a/meta/recipes-core/initrdscripts/initramfs-boot_1.0.bb b/meta/recipes-core/initrdscripts/initramfs-boot_1.0.bb index 01d2771e3f..ec3544c67a 100644 --- a/meta/recipes-core/initrdscripts/initramfs-boot_1.0.bb +++ b/meta/recipes-core/initrdscripts/initramfs-boot_1.0.bb | |||
@@ -3,8 +3,8 @@ LICENSE = "MIT" | |||
3 | LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" | 3 | LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" |
4 | SRC_URI = "file://init-boot.sh" | 4 | SRC_URI = "file://init-boot.sh" |
5 | 5 | ||
6 | 6 | S = "${WORKDIR}/sources" | |
7 | S = "${WORKDIR}" | 7 | UNPACKDIR = "${S}" |
8 | 8 | ||
9 | do_install() { | 9 | do_install() { |
10 | install -m 0755 ${S}/init-boot.sh ${D}/init | 10 | install -m 0755 ${S}/init-boot.sh ${D}/init |
diff --git a/meta/recipes-core/initrdscripts/initramfs-framework/init b/meta/recipes-core/initrdscripts/initramfs-framework/init index 567694aff7..e3d8caa0e1 100755 --- a/meta/recipes-core/initrdscripts/initramfs-framework/init +++ b/meta/recipes-core/initrdscripts/initramfs-framework/init | |||
@@ -54,17 +54,20 @@ debug() { | |||
54 | } | 54 | } |
55 | 55 | ||
56 | # Prints a message and start a endless loop | 56 | # Prints a message and start a endless loop |
57 | # Force reboot if init_fatal_reboot bootparam is set | ||
57 | fatal() { | 58 | fatal() { |
58 | echo $1 >/dev/console | 59 | echo $1 >/dev/console |
59 | echo >/dev/console | 60 | echo >/dev/console |
60 | 61 | ||
61 | if [ -n "$bootparam_init_fatal_sh" ]; then | 62 | if [ -n "$bootparam_init_fatal_reboot" ]; then |
62 | sh | 63 | reboot -f |
63 | else | 64 | elif [ -n "$bootparam_init_fatal_sh" ]; then |
64 | while [ "true" ]; do | 65 | sh |
65 | sleep 3600 | 66 | else |
66 | done | 67 | while [ "true" ]; do |
67 | fi | 68 | sleep 3600 |
69 | done | ||
70 | fi | ||
68 | } | 71 | } |
69 | 72 | ||
70 | # Variables shared amoung modules | 73 | # Variables shared amoung modules |
diff --git a/meta/recipes-core/initrdscripts/initramfs-framework_1.0.bb b/meta/recipes-core/initrdscripts/initramfs-framework_1.0.bb index 4dbb56a42d..bb4984366d 100644 --- a/meta/recipes-core/initrdscripts/initramfs-framework_1.0.bb +++ b/meta/recipes-core/initrdscripts/initramfs-framework_1.0.bb | |||
@@ -20,7 +20,8 @@ SRC_URI = "file://init \ | |||
20 | file://overlayroot \ | 20 | file://overlayroot \ |
21 | " | 21 | " |
22 | 22 | ||
23 | S = "${WORKDIR}" | 23 | S = "${WORKDIR}/sources" |
24 | UNPACKDIR = "${S}" | ||
24 | 25 | ||
25 | do_install() { | 26 | do_install() { |
26 | install -d ${D}/init.d | 27 | install -d ${D}/init.d |
diff --git a/meta/recipes-core/initrdscripts/initramfs-live-boot-tiny_1.0.bb b/meta/recipes-core/initrdscripts/initramfs-live-boot-tiny_1.0.bb index a06e7902ee..40046f30a7 100644 --- a/meta/recipes-core/initrdscripts/initramfs-live-boot-tiny_1.0.bb +++ b/meta/recipes-core/initrdscripts/initramfs-live-boot-tiny_1.0.bb | |||
@@ -5,8 +5,8 @@ DEPENDS = "virtual/kernel" | |||
5 | RDEPENDS:${PN} = "busybox-mdev" | 5 | RDEPENDS:${PN} = "busybox-mdev" |
6 | SRC_URI = "file://init-live.sh" | 6 | SRC_URI = "file://init-live.sh" |
7 | 7 | ||
8 | 8 | S = "${WORKDIR}/sources" | |
9 | S = "${WORKDIR}" | 9 | UNPACKDIR = "${S}" |
10 | 10 | ||
11 | do_install() { | 11 | do_install() { |
12 | install -m 0755 ${S}/init-live.sh ${D}/init | 12 | install -m 0755 ${S}/init-live.sh ${D}/init |
diff --git a/meta/recipes-core/initrdscripts/initramfs-live-boot_1.0.bb b/meta/recipes-core/initrdscripts/initramfs-live-boot_1.0.bb index e1bf15d293..7851cc9605 100644 --- a/meta/recipes-core/initrdscripts/initramfs-live-boot_1.0.bb +++ b/meta/recipes-core/initrdscripts/initramfs-live-boot_1.0.bb | |||
@@ -5,8 +5,8 @@ DEPENDS = "virtual/kernel" | |||
5 | RDEPENDS:${PN} = "udev udev-extraconf" | 5 | RDEPENDS:${PN} = "udev udev-extraconf" |
6 | SRC_URI = "file://init-live.sh" | 6 | SRC_URI = "file://init-live.sh" |
7 | 7 | ||
8 | 8 | S = "${WORKDIR}/sources" | |
9 | S = "${WORKDIR}" | 9 | UNPACKDIR = "${S}" |
10 | 10 | ||
11 | do_install() { | 11 | do_install() { |
12 | install -m 0755 ${S}/init-live.sh ${D}/init | 12 | install -m 0755 ${S}/init-live.sh ${D}/init |
diff --git a/meta/recipes-core/initrdscripts/initramfs-live-install-efi-testfs_1.0.bb b/meta/recipes-core/initrdscripts/initramfs-live-install-efi-testfs_1.0.bb index 1225ce4df9..31291bcdf2 100644 --- a/meta/recipes-core/initrdscripts/initramfs-live-install-efi-testfs_1.0.bb +++ b/meta/recipes-core/initrdscripts/initramfs-live-install-efi-testfs_1.0.bb | |||
@@ -5,7 +5,8 @@ SRC_URI = "file://init-install-efi-testfs.sh" | |||
5 | 5 | ||
6 | RDEPENDS:${PN} = "parted e2fsprogs-mke2fs dosfstools" | 6 | RDEPENDS:${PN} = "parted e2fsprogs-mke2fs dosfstools" |
7 | 7 | ||
8 | S = "${WORKDIR}" | 8 | S = "${WORKDIR}/sources" |
9 | UNPACKDIR = "${S}" | ||
9 | 10 | ||
10 | do_install() { | 11 | do_install() { |
11 | install -m 0755 ${S}/init-install-efi-testfs.sh ${D}/install-efi.sh | 12 | install -m 0755 ${S}/init-install-efi-testfs.sh ${D}/install-efi.sh |
diff --git a/meta/recipes-core/initrdscripts/initramfs-live-install-efi_1.0.bb b/meta/recipes-core/initrdscripts/initramfs-live-install-efi_1.0.bb index ae7d5beb2f..ff3b5622db 100644 --- a/meta/recipes-core/initrdscripts/initramfs-live-install-efi_1.0.bb +++ b/meta/recipes-core/initrdscripts/initramfs-live-install-efi_1.0.bb | |||
@@ -3,11 +3,11 @@ LICENSE = "MIT" | |||
3 | LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" | 3 | LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" |
4 | SRC_URI = "file://init-install-efi.sh" | 4 | SRC_URI = "file://init-install-efi.sh" |
5 | 5 | ||
6 | |||
7 | RDEPENDS:${PN} = "parted e2fsprogs-mke2fs dosfstools util-linux-blkid ${VIRTUAL-RUNTIME_base-utils}" | 6 | RDEPENDS:${PN} = "parted e2fsprogs-mke2fs dosfstools util-linux-blkid ${VIRTUAL-RUNTIME_base-utils}" |
8 | RRECOMMENDS:${PN} = "${VIRTUAL-RUNTIME_base-utils-syslog}" | 7 | RRECOMMENDS:${PN} = "${VIRTUAL-RUNTIME_base-utils-syslog}" |
9 | 8 | ||
10 | S = "${WORKDIR}" | 9 | S = "${WORKDIR}/sources" |
10 | UNPACKDIR = "${S}" | ||
11 | 11 | ||
12 | do_install() { | 12 | do_install() { |
13 | install -m 0755 ${S}/init-install-efi.sh ${D}/install-efi.sh | 13 | install -m 0755 ${S}/init-install-efi.sh ${D}/install-efi.sh |
diff --git a/meta/recipes-core/initrdscripts/initramfs-live-install-testfs_1.0.bb b/meta/recipes-core/initrdscripts/initramfs-live-install-testfs_1.0.bb index 018911f5d1..19f05f9fec 100644 --- a/meta/recipes-core/initrdscripts/initramfs-live-install-testfs_1.0.bb +++ b/meta/recipes-core/initrdscripts/initramfs-live-install-testfs_1.0.bb | |||
@@ -5,7 +5,8 @@ SRC_URI = "file://init-install-testfs.sh" | |||
5 | 5 | ||
6 | RDEPENDS:${PN} = "grub parted e2fsprogs-mke2fs" | 6 | RDEPENDS:${PN} = "grub parted e2fsprogs-mke2fs" |
7 | 7 | ||
8 | S = "${WORKDIR}" | 8 | S = "${WORKDIR}/sources" |
9 | UNPACKDIR = "${S}" | ||
9 | 10 | ||
10 | do_install() { | 11 | do_install() { |
11 | install -m 0755 ${S}/init-install-testfs.sh ${D}/install.sh | 12 | install -m 0755 ${S}/init-install-testfs.sh ${D}/install.sh |
diff --git a/meta/recipes-core/initrdscripts/initramfs-live-install_1.0.bb b/meta/recipes-core/initrdscripts/initramfs-live-install_1.0.bb index 12b2820318..1d489e2b64 100644 --- a/meta/recipes-core/initrdscripts/initramfs-live-install_1.0.bb +++ b/meta/recipes-core/initrdscripts/initramfs-live-install_1.0.bb | |||
@@ -3,8 +3,8 @@ LICENSE = "MIT" | |||
3 | LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" | 3 | LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" |
4 | SRC_URI = "file://init-install.sh" | 4 | SRC_URI = "file://init-install.sh" |
5 | 5 | ||
6 | 6 | S = "${WORKDIR}/sources" | |
7 | S = "${WORKDIR}" | 7 | UNPACKDIR = "${S}" |
8 | 8 | ||
9 | RDEPENDS:${PN} = "grub parted e2fsprogs-mke2fs util-linux-blkid ${VIRTUAL-RUNTIME_base-utils}" | 9 | RDEPENDS:${PN} = "grub parted e2fsprogs-mke2fs util-linux-blkid ${VIRTUAL-RUNTIME_base-utils}" |
10 | RRECOMMENDS:${PN} = "${VIRTUAL-RUNTIME_base-utils-syslog}" | 10 | RRECOMMENDS:${PN} = "${VIRTUAL-RUNTIME_base-utils-syslog}" |
diff --git a/meta/recipes-core/initrdscripts/initramfs-module-install-efi_1.0.bb b/meta/recipes-core/initrdscripts/initramfs-module-install-efi_1.0.bb index adea2330ae..bb3f275f26 100644 --- a/meta/recipes-core/initrdscripts/initramfs-module-install-efi_1.0.bb +++ b/meta/recipes-core/initrdscripts/initramfs-module-install-efi_1.0.bb | |||
@@ -7,7 +7,8 @@ RRECOMMENDS:${PN} = "${VIRTUAL-RUNTIME_base-utils-syslog}" | |||
7 | 7 | ||
8 | SRC_URI = "file://init-install-efi.sh" | 8 | SRC_URI = "file://init-install-efi.sh" |
9 | 9 | ||
10 | S = "${WORKDIR}" | 10 | S = "${WORKDIR}/sources" |
11 | UNPACKDIR = "${S}" | ||
11 | 12 | ||
12 | do_install() { | 13 | do_install() { |
13 | install -d ${D}/init.d | 14 | install -d ${D}/init.d |
diff --git a/meta/recipes-core/initrdscripts/initramfs-module-install_1.0.bb b/meta/recipes-core/initrdscripts/initramfs-module-install_1.0.bb index e4ae466d7c..b87e59f347 100644 --- a/meta/recipes-core/initrdscripts/initramfs-module-install_1.0.bb +++ b/meta/recipes-core/initrdscripts/initramfs-module-install_1.0.bb | |||
@@ -12,7 +12,8 @@ COMPATIBLE_HOST:armv7ve = 'null' | |||
12 | 12 | ||
13 | SRC_URI = "file://init-install.sh" | 13 | SRC_URI = "file://init-install.sh" |
14 | 14 | ||
15 | S = "${WORKDIR}" | 15 | S = "${WORKDIR}/sources" |
16 | UNPACKDIR = "${S}" | ||
16 | 17 | ||
17 | do_install() { | 18 | do_install() { |
18 | install -d ${D}/init.d | 19 | install -d ${D}/init.d |
diff --git a/meta/recipes-core/initrdscripts/initramfs-module-setup-live_1.0.bb b/meta/recipes-core/initrdscripts/initramfs-module-setup-live_1.0.bb index 4d2c11f452..4d9ef79a63 100644 --- a/meta/recipes-core/initrdscripts/initramfs-module-setup-live_1.0.bb +++ b/meta/recipes-core/initrdscripts/initramfs-module-setup-live_1.0.bb | |||
@@ -9,7 +9,8 @@ inherit allarch | |||
9 | FILESEXTRAPATHS:prepend := "${THISDIR}/initramfs-framework:" | 9 | FILESEXTRAPATHS:prepend := "${THISDIR}/initramfs-framework:" |
10 | SRC_URI = "file://setup-live" | 10 | SRC_URI = "file://setup-live" |
11 | 11 | ||
12 | S = "${WORKDIR}" | 12 | S = "${WORKDIR}/sources" |
13 | UNPACKDIR = "${S}" | ||
13 | 14 | ||
14 | do_install() { | 15 | do_install() { |
15 | install -d ${D}/init.d | 16 | install -d ${D}/init.d |
diff --git a/meta/recipes-core/initscripts/initscripts_1.0.bb b/meta/recipes-core/initscripts/initscripts_1.0.bb index 68eeb5e117..65f97a0af2 100644 --- a/meta/recipes-core/initscripts/initscripts_1.0.bb +++ b/meta/recipes-core/initscripts/initscripts_1.0.bb | |||
@@ -35,7 +35,8 @@ SRC_URI = "file://functions \ | |||
35 | ${@bb.utils.contains('DISTRO_FEATURES','selinux','file://sushell','',d)} \ | 35 | ${@bb.utils.contains('DISTRO_FEATURES','selinux','file://sushell','',d)} \ |
36 | " | 36 | " |
37 | 37 | ||
38 | S = "${WORKDIR}" | 38 | S = "${WORKDIR}/sources" |
39 | UNPACKDIR = "${S}" | ||
39 | 40 | ||
40 | SRC_URI:append:arm = " file://alignment.sh" | 41 | SRC_URI:append:arm = " file://alignment.sh" |
41 | SRC_URI:append:armeb = " file://alignment.sh" | 42 | SRC_URI:append:armeb = " file://alignment.sh" |
diff --git a/meta/recipes-core/libxml/libxml2_2.12.6.bb b/meta/recipes-core/libxml/libxml2_2.12.7.bb index 14fcff7fa4..84601c282f 100644 --- a/meta/recipes-core/libxml/libxml2_2.12.6.bb +++ b/meta/recipes-core/libxml/libxml2_2.12.7.bb | |||
@@ -20,7 +20,7 @@ SRC_URI += "http://www.w3.org/XML/Test/xmlts20130923.tar;subdir=${BP};name=testt | |||
20 | file://install-tests.patch \ | 20 | file://install-tests.patch \ |
21 | " | 21 | " |
22 | 22 | ||
23 | SRC_URI[archive.sha256sum] = "889c593a881a3db5fdd96cc9318c87df34eb648edfc458272ad46fd607353fbb" | 23 | SRC_URI[archive.sha256sum] = "24ae78ff1363a973e6d8beba941a7945da2ac056e19b53956aeb6927fd6cfb56" |
24 | SRC_URI[testtar.sha256sum] = "c6b2d42ee50b8b236e711a97d68e6c4b5c8d83e69a2be4722379f08702ea7273" | 24 | SRC_URI[testtar.sha256sum] = "c6b2d42ee50b8b236e711a97d68e6c4b5c8d83e69a2be4722379f08702ea7273" |
25 | 25 | ||
26 | # Disputed as a security issue, but fixed in d39f780 | 26 | # Disputed as a security issue, but fixed in d39f780 |
diff --git a/meta/recipes-core/meta/uninative-tarball.bb b/meta/recipes-core/meta/uninative-tarball.bb index 7eebcaf11a..0fd01fdb64 100644 --- a/meta/recipes-core/meta/uninative-tarball.bb +++ b/meta/recipes-core/meta/uninative-tarball.bb | |||
@@ -58,6 +58,8 @@ fakeroot archive_sdk() { | |||
58 | DEST="./${SDK_ARCH}-${SDK_OS}" | 58 | DEST="./${SDK_ARCH}-${SDK_OS}" |
59 | mv sysroots/${SDK_SYS} $DEST | 59 | mv sysroots/${SDK_SYS} $DEST |
60 | rm sysroots -rf | 60 | rm sysroots -rf |
61 | # There is a check in meta/files/toolchain-shar-extract.sh -- make sure to | ||
62 | # keep that check up to date if changing the `1024` | ||
61 | patchelf --set-interpreter ${@''.join('a' for n in range(1024))} $DEST/usr/bin/patchelf | 63 | patchelf --set-interpreter ${@''.join('a' for n in range(1024))} $DEST/usr/bin/patchelf |
62 | mv $DEST/usr/bin/patchelf $DEST/usr/bin/patchelf-uninative | 64 | mv $DEST/usr/bin/patchelf $DEST/usr/bin/patchelf-uninative |
63 | ${SDK_ARCHIVE_CMD} | 65 | ${SDK_ARCHIVE_CMD} |
diff --git a/meta/recipes-core/musl/bsd-headers.bb b/meta/recipes-core/musl/bsd-headers.bb index 887a816031..7d0bdee870 100644 --- a/meta/recipes-core/musl/bsd-headers.bb +++ b/meta/recipes-core/musl/bsd-headers.bb | |||
@@ -15,7 +15,8 @@ do_compile[noexec] = "1" | |||
15 | 15 | ||
16 | INHIBIT_DEFAULT_DEPS = "1" | 16 | INHIBIT_DEFAULT_DEPS = "1" |
17 | 17 | ||
18 | S = "${WORKDIR}" | 18 | S = "${WORKDIR}/sources" |
19 | UNPACKDIR = "${S}" | ||
19 | 20 | ||
20 | do_install() { | 21 | do_install() { |
21 | install -Dm 0644 ${S}/sys-queue.h ${D}${includedir}/sys/queue.h | 22 | install -Dm 0644 ${S}/sys-queue.h ${D}${includedir}/sys/queue.h |
diff --git a/meta/recipes-core/musl/libc-test_git.bb b/meta/recipes-core/musl/libc-test_git.bb index 619a959fd2..f55a125a89 100644 --- a/meta/recipes-core/musl/libc-test_git.bb +++ b/meta/recipes-core/musl/libc-test_git.bb | |||
@@ -50,7 +50,7 @@ do_install () { | |||
50 | } | 50 | } |
51 | 51 | ||
52 | do_install_ptest_base:append() { | 52 | do_install_ptest_base:append() { |
53 | install -Dm 0755 ${WORKDIR}/run-libc-ptests ${D}${PTEST_PATH}/run-libc-ptests | 53 | install -Dm 0755 ${UNPACKDIR}/run-libc-ptests ${D}${PTEST_PATH}/run-libc-ptests |
54 | } | 54 | } |
55 | 55 | ||
56 | COMPATIBLE_HOST = "null" | 56 | COMPATIBLE_HOST = "null" |
diff --git a/meta/recipes-core/musl/libssp-nonshared.bb b/meta/recipes-core/musl/libssp-nonshared.bb index 3faf8f00c3..e9f652fc3e 100644 --- a/meta/recipes-core/musl/libssp-nonshared.bb +++ b/meta/recipes-core/musl/libssp-nonshared.bb | |||
@@ -17,7 +17,8 @@ DEPENDS = "virtual/${TARGET_PREFIX}binutils \ | |||
17 | 17 | ||
18 | do_configure[noexec] = "1" | 18 | do_configure[noexec] = "1" |
19 | 19 | ||
20 | S = "${WORKDIR}" | 20 | S = "${WORKDIR}/sources" |
21 | UNPACKDIR = "${S}" | ||
21 | 22 | ||
22 | do_compile() { | 23 | do_compile() { |
23 | ${CC} ${CPPFLAGS} ${CFLAGS} -fPIE -c stack_chk.c -o stack_chk.o | 24 | ${CC} ${CPPFLAGS} ${CFLAGS} -fPIE -c stack_chk.c -o stack_chk.o |
diff --git a/meta/recipes-core/musl/musl-legacy-error.bb b/meta/recipes-core/musl/musl-legacy-error.bb index 5ce5a233ab..11a838a6e8 100644 --- a/meta/recipes-core/musl/musl-legacy-error.bb +++ b/meta/recipes-core/musl/musl-legacy-error.bb | |||
@@ -13,7 +13,8 @@ do_compile[noexec] = "1" | |||
13 | 13 | ||
14 | INHIBIT_DEFAULT_DEPS = "1" | 14 | INHIBIT_DEFAULT_DEPS = "1" |
15 | 15 | ||
16 | S = "${WORKDIR}" | 16 | S = "${WORKDIR}/sources" |
17 | UNPACKDIR = "${S}" | ||
17 | 18 | ||
18 | do_install() { | 19 | do_install() { |
19 | install -Dm 0644 ${S}/error.h -t ${D}${includedir} | 20 | install -Dm 0644 ${S}/error.h -t ${D}${includedir} |
diff --git a/meta/recipes-core/ncurses/files/0001-Fix-CVE-2023-29491.patch b/meta/recipes-core/ncurses/files/0001-Fix-CVE-2023-29491.patch deleted file mode 100644 index 1232c8c2a8..0000000000 --- a/meta/recipes-core/ncurses/files/0001-Fix-CVE-2023-29491.patch +++ /dev/null | |||
@@ -1,462 +0,0 @@ | |||
1 | From 3d54a41f12e9aa059f06e66e72d872f2283395b6 Mon Sep 17 00:00:00 2001 | ||
2 | From: Chen Qi <Qi.Chen@windriver.com> | ||
3 | Date: Sun, 30 Jul 2023 21:14:00 -0700 | ||
4 | Subject: [PATCH] Fix CVE-2023-29491 | ||
5 | |||
6 | CVE: CVE-2023-29491 | ||
7 | |||
8 | Upstream-Status: Backport [http://ncurses.scripts.mit.edu/?p=ncurses.git;a=commitdiff;h=eb51b1ea1f75a0ec17c9c5937cb28df1e8eeec56] | ||
9 | |||
10 | Signed-off-by: Chen Qi <Qi.Chen@windriver.com> | ||
11 | --- | ||
12 | ncurses/tinfo/lib_tgoto.c | 10 +++- | ||
13 | ncurses/tinfo/lib_tparm.c | 116 ++++++++++++++++++++++++++++++++----- | ||
14 | ncurses/tinfo/read_entry.c | 3 + | ||
15 | progs/tic.c | 6 ++ | ||
16 | progs/tparm_type.c | 9 +++ | ||
17 | progs/tparm_type.h | 2 + | ||
18 | progs/tput.c | 61 ++++++++++++++++--- | ||
19 | 7 files changed, 185 insertions(+), 22 deletions(-) | ||
20 | |||
21 | diff --git a/ncurses/tinfo/lib_tgoto.c b/ncurses/tinfo/lib_tgoto.c | ||
22 | index 9cf5e100..c50ed4df 100644 | ||
23 | --- a/ncurses/tinfo/lib_tgoto.c | ||
24 | +++ b/ncurses/tinfo/lib_tgoto.c | ||
25 | @@ -207,6 +207,14 @@ tgoto(const char *string, int x, int y) | ||
26 | result = tgoto_internal(string, x, y); | ||
27 | else | ||
28 | #endif | ||
29 | - result = TIPARM_2(string, y, x); | ||
30 | + if ((result = TIPARM_2(string, y, x)) == NULL) { | ||
31 | + /* | ||
32 | + * Because termcap did not provide a more general solution such as | ||
33 | + * tparm(), it was necessary to handle single-parameter capabilities | ||
34 | + * using tgoto(). The internal _nc_tiparm() function returns a NULL | ||
35 | + * for that case; retry for the single-parameter case. | ||
36 | + */ | ||
37 | + result = TIPARM_1(string, y); | ||
38 | + } | ||
39 | returnPtr(result); | ||
40 | } | ||
41 | diff --git a/ncurses/tinfo/lib_tparm.c b/ncurses/tinfo/lib_tparm.c | ||
42 | index d9bdfd8f..a10a3877 100644 | ||
43 | --- a/ncurses/tinfo/lib_tparm.c | ||
44 | +++ b/ncurses/tinfo/lib_tparm.c | ||
45 | @@ -1086,6 +1086,64 @@ tparam_internal(TPARM_STATE *tps, const char *string, TPARM_DATA *data) | ||
46 | return (TPS(out_buff)); | ||
47 | } | ||
48 | |||
49 | +#ifdef CUR | ||
50 | +/* | ||
51 | + * Only a few standard capabilities accept string parameters. The others that | ||
52 | + * are parameterized accept only numeric parameters. | ||
53 | + */ | ||
54 | +static bool | ||
55 | +check_string_caps(TPARM_DATA *data, const char *string) | ||
56 | +{ | ||
57 | + bool result = FALSE; | ||
58 | + | ||
59 | +#define CHECK_CAP(name) (VALID_STRING(name) && !strcmp(name, string)) | ||
60 | + | ||
61 | + /* | ||
62 | + * Disallow string parameters unless we can check them against a terminal | ||
63 | + * description. | ||
64 | + */ | ||
65 | + if (cur_term != NULL) { | ||
66 | + int want_type = 0; | ||
67 | + | ||
68 | + if (CHECK_CAP(pkey_key)) | ||
69 | + want_type = 2; /* function key #1, type string #2 */ | ||
70 | + else if (CHECK_CAP(pkey_local)) | ||
71 | + want_type = 2; /* function key #1, execute string #2 */ | ||
72 | + else if (CHECK_CAP(pkey_xmit)) | ||
73 | + want_type = 2; /* function key #1, transmit string #2 */ | ||
74 | + else if (CHECK_CAP(plab_norm)) | ||
75 | + want_type = 2; /* label #1, show string #2 */ | ||
76 | + else if (CHECK_CAP(pkey_plab)) | ||
77 | + want_type = 6; /* function key #1, type string #2, show string #3 */ | ||
78 | +#if NCURSES_XNAMES | ||
79 | + else { | ||
80 | + char *check; | ||
81 | + | ||
82 | + check = tigetstr("Cs"); | ||
83 | + if (CHECK_CAP(check)) | ||
84 | + want_type = 1; /* style #1 */ | ||
85 | + | ||
86 | + check = tigetstr("Ms"); | ||
87 | + if (CHECK_CAP(check)) | ||
88 | + want_type = 3; /* storage unit #1, content #2 */ | ||
89 | + } | ||
90 | +#endif | ||
91 | + | ||
92 | + if (want_type == data->tparm_type) { | ||
93 | + result = TRUE; | ||
94 | + } else { | ||
95 | + T(("unexpected string-parameter")); | ||
96 | + } | ||
97 | + } | ||
98 | + return result; | ||
99 | +} | ||
100 | + | ||
101 | +#define ValidCap() (myData.tparm_type == 0 || \ | ||
102 | + check_string_caps(&myData, string)) | ||
103 | +#else | ||
104 | +#define ValidCap() 1 | ||
105 | +#endif | ||
106 | + | ||
107 | #if NCURSES_TPARM_VARARGS | ||
108 | |||
109 | NCURSES_EXPORT(char *) | ||
110 | @@ -1100,7 +1158,7 @@ tparm(const char *string, ...) | ||
111 | tps->tname = "tparm"; | ||
112 | #endif /* TRACE */ | ||
113 | |||
114 | - if (tparm_setup(cur_term, string, &myData) == OK) { | ||
115 | + if (tparm_setup(cur_term, string, &myData) == OK && ValidCap()) { | ||
116 | va_list ap; | ||
117 | |||
118 | va_start(ap, string); | ||
119 | @@ -1135,7 +1193,7 @@ tparm(const char *string, | ||
120 | tps->tname = "tparm"; | ||
121 | #endif /* TRACE */ | ||
122 | |||
123 | - if (tparm_setup(cur_term, string, &myData) == OK) { | ||
124 | + if (tparm_setup(cur_term, string, &myData) == OK && ValidCap()) { | ||
125 | |||
126 | myData.param[0] = a1; | ||
127 | myData.param[1] = a2; | ||
128 | @@ -1166,7 +1224,7 @@ tiparm(const char *string, ...) | ||
129 | tps->tname = "tiparm"; | ||
130 | #endif /* TRACE */ | ||
131 | |||
132 | - if (tparm_setup(cur_term, string, &myData) == OK) { | ||
133 | + if (tparm_setup(cur_term, string, &myData) == OK && ValidCap()) { | ||
134 | va_list ap; | ||
135 | |||
136 | va_start(ap, string); | ||
137 | @@ -1179,7 +1237,25 @@ tiparm(const char *string, ...) | ||
138 | } | ||
139 | |||
140 | /* | ||
141 | - * The internal-use flavor ensures that the parameters are numbers, not strings | ||
142 | + * The internal-use flavor ensures that parameters are numbers, not strings. | ||
143 | + * In addition to ensuring that they are numbers, it ensures that the parameter | ||
144 | + * count is consistent with intended usage. | ||
145 | + * | ||
146 | + * Unlike the general-purpose tparm/tiparm, these internal calls are fairly | ||
147 | + * well defined: | ||
148 | + * | ||
149 | + * expected == 0 - not applicable | ||
150 | + * expected == 1 - set color, or vertical/horizontal addressing | ||
151 | + * expected == 2 - cursor addressing | ||
152 | + * expected == 4 - initialize color or color pair | ||
153 | + * expected == 9 - set attributes | ||
154 | + * | ||
155 | + * Only for the last case (set attributes) should a parameter be optional. | ||
156 | + * Also, a capability which calls for more parameters than expected should be | ||
157 | + * ignored. | ||
158 | + * | ||
159 | + * Return a null if the parameter-checks fail. Otherwise, return a pointer to | ||
160 | + * the formatted capability string. | ||
161 | */ | ||
162 | NCURSES_EXPORT(char *) | ||
163 | _nc_tiparm(int expected, const char *string, ...) | ||
164 | @@ -1189,22 +1265,36 @@ _nc_tiparm(int expected, const char *string, ...) | ||
165 | char *result = NULL; | ||
166 | |||
167 | _nc_tparm_err = 0; | ||
168 | + T((T_CALLED("_nc_tiparm(%d, %s, ...)"), expected, _nc_visbuf(string))); | ||
169 | #ifdef TRACE | ||
170 | tps->tname = "_nc_tiparm"; | ||
171 | #endif /* TRACE */ | ||
172 | |||
173 | - if (tparm_setup(cur_term, string, &myData) == OK | ||
174 | - && myData.num_actual <= expected | ||
175 | - && myData.tparm_type == 0) { | ||
176 | - va_list ap; | ||
177 | + if (tparm_setup(cur_term, string, &myData) == OK && ValidCap()) { | ||
178 | + if (myData.num_actual == 0) { | ||
179 | + T(("missing parameter%s, expected %s%d", | ||
180 | + expected > 1 ? "s" : "", | ||
181 | + expected == 9 ? "up to " : "", | ||
182 | + expected)); | ||
183 | + } else if (myData.num_actual > expected) { | ||
184 | + T(("too many parameters, have %d, expected %d", | ||
185 | + myData.num_actual, | ||
186 | + expected)); | ||
187 | + } else if (expected != 9 && myData.num_actual != expected) { | ||
188 | + T(("expected %d parameters, have %d", | ||
189 | + myData.num_actual, | ||
190 | + expected)); | ||
191 | + } else { | ||
192 | + va_list ap; | ||
193 | |||
194 | - va_start(ap, string); | ||
195 | - tparm_copy_valist(&myData, FALSE, ap); | ||
196 | - va_end(ap); | ||
197 | + va_start(ap, string); | ||
198 | + tparm_copy_valist(&myData, FALSE, ap); | ||
199 | + va_end(ap); | ||
200 | |||
201 | - result = tparam_internal(tps, string, &myData); | ||
202 | + result = tparam_internal(tps, string, &myData); | ||
203 | + } | ||
204 | } | ||
205 | - return result; | ||
206 | + returnPtr(result); | ||
207 | } | ||
208 | |||
209 | /* | ||
210 | diff --git a/ncurses/tinfo/read_entry.c b/ncurses/tinfo/read_entry.c | ||
211 | index 2b1875ed..341337d2 100644 | ||
212 | --- a/ncurses/tinfo/read_entry.c | ||
213 | +++ b/ncurses/tinfo/read_entry.c | ||
214 | @@ -323,6 +323,9 @@ _nc_read_termtype(TERMTYPE2 *ptr, char *buffer, int limit) | ||
215 | || bool_count < 0 | ||
216 | || num_count < 0 | ||
217 | || str_count < 0 | ||
218 | + || bool_count > BOOLCOUNT | ||
219 | + || num_count > NUMCOUNT | ||
220 | + || str_count > STRCOUNT | ||
221 | || str_size < 0) { | ||
222 | returnDB(TGETENT_NO); | ||
223 | } | ||
224 | diff --git a/progs/tic.c b/progs/tic.c | ||
225 | index 93a0b491..888927e2 100644 | ||
226 | --- a/progs/tic.c | ||
227 | +++ b/progs/tic.c | ||
228 | @@ -2270,9 +2270,15 @@ check_1_infotocap(const char *name, NCURSES_CONST char *value, int count) | ||
229 | |||
230 | _nc_reset_tparm(NULL); | ||
231 | switch (actual) { | ||
232 | + case Str: | ||
233 | + result = TPARM_1(value, strings[1]); | ||
234 | + break; | ||
235 | case Num_Str: | ||
236 | result = TPARM_2(value, numbers[1], strings[2]); | ||
237 | break; | ||
238 | + case Str_Str: | ||
239 | + result = TPARM_2(value, strings[1], strings[2]); | ||
240 | + break; | ||
241 | case Num_Str_Str: | ||
242 | result = TPARM_3(value, numbers[1], strings[2], strings[3]); | ||
243 | break; | ||
244 | diff --git a/progs/tparm_type.c b/progs/tparm_type.c | ||
245 | index 3da4a077..644aa62a 100644 | ||
246 | --- a/progs/tparm_type.c | ||
247 | +++ b/progs/tparm_type.c | ||
248 | @@ -47,6 +47,7 @@ tparm_type(const char *name) | ||
249 | {code, {longname} }, \ | ||
250 | {code, {ti} }, \ | ||
251 | {code, {tc} } | ||
252 | +#define XD(code, onlyname) TD(code, onlyname, onlyname, onlyname) | ||
253 | TParams result = Numbers; | ||
254 | /* *INDENT-OFF* */ | ||
255 | static const struct { | ||
256 | @@ -58,6 +59,10 @@ tparm_type(const char *name) | ||
257 | TD(Num_Str, "pkey_xmit", "pfx", "px"), | ||
258 | TD(Num_Str, "plab_norm", "pln", "pn"), | ||
259 | TD(Num_Str_Str, "pkey_plab", "pfxl", "xl"), | ||
260 | +#if NCURSES_XNAMES | ||
261 | + XD(Str, "Cs"), | ||
262 | + XD(Str_Str, "Ms"), | ||
263 | +#endif | ||
264 | }; | ||
265 | /* *INDENT-ON* */ | ||
266 | |||
267 | @@ -80,12 +85,16 @@ guess_tparm_type(int nparam, char **p_is_s) | ||
268 | case 1: | ||
269 | if (!p_is_s[0]) | ||
270 | result = Numbers; | ||
271 | + if (p_is_s[0]) | ||
272 | + result = Str; | ||
273 | break; | ||
274 | case 2: | ||
275 | if (!p_is_s[0] && !p_is_s[1]) | ||
276 | result = Numbers; | ||
277 | if (!p_is_s[0] && p_is_s[1]) | ||
278 | result = Num_Str; | ||
279 | + if (p_is_s[0] && p_is_s[1]) | ||
280 | + result = Str_Str; | ||
281 | break; | ||
282 | case 3: | ||
283 | if (!p_is_s[0] && !p_is_s[1] && !p_is_s[2]) | ||
284 | diff --git a/progs/tparm_type.h b/progs/tparm_type.h | ||
285 | index 7c102a30..af5bcf0f 100644 | ||
286 | --- a/progs/tparm_type.h | ||
287 | +++ b/progs/tparm_type.h | ||
288 | @@ -45,8 +45,10 @@ | ||
289 | typedef enum { | ||
290 | Other = -1 | ||
291 | ,Numbers = 0 | ||
292 | + ,Str | ||
293 | ,Num_Str | ||
294 | ,Num_Str_Str | ||
295 | + ,Str_Str | ||
296 | } TParams; | ||
297 | |||
298 | extern TParams tparm_type(const char *name); | ||
299 | diff --git a/progs/tput.c b/progs/tput.c | ||
300 | index 4cd0c5ba..41508b72 100644 | ||
301 | --- a/progs/tput.c | ||
302 | +++ b/progs/tput.c | ||
303 | @@ -1,5 +1,5 @@ | ||
304 | /**************************************************************************** | ||
305 | - * Copyright 2018-2021,2022 Thomas E. Dickey * | ||
306 | + * Copyright 2018-2022,2023 Thomas E. Dickey * | ||
307 | * Copyright 1998-2016,2017 Free Software Foundation, Inc. * | ||
308 | * * | ||
309 | * Permission is hereby granted, free of charge, to any person obtaining a * | ||
310 | @@ -47,12 +47,15 @@ | ||
311 | #include <transform.h> | ||
312 | #include <tty_settings.h> | ||
313 | |||
314 | -MODULE_ID("$Id: tput.c,v 1.99 2022/02/26 23:19:31 tom Exp $") | ||
315 | +MODULE_ID("$Id: tput.c,v 1.102 2023/04/08 16:26:36 tom Exp $") | ||
316 | |||
317 | #define PUTS(s) fputs(s, stdout) | ||
318 | |||
319 | const char *_nc_progname = "tput"; | ||
320 | |||
321 | +static bool opt_v = FALSE; /* quiet, do not show warnings */ | ||
322 | +static bool opt_x = FALSE; /* clear scrollback if possible */ | ||
323 | + | ||
324 | static bool is_init = FALSE; | ||
325 | static bool is_reset = FALSE; | ||
326 | static bool is_clear = FALSE; | ||
327 | @@ -81,6 +84,7 @@ usage(const char *optstring) | ||
328 | KEEP(" -S << read commands from standard input") | ||
329 | KEEP(" -T TERM use this instead of $TERM") | ||
330 | KEEP(" -V print curses-version") | ||
331 | + KEEP(" -v verbose, show warnings") | ||
332 | KEEP(" -x do not try to clear scrollback") | ||
333 | KEEP("") | ||
334 | KEEP("Commands:") | ||
335 | @@ -148,7 +152,7 @@ exit_code(int token, int value) | ||
336 | * Returns nonzero on error. | ||
337 | */ | ||
338 | static int | ||
339 | -tput_cmd(int fd, TTY * settings, bool opt_x, int argc, char **argv, int *used) | ||
340 | +tput_cmd(int fd, TTY * settings, int argc, char **argv, int *used) | ||
341 | { | ||
342 | NCURSES_CONST char *name; | ||
343 | char *s; | ||
344 | @@ -231,7 +235,9 @@ tput_cmd(int fd, TTY * settings, bool opt_x, int argc, char **argv, int *used) | ||
345 | } else if (VALID_STRING(s)) { | ||
346 | if (argc > 1) { | ||
347 | int k; | ||
348 | + int narg; | ||
349 | int analyzed; | ||
350 | + int provided; | ||
351 | int popcount; | ||
352 | long numbers[1 + NUM_PARM]; | ||
353 | char *strings[1 + NUM_PARM]; | ||
354 | @@ -271,14 +277,45 @@ tput_cmd(int fd, TTY * settings, bool opt_x, int argc, char **argv, int *used) | ||
355 | |||
356 | popcount = 0; | ||
357 | _nc_reset_tparm(NULL); | ||
358 | + /* | ||
359 | + * Count the number of numeric parameters which are provided. | ||
360 | + */ | ||
361 | + provided = 0; | ||
362 | + for (narg = 1; narg < argc; ++narg) { | ||
363 | + char *ending = NULL; | ||
364 | + long check = strtol(argv[narg], &ending, 10); | ||
365 | + if (check < 0 || ending == argv[narg] || *ending != '\0') | ||
366 | + break; | ||
367 | + provided = narg; | ||
368 | + } | ||
369 | switch (paramType) { | ||
370 | + case Str: | ||
371 | + s = TPARM_1(s, strings[1]); | ||
372 | + analyzed = 1; | ||
373 | + if (provided == 0 && argc >= 1) | ||
374 | + provided++; | ||
375 | + break; | ||
376 | + case Str_Str: | ||
377 | + s = TPARM_2(s, strings[1], strings[2]); | ||
378 | + analyzed = 2; | ||
379 | + if (provided == 0 && argc >= 1) | ||
380 | + provided++; | ||
381 | + if (provided == 1 && argc >= 2) | ||
382 | + provided++; | ||
383 | + break; | ||
384 | case Num_Str: | ||
385 | s = TPARM_2(s, numbers[1], strings[2]); | ||
386 | analyzed = 2; | ||
387 | + if (provided == 1 && argc >= 2) | ||
388 | + provided++; | ||
389 | break; | ||
390 | case Num_Str_Str: | ||
391 | s = TPARM_3(s, numbers[1], strings[2], strings[3]); | ||
392 | analyzed = 3; | ||
393 | + if (provided == 1 && argc >= 2) | ||
394 | + provided++; | ||
395 | + if (provided == 2 && argc >= 3) | ||
396 | + provided++; | ||
397 | break; | ||
398 | case Numbers: | ||
399 | analyzed = _nc_tparm_analyze(NULL, s, p_is_s, &popcount); | ||
400 | @@ -316,7 +353,13 @@ tput_cmd(int fd, TTY * settings, bool opt_x, int argc, char **argv, int *used) | ||
401 | if (analyzed < popcount) { | ||
402 | analyzed = popcount; | ||
403 | } | ||
404 | - *used += analyzed; | ||
405 | + if (opt_v && (analyzed != provided)) { | ||
406 | + fprintf(stderr, "%s: %s parameters for \"%s\"\n", | ||
407 | + _nc_progname, | ||
408 | + (analyzed < provided ? "extra" : "missing"), | ||
409 | + argv[0]); | ||
410 | + } | ||
411 | + *used += provided; | ||
412 | } | ||
413 | |||
414 | /* use putp() in order to perform padding */ | ||
415 | @@ -339,7 +382,6 @@ main(int argc, char **argv) | ||
416 | int used; | ||
417 | TTY old_settings; | ||
418 | TTY tty_settings; | ||
419 | - bool opt_x = FALSE; /* clear scrollback if possible */ | ||
420 | bool is_alias; | ||
421 | bool need_tty; | ||
422 | |||
423 | @@ -348,7 +390,7 @@ main(int argc, char **argv) | ||
424 | |||
425 | term = getenv("TERM"); | ||
426 | |||
427 | - while ((c = getopt(argc, argv, is_alias ? "T:Vx" : "ST:Vx")) != -1) { | ||
428 | + while ((c = getopt(argc, argv, is_alias ? "T:Vvx" : "ST:Vvx")) != -1) { | ||
429 | switch (c) { | ||
430 | case 'S': | ||
431 | cmdline = FALSE; | ||
432 | @@ -361,6 +403,9 @@ main(int argc, char **argv) | ||
433 | case 'V': | ||
434 | puts(curses_version()); | ||
435 | ExitProgram(EXIT_SUCCESS); | ||
436 | + case 'v': /* verbose */ | ||
437 | + opt_v = TRUE; | ||
438 | + break; | ||
439 | case 'x': /* do not try to clear scrollback */ | ||
440 | opt_x = TRUE; | ||
441 | break; | ||
442 | @@ -404,7 +449,7 @@ main(int argc, char **argv) | ||
443 | usage(NULL); | ||
444 | while (argc > 0) { | ||
445 | tty_settings = old_settings; | ||
446 | - code = tput_cmd(fd, &tty_settings, opt_x, argc, argv, &used); | ||
447 | + code = tput_cmd(fd, &tty_settings, argc, argv, &used); | ||
448 | if (code != 0) | ||
449 | break; | ||
450 | argc -= used; | ||
451 | @@ -439,7 +484,7 @@ main(int argc, char **argv) | ||
452 | while (argnum > 0) { | ||
453 | int code; | ||
454 | tty_settings = old_settings; | ||
455 | - code = tput_cmd(fd, &tty_settings, opt_x, argnum, argnow, &used); | ||
456 | + code = tput_cmd(fd, &tty_settings, argnum, argnow, &used); | ||
457 | if (code != 0) { | ||
458 | if (result == 0) | ||
459 | result = ErrSystem(0); /* will return value >4 */ | ||
460 | -- | ||
461 | 2.40.0 | ||
462 | |||
diff --git a/meta/recipes-core/ncurses/files/0001-Updating-reset-code-ncurses-6.4-patch-20231104.patch b/meta/recipes-core/ncurses/files/0001-Updating-reset-code-ncurses-6.4-patch-20231104.patch deleted file mode 100644 index 121db6bffe..0000000000 --- a/meta/recipes-core/ncurses/files/0001-Updating-reset-code-ncurses-6.4-patch-20231104.patch +++ /dev/null | |||
@@ -1,499 +0,0 @@ | |||
1 | From 135d37072755704b8d018e5de74e62ff3f28c930 Mon Sep 17 00:00:00 2001 | ||
2 | From: Thomas E. Dickey <dickey@invisible-island.net> | ||
3 | Date: Sun, 5 Nov 2023 05:54:54 +0530 | ||
4 | Subject: [PATCH] Updating reset code - ncurses 6.4 - patch 20231104 | ||
5 | |||
6 | + modify reset command to avoid altering clocal if the terminal uses a | ||
7 | modem (prompted by discussion with Werner Fink, Michal Suchanek, | ||
8 | OpenSUSE #1201384, Debian #60377). | ||
9 | + build-fixes for --with-caps variations. | ||
10 | + correct a couple of section-references in INSTALL. | ||
11 | |||
12 | Signed-off-by: Thomas E. Dickey <dickey@invisible-island.net> | ||
13 | |||
14 | Upstream-Status: Backport [https://ncurses.scripts.mit.edu/?p=ncurses.git;a=commitdiff;h=135d37072755704b8d018e5de74e62ff3f28c930] | ||
15 | |||
16 | Signed-off-by: Soumya Sambu <soumya.sambu@windriver.com> | ||
17 | --- | ||
18 | INSTALL | 8 +- | ||
19 | include/curses.events | 2 +- | ||
20 | ncurses/tinfo/lib_tparm.c | 2 + | ||
21 | progs/reset_cmd.c | 281 +++++++++++++++++++++----------------- | ||
22 | progs/tabs.c | 10 +- | ||
23 | progs/tic.c | 4 + | ||
24 | 6 files changed, 176 insertions(+), 131 deletions(-) | ||
25 | |||
26 | diff --git a/INSTALL b/INSTALL | ||
27 | index d9c1dd12..d0a39af0 100644 | ||
28 | --- a/INSTALL | ||
29 | +++ b/INSTALL | ||
30 | @@ -47,7 +47,7 @@ If you are converting from BSD curses and do not have root access, be sure | ||
31 | to read the BSD CONVERSION NOTES section below. | ||
32 | |||
33 | If you are trying to build applications using gpm with ncurses, | ||
34 | -read the USING NCURSES WITH GPM section below. | ||
35 | +read the USING GPM section below. | ||
36 | |||
37 | If you are cross-compiling, see the note below on BUILDING WITH A CROSS-COMPILER. | ||
38 | |||
39 | @@ -79,7 +79,7 @@ INSTALLATION PROCEDURE: | ||
40 | The --prefix option to configure changes the root directory for installing | ||
41 | ncurses. The default is normally in subdirectories of /usr/local, except | ||
42 | for systems where ncurses is normally installed as a system library (see | ||
43 | - "IF YOU ARE A SYSTEM INTEGRATOR"). Use --prefix=/usr to replace your | ||
44 | + "FOR SYSTEM INTEGRATORS"). Use --prefix=/usr to replace your | ||
45 | default curses distribution. | ||
46 | |||
47 | The package gets installed beneath the --prefix directory as follows: | ||
48 | @@ -176,7 +176,7 @@ INSTALLATION PROCEDURE: | ||
49 | You can make curses and terminfo fall back to an existing file of termcap | ||
50 | definitions by configuring with --enable-termcap. If you do this, the | ||
51 | library will search /etc/termcap before the terminfo database, and will | ||
52 | - also interpret the contents of the TERM environment variable. See the | ||
53 | + also interpret the contents of the $TERM environment variable. See the | ||
54 | section BSD CONVERSION NOTES below. | ||
55 | |||
56 | 3. Type `make'. Ignore any warnings, no error messages should be produced. | ||
57 | @@ -1231,7 +1231,7 @@ CONFIGURE OPTIONS: | ||
58 | Specify a search-list of terminfo directories which will be compiled | ||
59 | into the ncurses library (default: DATADIR/terminfo) | ||
60 | |||
61 | - This is a colon-separated list, like the TERMINFO_DIRS environment | ||
62 | + This is a colon-separated list, like the $TERMINFO_DIRS environment | ||
63 | variable. | ||
64 | |||
65 | --with-termlib[=XXX] | ||
66 | diff --git a/include/curses.events b/include/curses.events | ||
67 | index 25a2583f..468bde18 100644 | ||
68 | --- a/include/curses.events | ||
69 | +++ b/include/curses.events | ||
70 | @@ -50,6 +50,6 @@ typedef struct | ||
71 | extern NCURSES_EXPORT(int) wgetch_events (WINDOW *, _nc_eventlist *) GCC_DEPRECATED(experimental option); /* experimental */ | ||
72 | extern NCURSES_EXPORT(int) wgetnstr_events (WINDOW *,char *,int,_nc_eventlist *) GCC_DEPRECATED(experimental option); /* experimental */ | ||
73 | |||
74 | -#define KEY_EVENT 0633 /* We were interrupted by an event */ | ||
75 | +#define KEY_EVENT 0634 /* We were interrupted by an event */ | ||
76 | |||
77 | #endif /* NCURSES_WGETCH_EVENTS */ | ||
78 | diff --git a/ncurses/tinfo/lib_tparm.c b/ncurses/tinfo/lib_tparm.c | ||
79 | index a10a3877..cd972c0f 100644 | ||
80 | --- a/ncurses/tinfo/lib_tparm.c | ||
81 | +++ b/ncurses/tinfo/lib_tparm.c | ||
82 | @@ -1113,8 +1113,10 @@ check_string_caps(TPARM_DATA *data, const char *string) | ||
83 | want_type = 2; /* function key #1, transmit string #2 */ | ||
84 | else if (CHECK_CAP(plab_norm)) | ||
85 | want_type = 2; /* label #1, show string #2 */ | ||
86 | +#ifdef pkey_plab | ||
87 | else if (CHECK_CAP(pkey_plab)) | ||
88 | want_type = 6; /* function key #1, type string #2, show string #3 */ | ||
89 | +#endif | ||
90 | #if NCURSES_XNAMES | ||
91 | else { | ||
92 | char *check; | ||
93 | diff --git a/progs/reset_cmd.c b/progs/reset_cmd.c | ||
94 | index eff3af72..aec4b077 100644 | ||
95 | --- a/progs/reset_cmd.c | ||
96 | +++ b/progs/reset_cmd.c | ||
97 | @@ -75,6 +75,9 @@ MODULE_ID("$Id: reset_cmd.c,v 1.28 2021/10/02 18:08:44 tom Exp $") | ||
98 | # endif | ||
99 | #endif | ||
100 | |||
101 | +#define set_flags(target, mask) target |= mask | ||
102 | +#define clear_flags(target, mask) target &= ~((unsigned)(mask)) | ||
103 | + | ||
104 | static FILE *my_file; | ||
105 | |||
106 | static bool use_reset = FALSE; /* invoked as reset */ | ||
107 | @@ -188,6 +191,79 @@ out_char(int c) | ||
108 | #define reset_char(item, value) \ | ||
109 | tty_settings->c_cc[item] = CHK(tty_settings->c_cc[item], value) | ||
110 | |||
111 | +/* | ||
112 | + * Simplify ifdefs | ||
113 | + */ | ||
114 | +#ifndef BSDLY | ||
115 | +#define BSDLY 0 | ||
116 | +#endif | ||
117 | +#ifndef CRDLY | ||
118 | +#define CRDLY 0 | ||
119 | +#endif | ||
120 | +#ifndef ECHOCTL | ||
121 | +#define ECHOCTL 0 | ||
122 | +#endif | ||
123 | +#ifndef ECHOKE | ||
124 | +#define ECHOKE 0 | ||
125 | +#endif | ||
126 | +#ifndef ECHOPRT | ||
127 | +#define ECHOPRT 0 | ||
128 | +#endif | ||
129 | +#ifndef FFDLY | ||
130 | +#define FFDLY 0 | ||
131 | +#endif | ||
132 | +#ifndef IMAXBEL | ||
133 | +#define IMAXBEL 0 | ||
134 | +#endif | ||
135 | +#ifndef IUCLC | ||
136 | +#define IUCLC 0 | ||
137 | +#endif | ||
138 | +#ifndef IXANY | ||
139 | +#define IXANY 0 | ||
140 | +#endif | ||
141 | +#ifndef NLDLY | ||
142 | +#define NLDLY 0 | ||
143 | +#endif | ||
144 | +#ifndef OCRNL | ||
145 | +#define OCRNL 0 | ||
146 | +#endif | ||
147 | +#ifndef OFDEL | ||
148 | +#define OFDEL 0 | ||
149 | +#endif | ||
150 | +#ifndef OFILL | ||
151 | +#define OFILL 0 | ||
152 | +#endif | ||
153 | +#ifndef OLCUC | ||
154 | +#define OLCUC 0 | ||
155 | +#endif | ||
156 | +#ifndef ONLCR | ||
157 | +#define ONLCR 0 | ||
158 | +#endif | ||
159 | +#ifndef ONLRET | ||
160 | +#define ONLRET 0 | ||
161 | +#endif | ||
162 | +#ifndef ONOCR | ||
163 | +#define ONOCR 0 | ||
164 | +#endif | ||
165 | +#ifndef OXTABS | ||
166 | +#define OXTABS 0 | ||
167 | +#endif | ||
168 | +#ifndef TAB3 | ||
169 | +#define TAB3 0 | ||
170 | +#endif | ||
171 | +#ifndef TABDLY | ||
172 | +#define TABDLY 0 | ||
173 | +#endif | ||
174 | +#ifndef TOSTOP | ||
175 | +#define TOSTOP 0 | ||
176 | +#endif | ||
177 | +#ifndef VTDLY | ||
178 | +#define VTDLY 0 | ||
179 | +#endif | ||
180 | +#ifndef XCASE | ||
181 | +#define XCASE 0 | ||
182 | +#endif | ||
183 | + | ||
184 | /* | ||
185 | * Reset the terminal mode bits to a sensible state. Very useful after | ||
186 | * a child program dies in raw mode. | ||
187 | @@ -195,6 +271,10 @@ out_char(int c) | ||
188 | void | ||
189 | reset_tty_settings(int fd, TTY * tty_settings, int noset) | ||
190 | { | ||
191 | + unsigned mask; | ||
192 | +#ifdef TIOCMGET | ||
193 | + int modem_bits; | ||
194 | +#endif | ||
195 | GET_TTY(fd, tty_settings); | ||
196 | |||
197 | #ifdef TERMIOS | ||
198 | @@ -228,106 +308,65 @@ reset_tty_settings(int fd, TTY * tty_settings, int noset) | ||
199 | reset_char(VWERASE, CWERASE); | ||
200 | #endif | ||
201 | |||
202 | - tty_settings->c_iflag &= ~((unsigned) (IGNBRK | ||
203 | - | PARMRK | ||
204 | - | INPCK | ||
205 | - | ISTRIP | ||
206 | - | INLCR | ||
207 | - | IGNCR | ||
208 | -#ifdef IUCLC | ||
209 | - | IUCLC | ||
210 | -#endif | ||
211 | -#ifdef IXANY | ||
212 | - | IXANY | ||
213 | -#endif | ||
214 | - | IXOFF)); | ||
215 | - | ||
216 | - tty_settings->c_iflag |= (BRKINT | ||
217 | - | IGNPAR | ||
218 | - | ICRNL | ||
219 | - | IXON | ||
220 | -#ifdef IMAXBEL | ||
221 | - | IMAXBEL | ||
222 | -#endif | ||
223 | - ); | ||
224 | - | ||
225 | - tty_settings->c_oflag &= ~((unsigned) (0 | ||
226 | -#ifdef OLCUC | ||
227 | - | OLCUC | ||
228 | -#endif | ||
229 | -#ifdef OCRNL | ||
230 | - | OCRNL | ||
231 | -#endif | ||
232 | -#ifdef ONOCR | ||
233 | - | ONOCR | ||
234 | -#endif | ||
235 | -#ifdef ONLRET | ||
236 | - | ONLRET | ||
237 | -#endif | ||
238 | -#ifdef OFILL | ||
239 | - | OFILL | ||
240 | -#endif | ||
241 | -#ifdef OFDEL | ||
242 | - | OFDEL | ||
243 | -#endif | ||
244 | -#ifdef NLDLY | ||
245 | - | NLDLY | ||
246 | -#endif | ||
247 | -#ifdef CRDLY | ||
248 | - | CRDLY | ||
249 | -#endif | ||
250 | -#ifdef TABDLY | ||
251 | - | TABDLY | ||
252 | -#endif | ||
253 | -#ifdef BSDLY | ||
254 | - | BSDLY | ||
255 | -#endif | ||
256 | -#ifdef VTDLY | ||
257 | - | VTDLY | ||
258 | -#endif | ||
259 | -#ifdef FFDLY | ||
260 | - | FFDLY | ||
261 | -#endif | ||
262 | - )); | ||
263 | - | ||
264 | - tty_settings->c_oflag |= (OPOST | ||
265 | -#ifdef ONLCR | ||
266 | - | ONLCR | ||
267 | -#endif | ||
268 | - ); | ||
269 | - | ||
270 | - tty_settings->c_cflag &= ~((unsigned) (CSIZE | ||
271 | - | CSTOPB | ||
272 | - | PARENB | ||
273 | - | PARODD | ||
274 | - | CLOCAL)); | ||
275 | - tty_settings->c_cflag |= (CS8 | CREAD); | ||
276 | - tty_settings->c_lflag &= ~((unsigned) (ECHONL | ||
277 | - | NOFLSH | ||
278 | -#ifdef TOSTOP | ||
279 | - | TOSTOP | ||
280 | -#endif | ||
281 | -#ifdef ECHOPTR | ||
282 | - | ECHOPRT | ||
283 | -#endif | ||
284 | -#ifdef XCASE | ||
285 | - | XCASE | ||
286 | -#endif | ||
287 | - )); | ||
288 | - | ||
289 | - tty_settings->c_lflag |= (ISIG | ||
290 | - | ICANON | ||
291 | - | ECHO | ||
292 | - | ECHOE | ||
293 | - | ECHOK | ||
294 | -#ifdef ECHOCTL | ||
295 | - | ECHOCTL | ||
296 | -#endif | ||
297 | -#ifdef ECHOKE | ||
298 | - | ECHOKE | ||
299 | -#endif | ||
300 | - ); | ||
301 | -#endif | ||
302 | + clear_flags(tty_settings->c_iflag, (IGNBRK | ||
303 | + | PARMRK | ||
304 | + | INPCK | ||
305 | + | ISTRIP | ||
306 | + | INLCR | ||
307 | + | IGNCR | ||
308 | + | IUCLC | ||
309 | + | IXANY | ||
310 | + | IXOFF)); | ||
311 | + | ||
312 | + set_flags(tty_settings->c_iflag, (BRKINT | ||
313 | + | IGNPAR | ||
314 | + | ICRNL | ||
315 | + | IXON | ||
316 | + | IMAXBEL)); | ||
317 | + | ||
318 | + clear_flags(tty_settings->c_oflag, (0 | ||
319 | + | OLCUC | ||
320 | + | OCRNL | ||
321 | + | ONOCR | ||
322 | + | ONLRET | ||
323 | + | OFILL | ||
324 | + | OFDEL | ||
325 | + | NLDLY | ||
326 | + | CRDLY | ||
327 | + | TABDLY | ||
328 | + | BSDLY | ||
329 | + | VTDLY | ||
330 | + | FFDLY)); | ||
331 | + | ||
332 | + set_flags(tty_settings->c_oflag, (OPOST | ||
333 | + | ONLCR)); | ||
334 | + | ||
335 | + mask = (CSIZE | CSTOPB | PARENB | PARODD); | ||
336 | +#ifdef TIOCMGET | ||
337 | + /* leave clocal alone if this appears to use a modem */ | ||
338 | + if (ioctl(fd, TIOCMGET, &modem_bits) == -1) | ||
339 | + mask |= CLOCAL; | ||
340 | +#else | ||
341 | + /* cannot check - use the behavior from tset */ | ||
342 | + mask |= CLOCAL; | ||
343 | +#endif | ||
344 | + clear_flags(tty_settings->c_cflag, mask); | ||
345 | + | ||
346 | + set_flags(tty_settings->c_cflag, (CS8 | CREAD)); | ||
347 | + clear_flags(tty_settings->c_lflag, (ECHONL | ||
348 | + | NOFLSH | ||
349 | + | TOSTOP | ||
350 | + | ECHOPRT | ||
351 | + | XCASE)); | ||
352 | + | ||
353 | + set_flags(tty_settings->c_lflag, (ISIG | ||
354 | + | ICANON | ||
355 | + | ECHO | ||
356 | + | ECHOE | ||
357 | + | ECHOK | ||
358 | + | ECHOCTL | ||
359 | + | ECHOKE)); | ||
360 | +#endif /* TERMIOS */ | ||
361 | |||
362 | if (!noset) { | ||
363 | SET_TTY(fd, tty_settings); | ||
364 | @@ -402,29 +441,23 @@ set_conversions(TTY * tty_settings) | ||
365 | #if defined(EXP_WIN32_DRIVER) | ||
366 | /* FIXME */ | ||
367 | #else | ||
368 | -#ifdef ONLCR | ||
369 | - tty_settings->c_oflag |= ONLCR; | ||
370 | -#endif | ||
371 | - tty_settings->c_iflag |= ICRNL; | ||
372 | - tty_settings->c_lflag |= ECHO; | ||
373 | -#ifdef OXTABS | ||
374 | - tty_settings->c_oflag |= OXTABS; | ||
375 | -#endif /* OXTABS */ | ||
376 | + set_flags(tty_settings->c_oflag, ONLCR); | ||
377 | + set_flags(tty_settings->c_iflag, ICRNL); | ||
378 | + set_flags(tty_settings->c_lflag, ECHO); | ||
379 | + set_flags(tty_settings->c_oflag, OXTABS); | ||
380 | |||
381 | /* test used to be tgetflag("NL") */ | ||
382 | if (VALID_STRING(newline) && newline[0] == '\n' && !newline[1]) { | ||
383 | /* Newline, not linefeed. */ | ||
384 | -#ifdef ONLCR | ||
385 | - tty_settings->c_oflag &= ~((unsigned) ONLCR); | ||
386 | -#endif | ||
387 | - tty_settings->c_iflag &= ~((unsigned) ICRNL); | ||
388 | + clear_flags(tty_settings->c_oflag, ONLCR); | ||
389 | + clear_flags(tty_settings->c_iflag, ICRNL); | ||
390 | } | ||
391 | -#ifdef OXTABS | ||
392 | +#if OXTABS | ||
393 | /* test used to be tgetflag("pt") */ | ||
394 | if (VALID_STRING(set_tab) && VALID_STRING(clear_all_tabs)) | ||
395 | - tty_settings->c_oflag &= ~OXTABS; | ||
396 | + clear_flags(tty_settings->c_oflag, OXTABS); | ||
397 | #endif /* OXTABS */ | ||
398 | - tty_settings->c_lflag |= (ECHOE | ECHOK); | ||
399 | + set_flags(tty_settings->c_lflag, (ECHOE | ECHOK)); | ||
400 | #endif | ||
401 | } | ||
402 | |||
403 | @@ -490,7 +523,7 @@ send_init_strings(int fd GCC_UNUSED, TTY * old_settings) | ||
404 | bool need_flush = FALSE; | ||
405 | |||
406 | (void) old_settings; | ||
407 | -#ifdef TAB3 | ||
408 | +#if TAB3 | ||
409 | if (old_settings != 0 && | ||
410 | old_settings->c_oflag & (TAB3 | ONLCR | OCRNL | ONLRET)) { | ||
411 | old_settings->c_oflag &= (TAB3 | ONLCR | OCRNL | ONLRET); | ||
412 | @@ -512,22 +545,22 @@ send_init_strings(int fd GCC_UNUSED, TTY * old_settings) | ||
413 | |||
414 | if (VALID_STRING(clear_margins)) { | ||
415 | need_flush |= sent_string(clear_margins); | ||
416 | - } else | ||
417 | + } | ||
418 | #if defined(set_lr_margin) | ||
419 | - if (VALID_STRING(set_lr_margin)) { | ||
420 | + else if (VALID_STRING(set_lr_margin)) { | ||
421 | need_flush |= sent_string(TIPARM_2(set_lr_margin, 0, columns - 1)); | ||
422 | - } else | ||
423 | + } | ||
424 | #endif | ||
425 | #if defined(set_left_margin_parm) && defined(set_right_margin_parm) | ||
426 | - if (VALID_STRING(set_left_margin_parm) | ||
427 | - && VALID_STRING(set_right_margin_parm)) { | ||
428 | + else if (VALID_STRING(set_left_margin_parm) | ||
429 | + && VALID_STRING(set_right_margin_parm)) { | ||
430 | need_flush |= sent_string(TIPARM_1(set_left_margin_parm, 0)); | ||
431 | need_flush |= sent_string(TIPARM_1(set_right_margin_parm, | ||
432 | columns - 1)); | ||
433 | - } else | ||
434 | + } | ||
435 | #endif | ||
436 | - if (VALID_STRING(set_left_margin) | ||
437 | - && VALID_STRING(set_right_margin)) { | ||
438 | + else if (VALID_STRING(set_left_margin) | ||
439 | + && VALID_STRING(set_right_margin)) { | ||
440 | need_flush |= to_left_margin(); | ||
441 | need_flush |= sent_string(set_left_margin); | ||
442 | if (VALID_STRING(parm_right_cursor)) { | ||
443 | diff --git a/progs/tabs.c b/progs/tabs.c | ||
444 | index 7378d116..d904330b 100644 | ||
445 | --- a/progs/tabs.c | ||
446 | +++ b/progs/tabs.c | ||
447 | @@ -370,7 +370,9 @@ do_set_margin(int margin, bool no_op) | ||
448 | } | ||
449 | tputs(set_left_margin, 1, putch); | ||
450 | } | ||
451 | - } else if (VALID_STRING(set_left_margin_parm)) { | ||
452 | + } | ||
453 | +#if defined(set_left_margin_parm) && defined(set_right_margin_parm) | ||
454 | + else if (VALID_STRING(set_left_margin_parm)) { | ||
455 | result = TRUE; | ||
456 | if (!no_op) { | ||
457 | if (VALID_STRING(set_right_margin_parm)) { | ||
458 | @@ -379,12 +381,16 @@ do_set_margin(int margin, bool no_op) | ||
459 | tputs(TIPARM_2(set_left_margin_parm, margin, max_cols), 1, putch); | ||
460 | } | ||
461 | } | ||
462 | - } else if (VALID_STRING(set_lr_margin)) { | ||
463 | + } | ||
464 | +#endif | ||
465 | +#if defined(set_lr_margin) | ||
466 | + else if (VALID_STRING(set_lr_margin)) { | ||
467 | result = TRUE; | ||
468 | if (!no_op) { | ||
469 | tputs(TIPARM_2(set_lr_margin, margin, max_cols), 1, putch); | ||
470 | } | ||
471 | } | ||
472 | +#endif | ||
473 | return result; | ||
474 | } | ||
475 | |||
476 | diff --git a/progs/tic.c b/progs/tic.c | ||
477 | index 888927e2..78b568fa 100644 | ||
478 | --- a/progs/tic.c | ||
479 | +++ b/progs/tic.c | ||
480 | @@ -3142,6 +3142,7 @@ guess_ANSI_VTxx(TERMTYPE2 *tp) | ||
481 | * In particular, any ECMA-48 terminal should support these, though the details | ||
482 | * for u9 are implementation dependent. | ||
483 | */ | ||
484 | +#if defined(user6) && defined(user7) && defined(user8) && defined(user9) | ||
485 | static void | ||
486 | check_user_6789(TERMTYPE2 *tp) | ||
487 | { | ||
488 | @@ -3177,6 +3178,9 @@ check_user_6789(TERMTYPE2 *tp) | ||
489 | break; | ||
490 | } | ||
491 | } | ||
492 | +#else | ||
493 | +#define check_user_6789(tp) /* nothing */ | ||
494 | +#endif | ||
495 | |||
496 | /* other sanity-checks (things that we don't want in the normal | ||
497 | * logic that reads a terminfo entry) | ||
498 | -- | ||
499 | 2.40.0 | ||
diff --git a/meta/recipes-core/ncurses/files/0001-tic-hang.patch b/meta/recipes-core/ncurses/files/0001-tic-hang.patch index f98a943e5c..8cb92a3939 100644 --- a/meta/recipes-core/ncurses/files/0001-tic-hang.patch +++ b/meta/recipes-core/ncurses/files/0001-tic-hang.patch | |||
@@ -1,7 +1,7 @@ | |||
1 | From 168ba7a681be73ac024438e33e14fde1d5aea97d Mon Sep 17 00:00:00 2001 | 1 | From a51a53f0eecfd4d083aba8dfcd47c65e93978ff1 Mon Sep 17 00:00:00 2001 |
2 | From: Hongxu Jia <hongxu.jia@windriver.com> | 2 | From: Hongxu Jia <hongxu.jia@windriver.com> |
3 | Date: Fri, 30 Mar 2018 10:02:24 +0800 | 3 | Date: Fri, 30 Mar 2018 10:02:24 +0800 |
4 | Subject: [PATCH 1/2] tic hang | 4 | Subject: [PATCH] tic hang |
5 | 5 | ||
6 | Upstream-Status: Inappropriate [configuration] | 6 | Upstream-Status: Inappropriate [configuration] |
7 | 7 | ||
@@ -17,10 +17,10 @@ Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com> | |||
17 | 1 file changed, 5 insertions(+), 6 deletions(-) | 17 | 1 file changed, 5 insertions(+), 6 deletions(-) |
18 | 18 | ||
19 | diff --git a/misc/terminfo.src b/misc/terminfo.src | 19 | diff --git a/misc/terminfo.src b/misc/terminfo.src |
20 | index 84f4810..6b385ec 100644 | 20 | index 5d575b8e..f9cc6880 100644 |
21 | --- a/misc/terminfo.src | 21 | --- a/misc/terminfo.src |
22 | +++ b/misc/terminfo.src | 22 | +++ b/misc/terminfo.src |
23 | @@ -5562,12 +5562,11 @@ konsole-xf3x|KDE console window with keyboard for XFree86 3.x xterm, | 23 | @@ -6518,12 +6518,11 @@ konsole-xf3x|KDE console window with keyboard for XFree86 3.x xterm, |
24 | # The value for kbs (see konsole-vt100) reflects local customization rather | 24 | # The value for kbs (see konsole-vt100) reflects local customization rather |
25 | # than the settings used for XFree86 xterm. | 25 | # than the settings used for XFree86 xterm. |
26 | konsole-xf4x|KDE console window with keyboard for XFree86 4.x xterm, | 26 | konsole-xf4x|KDE console window with keyboard for XFree86 4.x xterm, |
@@ -38,6 +38,3 @@ index 84f4810..6b385ec 100644 | |||
38 | 38 | ||
39 | # Obsolete: vt100.keymap | 39 | # Obsolete: vt100.keymap |
40 | # KDE's "vt100" keyboard has no relationship to any terminal that DEC made, but | 40 | # KDE's "vt100" keyboard has no relationship to any terminal that DEC made, but |
41 | -- | ||
42 | 1.8.3.1 | ||
43 | |||
diff --git a/meta/recipes-core/ncurses/files/0002-configure-reproducible.patch b/meta/recipes-core/ncurses/files/0002-configure-reproducible.patch index 66f26c06ab..11ca66c8e8 100644 --- a/meta/recipes-core/ncurses/files/0002-configure-reproducible.patch +++ b/meta/recipes-core/ncurses/files/0002-configure-reproducible.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From ec87e53066a9942e9aaba817d71268342f5e83b9 Mon Sep 17 00:00:00 2001 | 1 | From 63cf58044f4ab3297c5a2d0e132e87ebfa80c753 Mon Sep 17 00:00:00 2001 |
2 | From: Hongxu Jia <hongxu.jia@windriver.com> | 2 | From: Hongxu Jia <hongxu.jia@windriver.com> |
3 | Date: Wed, 16 Aug 2017 14:45:27 +0800 | 3 | Date: Wed, 16 Aug 2017 14:45:27 +0800 |
4 | Subject: [PATCH] configure: reproducible | 4 | Subject: [PATCH] configure: reproducible |
@@ -13,16 +13,15 @@ Signed-off-by: Juro Bystricky <juro.bystricky@intel.com> | |||
13 | Rebase to 6.1 | 13 | Rebase to 6.1 |
14 | 14 | ||
15 | Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com> | 15 | Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com> |
16 | |||
17 | --- | 16 | --- |
18 | configure | 2 +- | 17 | configure | 2 +- |
19 | 1 file changed, 1 insertion(+), 1 deletion(-) | 18 | 1 file changed, 1 insertion(+), 1 deletion(-) |
20 | 19 | ||
21 | diff --git a/configure b/configure | 20 | diff --git a/configure b/configure |
22 | index 421cf859..a1b7840d 100755 | 21 | index 488d93fc..005d44e2 100755 |
23 | --- a/configure | 22 | --- a/configure |
24 | +++ b/configure | 23 | +++ b/configure |
25 | @@ -5072,7 +5072,7 @@ else | 24 | @@ -5129,7 +5129,7 @@ else |
26 | ;; | 25 | ;; |
27 | (*) | 26 | (*) |
28 | cf_cv_ar_flags=unknown | 27 | cf_cv_ar_flags=unknown |
diff --git a/meta/recipes-core/ncurses/files/0003-gen-pkgconfig.in-Do-not-include-LDFLAGS-in-generated.patch b/meta/recipes-core/ncurses/files/0003-gen-pkgconfig.in-Do-not-include-LDFLAGS-in-generated.patch index a15694d4d4..d89399bbe5 100644 --- a/meta/recipes-core/ncurses/files/0003-gen-pkgconfig.in-Do-not-include-LDFLAGS-in-generated.patch +++ b/meta/recipes-core/ncurses/files/0003-gen-pkgconfig.in-Do-not-include-LDFLAGS-in-generated.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 10cd0c12a6e14fb4f0498c299c1dd32720b710da Mon Sep 17 00:00:00 2001 | 1 | From 5962a5ee2885f67a396f7e8955ac1bbd7f15d4aa Mon Sep 17 00:00:00 2001 |
2 | From: Nathan Rossi <nathan@nathanrossi.com> | 2 | From: Nathan Rossi <nathan@nathanrossi.com> |
3 | Date: Mon, 14 Dec 2020 13:39:02 +1000 | 3 | Date: Mon, 14 Dec 2020 13:39:02 +1000 |
4 | Subject: [PATCH] gen-pkgconfig.in: Do not include LDFLAGS in generated pc | 4 | Subject: [PATCH] gen-pkgconfig.in: Do not include LDFLAGS in generated pc |
@@ -10,13 +10,12 @@ includes build host specific paths and options (e.g. uninative and | |||
10 | 10 | ||
11 | Upstream-Status: Inappropriate [OE Specific] | 11 | Upstream-Status: Inappropriate [OE Specific] |
12 | Signed-off-by: Nathan Rossi <nathan@nathanrossi.com> | 12 | Signed-off-by: Nathan Rossi <nathan@nathanrossi.com> |
13 | |||
14 | --- | 13 | --- |
15 | misc/gen-pkgconfig.in | 2 +- | 14 | misc/gen-pkgconfig.in | 2 +- |
16 | 1 file changed, 1 insertion(+), 1 deletion(-) | 15 | 1 file changed, 1 insertion(+), 1 deletion(-) |
17 | 16 | ||
18 | diff --git a/misc/gen-pkgconfig.in b/misc/gen-pkgconfig.in | 17 | diff --git a/misc/gen-pkgconfig.in b/misc/gen-pkgconfig.in |
19 | index a45dd54f..85273054 100644 | 18 | index 89a5cd4a..07d94d17 100644 |
20 | --- a/misc/gen-pkgconfig.in | 19 | --- a/misc/gen-pkgconfig.in |
21 | +++ b/misc/gen-pkgconfig.in | 20 | +++ b/misc/gen-pkgconfig.in |
22 | @@ -83,7 +83,7 @@ if [ "$includedir" != "/usr/include" ]; then | 21 | @@ -83,7 +83,7 @@ if [ "$includedir" != "/usr/include" ]; then |
diff --git a/meta/recipes-core/ncurses/files/CVE-2023-45918.patch b/meta/recipes-core/ncurses/files/CVE-2023-45918.patch deleted file mode 100644 index fbdae49a61..0000000000 --- a/meta/recipes-core/ncurses/files/CVE-2023-45918.patch +++ /dev/null | |||
@@ -1,180 +0,0 @@ | |||
1 | From bcf02d3242f1c7d57224a95f7903fcf4b5e7695d Mon Sep 17 00:00:00 2001 | ||
2 | From: Thomas E. Dickey <dickey@invisible-island.net> | ||
3 | Date: Fri, 16 Jun 2023 02:54:29 +0530 | ||
4 | Subject: [PATCH] Fix CVE-2023-45918 | ||
5 | |||
6 | CVE: CVE-2023-45918 | ||
7 | |||
8 | Upstream-Status: Backport [https://ncurses.scripts.mit.edu/?p=ncurses.git;a=commit;h=bcf02d3242f1c7d57224a95f7903fcf4b5e7695d] | ||
9 | |||
10 | Signed-off-by: Soumya Sambu <soumya.sambu@windriver.com> | ||
11 | --- | ||
12 | ncurses/tinfo/comp_error.c | 15 ++++++--- | ||
13 | ncurses/tinfo/read_entry.c | 65 ++++++++++++++++++++++++++------------ | ||
14 | 2 files changed, 56 insertions(+), 24 deletions(-) | ||
15 | |||
16 | diff --git a/ncurses/tinfo/comp_error.c b/ncurses/tinfo/comp_error.c | ||
17 | index 48f48784..ee518e28 100644 | ||
18 | --- a/ncurses/tinfo/comp_error.c | ||
19 | +++ b/ncurses/tinfo/comp_error.c | ||
20 | @@ -60,8 +60,15 @@ _nc_get_source(void) | ||
21 | NCURSES_EXPORT(void) | ||
22 | _nc_set_source(const char *const name) | ||
23 | { | ||
24 | - FreeIfNeeded(SourceName); | ||
25 | - SourceName = strdup(name); | ||
26 | + if (name == NULL) { | ||
27 | + free(SourceName); | ||
28 | + SourceName = NULL; | ||
29 | + } else if (SourceName == NULL) { | ||
30 | + SourceName = strdup(name); | ||
31 | + } else if (strcmp(name, SourceName)) { | ||
32 | + free(SourceName); | ||
33 | + SourceName = strdup(name); | ||
34 | + } | ||
35 | } | ||
36 | |||
37 | NCURSES_EXPORT(void) | ||
38 | @@ -95,9 +102,9 @@ static NCURSES_INLINE void | ||
39 | where_is_problem(void) | ||
40 | { | ||
41 | fprintf(stderr, "\"%s\"", SourceName ? SourceName : "?"); | ||
42 | - if (_nc_curr_line >= 0) | ||
43 | + if (_nc_curr_line > 0) | ||
44 | fprintf(stderr, ", line %d", _nc_curr_line); | ||
45 | - if (_nc_curr_col >= 0) | ||
46 | + if (_nc_curr_col > 0) | ||
47 | fprintf(stderr, ", col %d", _nc_curr_col); | ||
48 | if (TermType != 0 && TermType[0] != '\0') | ||
49 | fprintf(stderr, ", terminal '%s'", TermType); | ||
50 | diff --git a/ncurses/tinfo/read_entry.c b/ncurses/tinfo/read_entry.c | ||
51 | index 341337d2..b0c3ad26 100644 | ||
52 | --- a/ncurses/tinfo/read_entry.c | ||
53 | +++ b/ncurses/tinfo/read_entry.c | ||
54 | @@ -138,12 +138,13 @@ convert_16bits(char *buf, NCURSES_INT2 *Numbers, int count) | ||
55 | } | ||
56 | #endif | ||
57 | |||
58 | -static void | ||
59 | -convert_strings(char *buf, char **Strings, int count, int size, char *table) | ||
60 | +static bool | ||
61 | +convert_strings(char *buf, char **Strings, int count, int size, | ||
62 | + char *table, bool always) | ||
63 | { | ||
64 | int i; | ||
65 | char *p; | ||
66 | - bool corrupt = FALSE; | ||
67 | + bool success = TRUE; | ||
68 | |||
69 | for (i = 0; i < count; i++) { | ||
70 | if (IS_NEG1(buf + 2 * i)) { | ||
71 | @@ -159,13 +160,10 @@ convert_strings(char *buf, char **Strings, int count, int size, char *table) | ||
72 | TR(TRACE_DATABASE, ("Strings[%d] = %s", i, | ||
73 | _nc_visbuf(Strings[i]))); | ||
74 | } else { | ||
75 | - if (!corrupt) { | ||
76 | - corrupt = TRUE; | ||
77 | - TR(TRACE_DATABASE, | ||
78 | - ("ignore out-of-range index %d to Strings[]", nn)); | ||
79 | - _nc_warning("corrupt data found in convert_strings"); | ||
80 | - } | ||
81 | - Strings[i] = ABSENT_STRING; | ||
82 | + TR(TRACE_DATABASE, | ||
83 | + ("found out-of-range index %d to Strings[%d]", nn, i)); | ||
84 | + success = FALSE; | ||
85 | + break; | ||
86 | } | ||
87 | } | ||
88 | |||
89 | @@ -175,10 +173,25 @@ convert_strings(char *buf, char **Strings, int count, int size, char *table) | ||
90 | if (*p == '\0') | ||
91 | break; | ||
92 | /* if there is no NUL, ignore the string */ | ||
93 | - if (p >= table + size) | ||
94 | + if (p >= table + size) { | ||
95 | Strings[i] = ABSENT_STRING; | ||
96 | + } else if (p == Strings[i] && always) { | ||
97 | + TR(TRACE_DATABASE, | ||
98 | + ("found empty but required Strings[%d]", i)); | ||
99 | + success = FALSE; | ||
100 | + break; | ||
101 | + } | ||
102 | + } else if (always) { /* names are always needed */ | ||
103 | + TR(TRACE_DATABASE, | ||
104 | + ("found invalid but required Strings[%d]", i)); | ||
105 | + success = FALSE; | ||
106 | + break; | ||
107 | } | ||
108 | } | ||
109 | + if (!success) { | ||
110 | + _nc_warning("corrupt data found in convert_strings"); | ||
111 | + } | ||
112 | + return success; | ||
113 | } | ||
114 | |||
115 | static int | ||
116 | @@ -382,7 +395,10 @@ _nc_read_termtype(TERMTYPE2 *ptr, char *buffer, int limit) | ||
117 | if (Read(string_table, (unsigned) str_size) != str_size) { | ||
118 | returnDB(TGETENT_NO); | ||
119 | } | ||
120 | - convert_strings(buf, ptr->Strings, str_count, str_size, string_table); | ||
121 | + if (!convert_strings(buf, ptr->Strings, str_count, str_size, | ||
122 | + string_table, FALSE)) { | ||
123 | + returnDB(TGETENT_NO); | ||
124 | + } | ||
125 | } | ||
126 | #if NCURSES_XNAMES | ||
127 | |||
128 | @@ -483,8 +499,10 @@ _nc_read_termtype(TERMTYPE2 *ptr, char *buffer, int limit) | ||
129 | ("Before computing extended-string capabilities " | ||
130 | "str_count=%d, ext_str_count=%d", | ||
131 | str_count, ext_str_count)); | ||
132 | - convert_strings(buf, ptr->Strings + str_count, ext_str_count, | ||
133 | - ext_str_limit, ptr->ext_str_table); | ||
134 | + if (!convert_strings(buf, ptr->Strings + str_count, ext_str_count, | ||
135 | + ext_str_limit, ptr->ext_str_table, FALSE)) { | ||
136 | + returnDB(TGETENT_NO); | ||
137 | + } | ||
138 | for (i = ext_str_count - 1; i >= 0; i--) { | ||
139 | TR(TRACE_DATABASE, ("MOVE from [%d:%d] %s", | ||
140 | i, i + str_count, | ||
141 | @@ -516,10 +534,13 @@ _nc_read_termtype(TERMTYPE2 *ptr, char *buffer, int limit) | ||
142 | TR(TRACE_DATABASE, | ||
143 | ("ext_NAMES starting @%d in extended_strings, first = %s", | ||
144 | base, _nc_visbuf(ptr->ext_str_table + base))); | ||
145 | - convert_strings(buf + (2 * ext_str_count), | ||
146 | - ptr->ext_Names, | ||
147 | - (int) need, | ||
148 | - ext_str_limit, ptr->ext_str_table + base); | ||
149 | + if (!convert_strings(buf + (2 * ext_str_count), | ||
150 | + ptr->ext_Names, | ||
151 | + (int) need, | ||
152 | + ext_str_limit, ptr->ext_str_table + base, | ||
153 | + TRUE)) { | ||
154 | + returnDB(TGETENT_NO); | ||
155 | + } | ||
156 | } | ||
157 | |||
158 | TR(TRACE_DATABASE, | ||
159 | @@ -572,13 +593,17 @@ _nc_read_file_entry(const char *const filename, TERMTYPE2 *ptr) | ||
160 | int limit; | ||
161 | char buffer[MAX_ENTRY_SIZE + 1]; | ||
162 | |||
163 | - if ((limit = (int) fread(buffer, sizeof(char), sizeof(buffer), fp)) | ||
164 | - > 0) { | ||
165 | + limit = (int) fread(buffer, sizeof(char), sizeof(buffer), fp); | ||
166 | + if (limit > 0) { | ||
167 | + const char *old_source = _nc_get_source(); | ||
168 | |||
169 | TR(TRACE_DATABASE, ("read terminfo %s", filename)); | ||
170 | + if (old_source == NULL) | ||
171 | + _nc_set_source(filename); | ||
172 | if ((code = _nc_read_termtype(ptr, buffer, limit)) == TGETENT_NO) { | ||
173 | _nc_free_termtype2(ptr); | ||
174 | } | ||
175 | + _nc_set_source(old_source); | ||
176 | } else { | ||
177 | code = TGETENT_NO; | ||
178 | } | ||
179 | -- | ||
180 | 2.40.0 | ||
diff --git a/meta/recipes-core/ncurses/files/CVE-2023-50495.patch b/meta/recipes-core/ncurses/files/CVE-2023-50495.patch deleted file mode 100644 index 7d90ddd30f..0000000000 --- a/meta/recipes-core/ncurses/files/CVE-2023-50495.patch +++ /dev/null | |||
@@ -1,301 +0,0 @@ | |||
1 | From 7daae3f2139a678fe0ae0b42fcf8d807cbff485c Mon Sep 17 00:00:00 2001 | ||
2 | From: Mingli Yu <mingli.yu@windriver.com> | ||
3 | Date: Sun, 4 Feb 2024 13:42:38 +0800 | ||
4 | Subject: [PATCH] parse_entry.c: check return value of _nc_save_str | ||
5 | |||
6 | * check return value of _nc_save_str(), in special case for tic where | ||
7 | extended capabilities are processed but the terminal description was | ||
8 | not initialized (report by Ziqiao Kong). | ||
9 | |||
10 | * regenerate llib-* files. | ||
11 | |||
12 | CVE: CVE-2023-50495 | ||
13 | |||
14 | Upstream-Status: Backport [http://ncurses.scripts.mit.edu/?p=ncurses.git;a=commitdiff;h=7723dd6799ab10b32047ec73b14df9f107bafe99] | ||
15 | |||
16 | Signed-off-by: Mingli Yu <mingli.yu@windriver.com> | ||
17 | --- | ||
18 | ncurses/llib-lncurses | 15 +++++++++++++++ | ||
19 | ncurses/llib-lncursest | 15 +++++++++++++++ | ||
20 | ncurses/llib-lncursestw | 15 +++++++++++++++ | ||
21 | ncurses/llib-lncursesw | 15 +++++++++++++++ | ||
22 | ncurses/llib-ltinfo | 15 +++++++++++++++ | ||
23 | ncurses/llib-ltinfot | 15 +++++++++++++++ | ||
24 | ncurses/llib-ltinfotw | 15 +++++++++++++++ | ||
25 | ncurses/llib-ltinfow | 15 +++++++++++++++ | ||
26 | ncurses/tinfo/parse_entry.c | 23 ++++++++++++++++------- | ||
27 | 9 files changed, 136 insertions(+), 7 deletions(-) | ||
28 | |||
29 | diff --git a/ncurses/llib-lncurses b/ncurses/llib-lncurses | ||
30 | index 211cf3b7..e4190aa2 100644 | ||
31 | --- a/ncurses/llib-lncurses | ||
32 | +++ b/ncurses/llib-lncurses | ||
33 | @@ -3656,6 +3656,21 @@ char *tiparm( | ||
34 | ...) | ||
35 | { return(*(char **)0); } | ||
36 | |||
37 | +#undef tiparm_s | ||
38 | +char *tiparm_s( | ||
39 | + int num_expected, | ||
40 | + int tparm_type, | ||
41 | + const char *string, | ||
42 | + ...) | ||
43 | + { return(*(char **)0); } | ||
44 | + | ||
45 | +#undef tiscan_s | ||
46 | +int tiscan_s( | ||
47 | + int *num_expected, | ||
48 | + int *tparm_type, | ||
49 | + const char *string) | ||
50 | + { return(*(int *)0); } | ||
51 | + | ||
52 | #undef _nc_tiparm | ||
53 | char *_nc_tiparm( | ||
54 | int expected, | ||
55 | diff --git a/ncurses/llib-lncursest b/ncurses/llib-lncursest | ||
56 | index 1b09d676..e07abba6 100644 | ||
57 | --- a/ncurses/llib-lncursest | ||
58 | +++ b/ncurses/llib-lncursest | ||
59 | @@ -3741,6 +3741,21 @@ char *tiparm( | ||
60 | ...) | ||
61 | { return(*(char **)0); } | ||
62 | |||
63 | +#undef tiparm_s | ||
64 | +char *tiparm_s( | ||
65 | + int num_expected, | ||
66 | + int tparm_type, | ||
67 | + const char *string, | ||
68 | + ...) | ||
69 | + { return(*(char **)0); } | ||
70 | + | ||
71 | +#undef tiscan_s | ||
72 | +int tiscan_s( | ||
73 | + int *num_expected, | ||
74 | + int *tparm_type, | ||
75 | + const char *string) | ||
76 | + { return(*(int *)0); } | ||
77 | + | ||
78 | #undef _nc_tiparm | ||
79 | char *_nc_tiparm( | ||
80 | int expected, | ||
81 | diff --git a/ncurses/llib-lncursestw b/ncurses/llib-lncursestw | ||
82 | index 4576e0fc..747c6be8 100644 | ||
83 | --- a/ncurses/llib-lncursestw | ||
84 | +++ b/ncurses/llib-lncursestw | ||
85 | @@ -4702,6 +4702,21 @@ char *tiparm( | ||
86 | ...) | ||
87 | { return(*(char **)0); } | ||
88 | |||
89 | +#undef tiparm_s | ||
90 | +char *tiparm_s( | ||
91 | + int num_expected, | ||
92 | + int tparm_type, | ||
93 | + const char *string, | ||
94 | + ...) | ||
95 | + { return(*(char **)0); } | ||
96 | + | ||
97 | +#undef tiscan_s | ||
98 | +int tiscan_s( | ||
99 | + int *num_expected, | ||
100 | + int *tparm_type, | ||
101 | + const char *string) | ||
102 | + { return(*(int *)0); } | ||
103 | + | ||
104 | #undef _nc_tiparm | ||
105 | char *_nc_tiparm( | ||
106 | int expected, | ||
107 | diff --git a/ncurses/llib-lncursesw b/ncurses/llib-lncursesw | ||
108 | index 127350d2..862305d9 100644 | ||
109 | --- a/ncurses/llib-lncursesw | ||
110 | +++ b/ncurses/llib-lncursesw | ||
111 | @@ -4617,6 +4617,21 @@ char *tiparm( | ||
112 | ...) | ||
113 | { return(*(char **)0); } | ||
114 | |||
115 | +#undef tiparm_s | ||
116 | +char *tiparm_s( | ||
117 | + int num_expected, | ||
118 | + int tparm_type, | ||
119 | + const char *string, | ||
120 | + ...) | ||
121 | + { return(*(char **)0); } | ||
122 | + | ||
123 | +#undef tiscan_s | ||
124 | +int tiscan_s( | ||
125 | + int *num_expected, | ||
126 | + int *tparm_type, | ||
127 | + const char *string) | ||
128 | + { return(*(int *)0); } | ||
129 | + | ||
130 | #undef _nc_tiparm | ||
131 | char *_nc_tiparm( | ||
132 | int expected, | ||
133 | diff --git a/ncurses/llib-ltinfo b/ncurses/llib-ltinfo | ||
134 | index a5cd7cd3..31e5e9a6 100644 | ||
135 | --- a/ncurses/llib-ltinfo | ||
136 | +++ b/ncurses/llib-ltinfo | ||
137 | @@ -927,6 +927,21 @@ char *tiparm( | ||
138 | ...) | ||
139 | { return(*(char **)0); } | ||
140 | |||
141 | +#undef tiparm_s | ||
142 | +char *tiparm_s( | ||
143 | + int num_expected, | ||
144 | + int tparm_type, | ||
145 | + const char *string, | ||
146 | + ...) | ||
147 | + { return(*(char **)0); } | ||
148 | + | ||
149 | +#undef tiscan_s | ||
150 | +int tiscan_s( | ||
151 | + int *num_expected, | ||
152 | + int *tparm_type, | ||
153 | + const char *string) | ||
154 | + { return(*(int *)0); } | ||
155 | + | ||
156 | #undef _nc_tiparm | ||
157 | char *_nc_tiparm( | ||
158 | int expected, | ||
159 | diff --git a/ncurses/llib-ltinfot b/ncurses/llib-ltinfot | ||
160 | index bd3de812..48e5c25a 100644 | ||
161 | --- a/ncurses/llib-ltinfot | ||
162 | +++ b/ncurses/llib-ltinfot | ||
163 | @@ -1003,6 +1003,21 @@ char *tiparm( | ||
164 | ...) | ||
165 | { return(*(char **)0); } | ||
166 | |||
167 | +#undef tiparm_s | ||
168 | +char *tiparm_s( | ||
169 | + int num_expected, | ||
170 | + int tparm_type, | ||
171 | + const char *string, | ||
172 | + ...) | ||
173 | + { return(*(char **)0); } | ||
174 | + | ||
175 | +#undef tiscan_s | ||
176 | +int tiscan_s( | ||
177 | + int *num_expected, | ||
178 | + int *tparm_type, | ||
179 | + const char *string) | ||
180 | + { return(*(int *)0); } | ||
181 | + | ||
182 | #undef _nc_tiparm | ||
183 | char *_nc_tiparm( | ||
184 | int expected, | ||
185 | diff --git a/ncurses/llib-ltinfotw b/ncurses/llib-ltinfotw | ||
186 | index 4d35a1e1..64dfdfa5 100644 | ||
187 | --- a/ncurses/llib-ltinfotw | ||
188 | +++ b/ncurses/llib-ltinfotw | ||
189 | @@ -1025,6 +1025,21 @@ char *tiparm( | ||
190 | ...) | ||
191 | { return(*(char **)0); } | ||
192 | |||
193 | +#undef tiparm_s | ||
194 | +char *tiparm_s( | ||
195 | + int num_expected, | ||
196 | + int tparm_type, | ||
197 | + const char *string, | ||
198 | + ...) | ||
199 | + { return(*(char **)0); } | ||
200 | + | ||
201 | +#undef tiscan_s | ||
202 | +int tiscan_s( | ||
203 | + int *num_expected, | ||
204 | + int *tparm_type, | ||
205 | + const char *string) | ||
206 | + { return(*(int *)0); } | ||
207 | + | ||
208 | #undef _nc_tiparm | ||
209 | char *_nc_tiparm( | ||
210 | int expected, | ||
211 | diff --git a/ncurses/llib-ltinfow b/ncurses/llib-ltinfow | ||
212 | index db846764..7e17a35f 100644 | ||
213 | --- a/ncurses/llib-ltinfow | ||
214 | +++ b/ncurses/llib-ltinfow | ||
215 | @@ -949,6 +949,21 @@ char *tiparm( | ||
216 | ...) | ||
217 | { return(*(char **)0); } | ||
218 | |||
219 | +#undef tiparm_s | ||
220 | +char *tiparm_s( | ||
221 | + int num_expected, | ||
222 | + int tparm_type, | ||
223 | + const char *string, | ||
224 | + ...) | ||
225 | + { return(*(char **)0); } | ||
226 | + | ||
227 | +#undef tiscan_s | ||
228 | +int tiscan_s( | ||
229 | + int *num_expected, | ||
230 | + int *tparm_type, | ||
231 | + const char *string) | ||
232 | + { return(*(int *)0); } | ||
233 | + | ||
234 | #undef _nc_tiparm | ||
235 | char *_nc_tiparm( | ||
236 | int expected, | ||
237 | diff --git a/ncurses/tinfo/parse_entry.c b/ncurses/tinfo/parse_entry.c | ||
238 | index 14bcb67e..0a0b5637 100644 | ||
239 | --- a/ncurses/tinfo/parse_entry.c | ||
240 | +++ b/ncurses/tinfo/parse_entry.c | ||
241 | @@ -110,7 +110,7 @@ _nc_extend_names(ENTRY * entryp, const char *name, int token_type) | ||
242 | /* Well, we are given a cancel for a name that we don't recognize */ | ||
243 | return _nc_extend_names(entryp, name, STRING); | ||
244 | default: | ||
245 | - return 0; | ||
246 | + return NULL; | ||
247 | } | ||
248 | |||
249 | /* Adjust the 'offset' (insertion-point) to keep the lists of extended | ||
250 | @@ -142,6 +142,11 @@ _nc_extend_names(ENTRY * entryp, const char *name, int token_type) | ||
251 | for (last = (unsigned) (max - 1); last > tindex; last--) | ||
252 | |||
253 | if (!found) { | ||
254 | + char *saved; | ||
255 | + | ||
256 | + if ((saved = _nc_save_str(name)) == NULL) | ||
257 | + return NULL; | ||
258 | + | ||
259 | switch (token_type) { | ||
260 | case BOOLEAN: | ||
261 | tp->ext_Booleans++; | ||
262 | @@ -169,7 +174,7 @@ _nc_extend_names(ENTRY * entryp, const char *name, int token_type) | ||
263 | TYPE_REALLOC(char *, actual, tp->ext_Names); | ||
264 | while (--actual > offset) | ||
265 | tp->ext_Names[actual] = tp->ext_Names[actual - 1]; | ||
266 | - tp->ext_Names[offset] = _nc_save_str(name); | ||
267 | + tp->ext_Names[offset] = saved; | ||
268 | } | ||
269 | |||
270 | temp.nte_name = tp->ext_Names[offset]; | ||
271 | @@ -364,6 +369,8 @@ _nc_parse_entry(ENTRY * entryp, int literal, bool silent) | ||
272 | bool is_use = (strcmp(_nc_curr_token.tk_name, "use") == 0); | ||
273 | bool is_tc = !is_use && (strcmp(_nc_curr_token.tk_name, "tc") == 0); | ||
274 | if (is_use || is_tc) { | ||
275 | + char *saved; | ||
276 | + | ||
277 | if (!VALID_STRING(_nc_curr_token.tk_valstring) | ||
278 | || _nc_curr_token.tk_valstring[0] == '\0') { | ||
279 | _nc_warning("missing name for use-clause"); | ||
280 | @@ -377,11 +384,13 @@ _nc_parse_entry(ENTRY * entryp, int literal, bool silent) | ||
281 | _nc_curr_token.tk_valstring); | ||
282 | continue; | ||
283 | } | ||
284 | - entryp->uses[entryp->nuses].name = _nc_save_str(_nc_curr_token.tk_valstring); | ||
285 | - entryp->uses[entryp->nuses].line = _nc_curr_line; | ||
286 | - entryp->nuses++; | ||
287 | - if (entryp->nuses > 1 && is_tc) { | ||
288 | - BAD_TC_USAGE | ||
289 | + if ((saved = _nc_save_str(_nc_curr_token.tk_valstring)) != NULL) { | ||
290 | + entryp->uses[entryp->nuses].name = saved; | ||
291 | + entryp->uses[entryp->nuses].line = _nc_curr_line; | ||
292 | + entryp->nuses++; | ||
293 | + if (entryp->nuses > 1 && is_tc) { | ||
294 | + BAD_TC_USAGE | ||
295 | + } | ||
296 | } | ||
297 | } else { | ||
298 | /* normal token lookup */ | ||
299 | -- | ||
300 | 2.25.1 | ||
301 | |||
diff --git a/meta/recipes-core/ncurses/files/exit_prototype.patch b/meta/recipes-core/ncurses/files/exit_prototype.patch index fd961512e0..299852d2c0 100644 --- a/meta/recipes-core/ncurses/files/exit_prototype.patch +++ b/meta/recipes-core/ncurses/files/exit_prototype.patch | |||
@@ -1,28 +1,27 @@ | |||
1 | From 4a769a441d7e57a23017c3037cde3e53fb9f35fe Mon Sep 17 00:00:00 2001 | 1 | From af798dceafec8a9ea3f83fc250d784511ca0a29c Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Tue, 30 Aug 2022 15:58:32 -0700 | 3 | Date: Tue, 30 Aug 2022 15:58:32 -0700 |
4 | Subject: [PATCH] Add needed headers for including mbstate_t and exit() | 4 | Subject: [PATCH] Add needed headers for including mbstate_t and exit() |
5 | 5 | ||
6 | Upstream-Status: Inappropriate [Reconfigure will solve it] | 6 | Upstream-Status: Inappropriate [Reconfigure will solve it] |
7 | Signed-off-by: Khem Raj <raj.khem@gmail.com> | 7 | Signed-off-by: Khem Raj <raj.khem@gmail.com> |
8 | |||
9 | --- | 8 | --- |
10 | configure | 2 ++ | 9 | configure | 2 ++ |
11 | 1 file changed, 2 insertions(+) | 10 | 1 file changed, 2 insertions(+) |
12 | 11 | ||
13 | diff --git a/configure b/configure | 12 | diff --git a/configure b/configure |
14 | index f377f551..163f8899 100755 | 13 | index 005d44e2..72fa6c23 100755 |
15 | --- a/configure | 14 | --- a/configure |
16 | +++ b/configure | 15 | +++ b/configure |
17 | @@ -3423,6 +3423,7 @@ rm -f "conftest.$ac_objext" "conftest.$ac_ext" | 16 | @@ -3462,6 +3462,7 @@ rm -f "conftest.$ac_objext" "conftest.$ac_ext" |
18 | cat >"conftest.$ac_ext" <<_ACEOF | 17 | cat >"conftest.$ac_ext" <<_ACEOF |
19 | #line 3424 "configure" | 18 | #line 3463 "configure" |
20 | #include "confdefs.h" | 19 | #include "confdefs.h" |
21 | +#include <stdlib.h> | 20 | +#include <stdlib.h> |
22 | $ac_declaration | 21 | $ac_declaration |
23 | int | 22 | int |
24 | main (void) | 23 | main (void) |
25 | @@ -13111,6 +13112,7 @@ cat >"conftest.$ac_ext" <<_ACEOF | 24 | @@ -13533,6 +13534,7 @@ cat >"conftest.$ac_ext" <<_ACEOF |
26 | #include <stdlib.h> | 25 | #include <stdlib.h> |
27 | #include <stdarg.h> | 26 | #include <stdarg.h> |
28 | #include <stdio.h> | 27 | #include <stdio.h> |
diff --git a/meta/recipes-core/ncurses/ncurses.inc b/meta/recipes-core/ncurses/ncurses.inc index 761b6a3d31..f5e37b94da 100644 --- a/meta/recipes-core/ncurses/ncurses.inc +++ b/meta/recipes-core/ncurses/ncurses.inc | |||
@@ -2,7 +2,7 @@ SUMMARY = "The New Curses library" | |||
2 | DESCRIPTION = "SVr4 and XSI-Curses compatible curses library and terminfo tools including tic, infocmp, captoinfo. Supports color, multiple highlights, forms-drawing characters, and automatic recognition of keypad and function-key sequences. Extensions include resizable windows and mouse support on both xterm and Linux console using the gpm library." | 2 | DESCRIPTION = "SVr4 and XSI-Curses compatible curses library and terminfo tools including tic, infocmp, captoinfo. Supports color, multiple highlights, forms-drawing characters, and automatic recognition of keypad and function-key sequences. Extensions include resizable windows and mouse support on both xterm and Linux console using the gpm library." |
3 | HOMEPAGE = "http://www.gnu.org/software/ncurses/ncurses.html" | 3 | HOMEPAGE = "http://www.gnu.org/software/ncurses/ncurses.html" |
4 | LICENSE = "MIT" | 4 | LICENSE = "MIT" |
5 | LIC_FILES_CHKSUM = "file://COPYING;md5=c5a4600fdef86384c41ca33ecc70a4b8;endline=27" | 5 | LIC_FILES_CHKSUM = "file://COPYING;md5=6f291ee54551d9d8d992ecd623fe4bc7;endline=27" |
6 | SECTION = "libs" | 6 | SECTION = "libs" |
7 | DEPENDS = "ncurses-native" | 7 | DEPENDS = "ncurses-native" |
8 | DEPENDS:class-native = "" | 8 | DEPENDS:class-native = "" |
@@ -13,15 +13,13 @@ BINCONFIG = "${bindir}/ncurses5-config ${bindir}/ncursesw5-config \ | |||
13 | inherit autotools binconfig-disabled multilib_header pkgconfig | 13 | inherit autotools binconfig-disabled multilib_header pkgconfig |
14 | 14 | ||
15 | # Upstream has useful patches at times at ftp://invisible-island.net/ncurses/ | 15 | # Upstream has useful patches at times at ftp://invisible-island.net/ncurses/ |
16 | SRC_URI = "git://github.com/mirror/ncurses.git;protocol=https;branch=master" | 16 | SRC_URI = "git://github.com/ThomasDickey/ncurses-snapshots.git;protocol=https;branch=master" |
17 | 17 | ||
18 | EXTRA_AUTORECONF = "-I m4" | 18 | EXTRA_AUTORECONF = "-I m4" |
19 | 19 | ||
20 | CACHED_CONFIGUREVARS = "cf_cv_func_nanosleep=yes" | 20 | CACHED_CONFIGUREVARS = "cf_cv_func_nanosleep=yes" |
21 | CACHED_CONFIGUREVARS:append:linux = " cf_cv_working_poll=yes" | 21 | CACHED_CONFIGUREVARS:append:linux = " cf_cv_working_poll=yes" |
22 | 22 | ||
23 | EXTRASITECONFIG = "CFLAGS='${CFLAGS} -I${SYSROOT_DESTDIR}${includedir}'" | ||
24 | |||
25 | # Whether to enable separate widec libraries; must be 'true' or 'false' | 23 | # Whether to enable separate widec libraries; must be 'true' or 'false' |
26 | # | 24 | # |
27 | # TODO: remove this variable when widec is supported in every setup? | 25 | # TODO: remove this variable when widec is supported in every setup? |
diff --git a/meta/recipes-core/ncurses/ncurses_6.4.bb b/meta/recipes-core/ncurses/ncurses_6.5.bb index 97130c06d6..2e3ee337ea 100644 --- a/meta/recipes-core/ncurses/ncurses_6.4.bb +++ b/meta/recipes-core/ncurses/ncurses_6.5.bb | |||
@@ -4,16 +4,12 @@ SRC_URI += "file://0001-tic-hang.patch \ | |||
4 | file://0002-configure-reproducible.patch \ | 4 | file://0002-configure-reproducible.patch \ |
5 | file://0003-gen-pkgconfig.in-Do-not-include-LDFLAGS-in-generated.patch \ | 5 | file://0003-gen-pkgconfig.in-Do-not-include-LDFLAGS-in-generated.patch \ |
6 | file://exit_prototype.patch \ | 6 | file://exit_prototype.patch \ |
7 | file://0001-Fix-CVE-2023-29491.patch \ | ||
8 | file://0001-Updating-reset-code-ncurses-6.4-patch-20231104.patch \ | ||
9 | file://CVE-2023-50495.patch \ | ||
10 | file://CVE-2023-45918.patch \ | ||
11 | " | 7 | " |
12 | # commit id corresponds to the revision in package version | 8 | # commit id corresponds to the revision in package version |
13 | SRCREV = "79b9071f2be20a24c7be031655a5638f6032f29f" | 9 | SRCREV = "1c55d64d9d3e00399a21f04e9cac1e472ab5f70a" |
14 | S = "${WORKDIR}/git" | 10 | S = "${WORKDIR}/git" |
15 | EXTRA_OECONF += "--with-abi-version=5" | 11 | EXTRA_OECONF += "--with-abi-version=5" |
16 | UPSTREAM_CHECK_GITTAGREGEX = "(?P<pver>\d+(\.\d+)+)$" | 12 | UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>\d+_\d+)$" |
17 | 13 | ||
18 | # This is needed when using patchlevel versions like 6.1+20181013 | 14 | # This is needed when using patchlevel versions like 6.1+20181013 |
19 | #CVE_VERSION = "${@d.getVar("PV").split('+')[0]}.${@d.getVar("PV").split('+')[1]}" | 15 | #CVE_VERSION = "${@d.getVar("PV").split('+')[0]}.${@d.getVar("PV").split('+')[1]}" |
diff --git a/meta/recipes-core/ncurses/site_config/headers b/meta/recipes-core/ncurses/site_config/headers deleted file mode 100644 index 087b7bfd5e..0000000000 --- a/meta/recipes-core/ncurses/site_config/headers +++ /dev/null | |||
@@ -1,5 +0,0 @@ | |||
1 | curses.h | ||
2 | ncurses/curses.h | ||
3 | ncurses.h | ||
4 | ncurses/termcap.h | ||
5 | |||
diff --git a/meta/recipes-core/newlib/libgloss_git.bb b/meta/recipes-core/newlib/libgloss_git.bb index 7e34e33c7a..3c97a7f296 100644 --- a/meta/recipes-core/newlib/libgloss_git.bb +++ b/meta/recipes-core/newlib/libgloss_git.bb | |||
@@ -6,7 +6,6 @@ FILESEXTRAPATHS:prepend := "${THISDIR}/libgloss:" | |||
6 | 6 | ||
7 | SRC_URI:append = " file://libgloss-build-without-nostdinc.patch" | 7 | SRC_URI:append = " file://libgloss-build-without-nostdinc.patch" |
8 | SRC_URI:append:powerpc = " file://fix-rs6000-crt0.patch" | 8 | SRC_URI:append:powerpc = " file://fix-rs6000-crt0.patch" |
9 | SRC_URI:append:powerpc = " file://fix-rs6000-cflags.patch" | ||
10 | 9 | ||
11 | do_configure() { | 10 | do_configure() { |
12 | ${S}/libgloss/configure ${EXTRA_OECONF} | 11 | ${S}/libgloss/configure ${EXTRA_OECONF} |
diff --git a/meta/recipes-core/newlib/newlib.inc b/meta/recipes-core/newlib/newlib.inc index 6113f5e831..34b0f3f747 100644 --- a/meta/recipes-core/newlib/newlib.inc +++ b/meta/recipes-core/newlib/newlib.inc | |||
@@ -28,6 +28,14 @@ B = "${WORKDIR}/build" | |||
28 | ## disable stdlib | 28 | ## disable stdlib |
29 | TARGET_CC_ARCH:append = " -nostdlib" | 29 | TARGET_CC_ARCH:append = " -nostdlib" |
30 | 30 | ||
31 | # Both the C library and the application should share the same mcmodel. | ||
32 | # Use the medium-any code model for the RISC-V 64 bit implementation, | ||
33 | # since medlow can only access addresses below 0x80000000 and RAM | ||
34 | # starts at 0x80000000 on RISC-V 64 | ||
35 | # Keep RISC-V 32 using -mcmodel=medlow (symbols lie between -2GB:2GB) | ||
36 | TARGET_CFLAGS:append:qemuriscv64 = " -mcmodel=medany" | ||
37 | |||
38 | |||
31 | EXTRA_OECONF = " \ | 39 | EXTRA_OECONF = " \ |
32 | --build=${BUILD_SYS} \ | 40 | --build=${BUILD_SYS} \ |
33 | --target=${TARGET_SYS} \ | 41 | --target=${TARGET_SYS} \ |
diff --git a/meta/recipes-core/psplash/psplash_git.bb b/meta/recipes-core/psplash/psplash_git.bb index c7b96bc6a7..30cf61a2cb 100644 --- a/meta/recipes-core/psplash/psplash_git.bb +++ b/meta/recipes-core/psplash/psplash_git.bb | |||
@@ -103,7 +103,7 @@ python do_compile () { | |||
103 | do_install:append() { | 103 | do_install:append() { |
104 | if ${@bb.utils.contains('DISTRO_FEATURES', 'sysvinit', 'true', 'false', d)}; then | 104 | if ${@bb.utils.contains('DISTRO_FEATURES', 'sysvinit', 'true', 'false', d)}; then |
105 | install -d ${D}${sysconfdir}/init.d/ | 105 | install -d ${D}${sysconfdir}/init.d/ |
106 | install -m 0755 ${WORKDIR}/psplash-init ${D}${sysconfdir}/init.d/psplash.sh | 106 | install -m 0755 ${UNPACKDIR}/psplash-init ${D}${sysconfdir}/init.d/psplash.sh |
107 | 107 | ||
108 | # make fifo for psplash | 108 | # make fifo for psplash |
109 | install -d ${D}/mnt | 109 | install -d ${D}/mnt |
@@ -112,8 +112,8 @@ do_install:append() { | |||
112 | 112 | ||
113 | if ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'true', 'false', d)}; then | 113 | if ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'true', 'false', d)}; then |
114 | install -d ${D}${systemd_system_unitdir} | 114 | install -d ${D}${systemd_system_unitdir} |
115 | install -m 644 ${WORKDIR}/psplash-start.service ${D}/${systemd_system_unitdir} | 115 | install -m 644 ${UNPACKDIR}/psplash-start.service ${D}/${systemd_system_unitdir} |
116 | install -m 644 ${WORKDIR}/psplash-systemd.service ${D}/${systemd_system_unitdir} | 116 | install -m 644 ${UNPACKDIR}/psplash-systemd.service ${D}/${systemd_system_unitdir} |
117 | fi | 117 | fi |
118 | 118 | ||
119 | install -d ${D}${bindir} | 119 | install -d ${D}${bindir} |
diff --git a/meta/recipes-core/systemd/systemd-boot-native_255.4.bb b/meta/recipes-core/systemd/systemd-boot-native_255.6.bb index 73db59b14e..73db59b14e 100644 --- a/meta/recipes-core/systemd/systemd-boot-native_255.4.bb +++ b/meta/recipes-core/systemd/systemd-boot-native_255.6.bb | |||
diff --git a/meta/recipes-core/systemd/systemd-boot_255.4.bb b/meta/recipes-core/systemd/systemd-boot_255.6.bb index 4ee25ee72f..4ee25ee72f 100644 --- a/meta/recipes-core/systemd/systemd-boot_255.4.bb +++ b/meta/recipes-core/systemd/systemd-boot_255.6.bb | |||
diff --git a/meta/recipes-core/systemd/systemd-bootconf_1.00.bb b/meta/recipes-core/systemd/systemd-bootconf_1.00.bb index 19637546a9..0ec49365d0 100644 --- a/meta/recipes-core/systemd/systemd-bootconf_1.00.bb +++ b/meta/recipes-core/systemd/systemd-bootconf_1.00.bb | |||
@@ -7,7 +7,8 @@ PACKAGE_ARCH = "${MACHINE_ARCH}" | |||
7 | 7 | ||
8 | inherit systemd-boot-cfg | 8 | inherit systemd-boot-cfg |
9 | 9 | ||
10 | S = "${WORKDIR}" | 10 | S = "${WORKDIR}/sources" |
11 | UNPACKDIR = "${S}" | ||
11 | 12 | ||
12 | LABELS = "boot" | 13 | LABELS = "boot" |
13 | 14 | ||
diff --git a/meta/recipes-core/systemd/systemd-compat-units.bb b/meta/recipes-core/systemd/systemd-compat-units.bb index c03d97f9c9..b929545823 100644 --- a/meta/recipes-core/systemd/systemd-compat-units.bb +++ b/meta/recipes-core/systemd/systemd-compat-units.bb | |||
@@ -5,7 +5,8 @@ LICENSE = "MIT" | |||
5 | 5 | ||
6 | PACKAGE_WRITE_DEPS += "systemd-systemctl-native" | 6 | PACKAGE_WRITE_DEPS += "systemd-systemctl-native" |
7 | 7 | ||
8 | S = "${WORKDIR}" | 8 | S = "${WORKDIR}/sources" |
9 | UNPACKDIR = "${S}" | ||
9 | 10 | ||
10 | inherit features_check | 11 | inherit features_check |
11 | 12 | ||
diff --git a/meta/recipes-core/systemd/systemd-conf_1.0.bb b/meta/recipes-core/systemd/systemd-conf_1.0.bb index 752ad7c2b7..b8bea0c25b 100644 --- a/meta/recipes-core/systemd/systemd-conf_1.0.bb +++ b/meta/recipes-core/systemd/systemd-conf_1.0.bb | |||
@@ -21,19 +21,22 @@ SRC_URI = "\ | |||
21 | file://wired.network \ | 21 | file://wired.network \ |
22 | " | 22 | " |
23 | 23 | ||
24 | S = "${WORKDIR}/sources" | ||
25 | UNPACKDIR = "${S}" | ||
26 | |||
24 | do_install() { | 27 | do_install() { |
25 | install -D -m0644 ${UNPACKDIR}/journald.conf ${D}${systemd_unitdir}/journald.conf.d/00-${PN}.conf | 28 | install -D -m0644 ${S}/journald.conf ${D}${systemd_unitdir}/journald.conf.d/00-${PN}.conf |
26 | install -D -m0644 ${UNPACKDIR}/logind.conf ${D}${systemd_unitdir}/logind.conf.d/00-${PN}.conf | 29 | install -D -m0644 ${S}/logind.conf ${D}${systemd_unitdir}/logind.conf.d/00-${PN}.conf |
27 | install -D -m0644 ${UNPACKDIR}/system.conf ${D}${systemd_unitdir}/system.conf.d/00-${PN}.conf | 30 | install -D -m0644 ${S}/system.conf ${D}${systemd_unitdir}/system.conf.d/00-${PN}.conf |
28 | 31 | ||
29 | if ${@bb.utils.contains('PACKAGECONFIG', 'dhcp-ethernet', 'true', 'false', d)}; then | 32 | if ${@bb.utils.contains('PACKAGECONFIG', 'dhcp-ethernet', 'true', 'false', d)}; then |
30 | install -D -m0644 ${UNPACKDIR}/wired.network ${D}${systemd_unitdir}/network/80-wired.network | 33 | install -D -m0644 ${S}/wired.network ${D}${systemd_unitdir}/network/80-wired.network |
31 | fi | 34 | fi |
32 | } | 35 | } |
33 | 36 | ||
34 | # Based on change from YP bug 8141, OE commit 5196d7bacaef1076c361adaa2867be31759c1b52 | 37 | # Based on change from YP bug 8141, OE commit 5196d7bacaef1076c361adaa2867be31759c1b52 |
35 | do_install:append:qemuall() { | 38 | do_install:append:qemuall() { |
36 | install -D -m0644 ${UNPACKDIR}/system.conf-qemuall ${D}${systemd_unitdir}/system.conf.d/01-${PN}.conf | 39 | install -D -m0644 ${S}/system.conf-qemuall ${D}${systemd_unitdir}/system.conf.d/01-${PN}.conf |
37 | } | 40 | } |
38 | 41 | ||
39 | PACKAGE_ARCH = "${MACHINE_ARCH}" | 42 | PACKAGE_ARCH = "${MACHINE_ARCH}" |
diff --git a/meta/recipes-core/systemd/systemd-serialgetty.bb b/meta/recipes-core/systemd/systemd-serialgetty.bb index 987a8bf89d..9035b8c335 100644 --- a/meta/recipes-core/systemd/systemd-serialgetty.bb +++ b/meta/recipes-core/systemd/systemd-serialgetty.bb | |||
@@ -9,7 +9,8 @@ SERIAL_TERM ?= "linux" | |||
9 | 9 | ||
10 | SRC_URI = "file://serial-getty@.service" | 10 | SRC_URI = "file://serial-getty@.service" |
11 | 11 | ||
12 | S = "${WORKDIR}" | 12 | S = "${WORKDIR}/sources" |
13 | UNPACKDIR = "${S}" | ||
13 | 14 | ||
14 | # As this package is tied to systemd, only build it when we're also building systemd. | 15 | # As this package is tied to systemd, only build it when we're also building systemd. |
15 | inherit features_check | 16 | inherit features_check |
diff --git a/meta/recipes-core/systemd/systemd-systemctl-native.bb b/meta/recipes-core/systemd/systemd-systemctl-native.bb index 0036e4dd8b..ffa024caef 100644 --- a/meta/recipes-core/systemd/systemd-systemctl-native.bb +++ b/meta/recipes-core/systemd/systemd-systemctl-native.bb | |||
@@ -8,7 +8,8 @@ inherit native | |||
8 | 8 | ||
9 | SRC_URI = "file://systemctl" | 9 | SRC_URI = "file://systemctl" |
10 | 10 | ||
11 | S = "${WORKDIR}" | 11 | S = "${WORKDIR}/sources" |
12 | UNPACKDIR = "${S}" | ||
12 | 13 | ||
13 | do_install() { | 14 | do_install() { |
14 | install -d ${D}${bindir} | 15 | install -d ${D}${bindir} |
diff --git a/meta/recipes-core/systemd/systemd.inc b/meta/recipes-core/systemd/systemd.inc index a35db5091e..8bda47dd38 100644 --- a/meta/recipes-core/systemd/systemd.inc +++ b/meta/recipes-core/systemd/systemd.inc | |||
@@ -15,7 +15,7 @@ LICENSE:libsystemd = "LGPL-2.1-or-later" | |||
15 | LIC_FILES_CHKSUM = "file://LICENSE.GPL2;md5=751419260aa954499f7abaabaa882bbe \ | 15 | LIC_FILES_CHKSUM = "file://LICENSE.GPL2;md5=751419260aa954499f7abaabaa882bbe \ |
16 | file://LICENSE.LGPL2.1;md5=4fbd65380cdd255951079008b364516c" | 16 | file://LICENSE.LGPL2.1;md5=4fbd65380cdd255951079008b364516c" |
17 | 17 | ||
18 | SRCREV = "387a14a7b67b8b76adaed4175e14bb7e39b2f738" | 18 | SRCREV = "5810c25792d4268282dd3892af1a253b690423c9" |
19 | SRCBRANCH = "v255-stable" | 19 | SRCBRANCH = "v255-stable" |
20 | SRC_URI = "git://github.com/systemd/systemd-stable.git;protocol=https;branch=${SRCBRANCH}" | 20 | SRC_URI = "git://github.com/systemd/systemd-stable.git;protocol=https;branch=${SRCBRANCH}" |
21 | 21 | ||
diff --git a/meta/recipes-core/systemd/systemd/0001-missing_type.h-add-comparison_fn_t.patch b/meta/recipes-core/systemd/systemd/0001-missing_type.h-add-comparison_fn_t.patch index 2aa5dee6b5..49871d16a8 100644 --- a/meta/recipes-core/systemd/systemd/0001-missing_type.h-add-comparison_fn_t.patch +++ b/meta/recipes-core/systemd/systemd/0001-missing_type.h-add-comparison_fn_t.patch | |||
@@ -1,7 +1,7 @@ | |||
1 | From 01195eb9f7d59139fb45df506ac6b3968c14a57f Mon Sep 17 00:00:00 2001 | 1 | From d4b0bb9c7651017985fdd75469f37c34ce2f2c50 Mon Sep 17 00:00:00 2001 |
2 | From: Chen Qi <Qi.Chen@windriver.com> | 2 | From: Chen Qi <Qi.Chen@windriver.com> |
3 | Date: Mon, 25 Feb 2019 13:55:12 +0800 | 3 | Date: Mon, 25 Feb 2019 13:55:12 +0800 |
4 | Subject: [PATCH 01/22] missing_type.h: add comparison_fn_t | 4 | Subject: [PATCH] missing_type.h: add comparison_fn_t |
5 | 5 | ||
6 | Make it work with musl where comparison_fn_t and is not provided. | 6 | Make it work with musl where comparison_fn_t and is not provided. |
7 | 7 | ||
@@ -56,6 +56,3 @@ index ae91534198..7f67eea38b 100644 | |||
56 | 56 | ||
57 | const char * const catalog_file_dirs[] = { | 57 | const char * const catalog_file_dirs[] = { |
58 | "/usr/local/lib/systemd/catalog/", | 58 | "/usr/local/lib/systemd/catalog/", |
59 | -- | ||
60 | 2.34.1 | ||
61 | |||
diff --git a/meta/recipes-core/systemd/systemd/0002-add-fallback-parse_printf_format-implementation.patch b/meta/recipes-core/systemd/systemd/0002-add-fallback-parse_printf_format-implementation.patch index 900a931632..593d6261e1 100644 --- a/meta/recipes-core/systemd/systemd/0002-add-fallback-parse_printf_format-implementation.patch +++ b/meta/recipes-core/systemd/systemd/0002-add-fallback-parse_printf_format-implementation.patch | |||
@@ -1,7 +1,7 @@ | |||
1 | From 872b72739e62123867ce6c4f82aa37de24cc3f75 Mon Sep 17 00:00:00 2001 | 1 | From 776913624aaf696a3b2920b8d7506b3aae042bf1 Mon Sep 17 00:00:00 2001 |
2 | From: Alexander Kanavin <alex.kanavin@gmail.com> | 2 | From: Alexander Kanavin <alex.kanavin@gmail.com> |
3 | Date: Sat, 22 May 2021 20:26:24 +0200 | 3 | Date: Sat, 22 May 2021 20:26:24 +0200 |
4 | Subject: [PATCH 02/22] add fallback parse_printf_format implementation | 4 | Subject: [PATCH] add fallback parse_printf_format implementation |
5 | 5 | ||
6 | Upstream-Status: Inappropriate [musl specific] | 6 | Upstream-Status: Inappropriate [musl specific] |
7 | 7 | ||
@@ -22,10 +22,10 @@ Signed-off-by: Scott Murray <scott.murray@konsulko.com> | |||
22 | create mode 100644 src/basic/parse-printf-format.h | 22 | create mode 100644 src/basic/parse-printf-format.h |
23 | 23 | ||
24 | diff --git a/meson.build b/meson.build | 24 | diff --git a/meson.build b/meson.build |
25 | index 7419e2b0b0..01fd3ffc19 100644 | 25 | index 187e7b216d..c4694a9ddc 100644 |
26 | --- a/meson.build | 26 | --- a/meson.build |
27 | +++ b/meson.build | 27 | +++ b/meson.build |
28 | @@ -725,6 +725,7 @@ endif | 28 | @@ -727,6 +727,7 @@ endif |
29 | foreach header : ['crypt.h', | 29 | foreach header : ['crypt.h', |
30 | 'linux/memfd.h', | 30 | 'linux/memfd.h', |
31 | 'linux/vm_sockets.h', | 31 | 'linux/vm_sockets.h', |
@@ -34,7 +34,7 @@ index 7419e2b0b0..01fd3ffc19 100644 | |||
34 | 'threads.h', | 34 | 'threads.h', |
35 | 'valgrind/memcheck.h', | 35 | 'valgrind/memcheck.h', |
36 | diff --git a/src/basic/meson.build b/src/basic/meson.build | 36 | diff --git a/src/basic/meson.build b/src/basic/meson.build |
37 | index d7450d8b44..c3e3daf4bd 100644 | 37 | index 111253e3a5..bdaa2fc5e4 100644 |
38 | --- a/src/basic/meson.build | 38 | --- a/src/basic/meson.build |
39 | +++ b/src/basic/meson.build | 39 | +++ b/src/basic/meson.build |
40 | @@ -183,6 +183,11 @@ endforeach | 40 | @@ -183,6 +183,11 @@ endforeach |
@@ -429,6 +429,3 @@ index be23b2fe75..69a2eb6404 100644 | |||
429 | 429 | ||
430 | #define SNDBUF_SIZE (8*1024*1024) | 430 | #define SNDBUF_SIZE (8*1024*1024) |
431 | 431 | ||
432 | -- | ||
433 | 2.34.1 | ||
434 | |||
diff --git a/meta/recipes-core/systemd/systemd/0002-binfmt-Don-t-install-dependency-links-at-install-tim.patch b/meta/recipes-core/systemd/systemd/0002-binfmt-Don-t-install-dependency-links-at-install-tim.patch index be231cf6b2..4c73c924ef 100644 --- a/meta/recipes-core/systemd/systemd/0002-binfmt-Don-t-install-dependency-links-at-install-tim.patch +++ b/meta/recipes-core/systemd/systemd/0002-binfmt-Don-t-install-dependency-links-at-install-tim.patch | |||
@@ -1,8 +1,8 @@ | |||
1 | From 29a58009a172e369ad7166e16dab2f4945c6b0d2 Mon Sep 17 00:00:00 2001 | 1 | From 661a34c3de30d4ff4a2a62c9a0573c6ce55424e4 Mon Sep 17 00:00:00 2001 |
2 | From: Chen Qi <Qi.Chen@windriver.com> | 2 | From: Chen Qi <Qi.Chen@windriver.com> |
3 | Date: Thu, 21 Feb 2019 16:23:24 +0800 | 3 | Date: Thu, 21 Feb 2019 16:23:24 +0800 |
4 | Subject: [PATCH 1/2] binfmt: Don't install dependency links at install time | 4 | Subject: [PATCH] binfmt: Don't install dependency links at install time for |
5 | for the binfmt services | 5 | the binfmt services |
6 | 6 | ||
7 | use [Install] blocks so that they get created when the service is enabled | 7 | use [Install] blocks so that they get created when the service is enabled |
8 | like a traditional service. | 8 | like a traditional service. |
@@ -74,6 +74,3 @@ index 6861c76674..531e9fbd90 100644 | |||
74 | + | 74 | + |
75 | +[Install] | 75 | +[Install] |
76 | +WantedBy=sysinit.target | 76 | +WantedBy=sysinit.target |
77 | -- | ||
78 | 2.34.1 | ||
79 | |||
diff --git a/meta/recipes-core/systemd/systemd/0003-src-basic-missing.h-check-for-missing-strndupa.patch b/meta/recipes-core/systemd/systemd/0003-src-basic-missing.h-check-for-missing-strndupa.patch index 5595b5bc23..f09f2114d3 100644 --- a/meta/recipes-core/systemd/systemd/0003-src-basic-missing.h-check-for-missing-strndupa.patch +++ b/meta/recipes-core/systemd/systemd/0003-src-basic-missing.h-check-for-missing-strndupa.patch | |||
@@ -1,7 +1,7 @@ | |||
1 | From 87f1d38f40c5fe9cadf2b2de442473e4e5605788 Mon Sep 17 00:00:00 2001 | 1 | From 843c6c94bf6a29eaceeefafa420cd86a32e844d2 Mon Sep 17 00:00:00 2001 |
2 | From: Chen Qi <Qi.Chen@windriver.com> | 2 | From: Chen Qi <Qi.Chen@windriver.com> |
3 | Date: Mon, 25 Feb 2019 14:18:21 +0800 | 3 | Date: Mon, 25 Feb 2019 14:18:21 +0800 |
4 | Subject: [PATCH 03/22] src/basic/missing.h: check for missing strndupa | 4 | Subject: [PATCH] src/basic/missing.h: check for missing strndupa |
5 | 5 | ||
6 | include missing.h for definition of strndupa | 6 | include missing.h for definition of strndupa |
7 | 7 | ||
@@ -75,10 +75,10 @@ Signed-off-by: Chen Qi <Qi.Chen@windriver.com> | |||
75 | 51 files changed, 62 insertions(+) | 75 | 51 files changed, 62 insertions(+) |
76 | 76 | ||
77 | diff --git a/meson.build b/meson.build | 77 | diff --git a/meson.build b/meson.build |
78 | index 01fd3ffc19..61a872b753 100644 | 78 | index c4694a9ddc..d376b76afa 100644 |
79 | --- a/meson.build | 79 | --- a/meson.build |
80 | +++ b/meson.build | 80 | +++ b/meson.build |
81 | @@ -567,6 +567,7 @@ foreach ident : ['secure_getenv', '__secure_getenv'] | 81 | @@ -569,6 +569,7 @@ foreach ident : ['secure_getenv', '__secure_getenv'] |
82 | endforeach | 82 | endforeach |
83 | 83 | ||
84 | foreach ident : [ | 84 | foreach ident : [ |
@@ -87,7 +87,7 @@ index 01fd3ffc19..61a872b753 100644 | |||
87 | ['gettid', '''#include <sys/types.h> | 87 | ['gettid', '''#include <sys/types.h> |
88 | #include <unistd.h>'''], | 88 | #include <unistd.h>'''], |
89 | diff --git a/src/backlight/backlight.c b/src/backlight/backlight.c | 89 | diff --git a/src/backlight/backlight.c b/src/backlight/backlight.c |
90 | index 5ac9f904a9..99d5122dd7 100644 | 90 | index b2032adaa5..ee9201826d 100644 |
91 | --- a/src/backlight/backlight.c | 91 | --- a/src/backlight/backlight.c |
92 | +++ b/src/backlight/backlight.c | 92 | +++ b/src/backlight/backlight.c |
93 | @@ -20,6 +20,7 @@ | 93 | @@ -20,6 +20,7 @@ |
@@ -111,7 +111,7 @@ index 18b16ecc0e..d2be79622f 100644 | |||
111 | static int cg_enumerate_items(const char *controller, const char *path, FILE **ret, const char *item) { | 111 | static int cg_enumerate_items(const char *controller, const char *path, FILE **ret, const char *item) { |
112 | _cleanup_free_ char *fs = NULL; | 112 | _cleanup_free_ char *fs = NULL; |
113 | diff --git a/src/basic/env-util.c b/src/basic/env-util.c | 113 | diff --git a/src/basic/env-util.c b/src/basic/env-util.c |
114 | index d3bf73385f..16b17358ca 100644 | 114 | index a97651d7af..09fa601250 100644 |
115 | --- a/src/basic/env-util.c | 115 | --- a/src/basic/env-util.c |
116 | +++ b/src/basic/env-util.c | 116 | +++ b/src/basic/env-util.c |
117 | @@ -19,6 +19,7 @@ | 117 | @@ -19,6 +19,7 @@ |
@@ -123,7 +123,7 @@ index d3bf73385f..16b17358ca 100644 | |||
123 | /* We follow bash for the character set. Different shells have different rules. */ | 123 | /* We follow bash for the character set. Different shells have different rules. */ |
124 | #define VALID_BASH_ENV_NAME_CHARS \ | 124 | #define VALID_BASH_ENV_NAME_CHARS \ |
125 | diff --git a/src/basic/log.c b/src/basic/log.c | 125 | diff --git a/src/basic/log.c b/src/basic/log.c |
126 | index 1470611a75..9924ec2b9a 100644 | 126 | index 7a443005f6..eed69a48ee 100644 |
127 | --- a/src/basic/log.c | 127 | --- a/src/basic/log.c |
128 | +++ b/src/basic/log.c | 128 | +++ b/src/basic/log.c |
129 | @@ -40,6 +40,7 @@ | 129 | @@ -40,6 +40,7 @@ |
@@ -251,7 +251,7 @@ index f9014dc560..1d7840a5b5 100644 | |||
251 | static clockid_t map_clock_id(clockid_t c) { | 251 | static clockid_t map_clock_id(clockid_t c) { |
252 | 252 | ||
253 | diff --git a/src/boot/bless-boot.c b/src/boot/bless-boot.c | 253 | diff --git a/src/boot/bless-boot.c b/src/boot/bless-boot.c |
254 | index 0c0b4f23c7..68fe5ca509 100644 | 254 | index 12dfdf76fa..e66332519a 100644 |
255 | --- a/src/boot/bless-boot.c | 255 | --- a/src/boot/bless-boot.c |
256 | +++ b/src/boot/bless-boot.c | 256 | +++ b/src/boot/bless-boot.c |
257 | @@ -22,6 +22,7 @@ | 257 | @@ -22,6 +22,7 @@ |
@@ -263,7 +263,7 @@ index 0c0b4f23c7..68fe5ca509 100644 | |||
263 | static char **arg_path = NULL; | 263 | static char **arg_path = NULL; |
264 | 264 | ||
265 | diff --git a/src/core/dbus-cgroup.c b/src/core/dbus-cgroup.c | 265 | diff --git a/src/core/dbus-cgroup.c b/src/core/dbus-cgroup.c |
266 | index 4237e694c0..05f9d9d9a9 100644 | 266 | index 8a9570fd21..ac4202e5ce 100644 |
267 | --- a/src/core/dbus-cgroup.c | 267 | --- a/src/core/dbus-cgroup.c |
268 | +++ b/src/core/dbus-cgroup.c | 268 | +++ b/src/core/dbus-cgroup.c |
269 | @@ -25,6 +25,7 @@ | 269 | @@ -25,6 +25,7 @@ |
@@ -275,7 +275,7 @@ index 4237e694c0..05f9d9d9a9 100644 | |||
275 | 275 | ||
276 | BUS_DEFINE_PROPERTY_GET(bus_property_get_tasks_max, "t", CGroupTasksMax, cgroup_tasks_max_resolve); | 276 | BUS_DEFINE_PROPERTY_GET(bus_property_get_tasks_max, "t", CGroupTasksMax, cgroup_tasks_max_resolve); |
277 | diff --git a/src/core/dbus-execute.c b/src/core/dbus-execute.c | 277 | diff --git a/src/core/dbus-execute.c b/src/core/dbus-execute.c |
278 | index 4daa1cefd3..2c77901471 100644 | 278 | index 2d05ba7e1d..61a7de0037 100644 |
279 | --- a/src/core/dbus-execute.c | 279 | --- a/src/core/dbus-execute.c |
280 | +++ b/src/core/dbus-execute.c | 280 | +++ b/src/core/dbus-execute.c |
281 | @@ -42,6 +42,7 @@ | 281 | @@ -42,6 +42,7 @@ |
@@ -287,10 +287,10 @@ index 4daa1cefd3..2c77901471 100644 | |||
287 | BUS_DEFINE_PROPERTY_GET_ENUM(bus_property_get_exec_output, exec_output, ExecOutput); | 287 | BUS_DEFINE_PROPERTY_GET_ENUM(bus_property_get_exec_output, exec_output, ExecOutput); |
288 | static BUS_DEFINE_PROPERTY_GET_ENUM(property_get_exec_input, exec_input, ExecInput); | 288 | static BUS_DEFINE_PROPERTY_GET_ENUM(property_get_exec_input, exec_input, ExecInput); |
289 | diff --git a/src/core/dbus-util.c b/src/core/dbus-util.c | 289 | diff --git a/src/core/dbus-util.c b/src/core/dbus-util.c |
290 | index d680a64268..e59f48103e 100644 | 290 | index 7bb026af48..a86128e40c 100644 |
291 | --- a/src/core/dbus-util.c | 291 | --- a/src/core/dbus-util.c |
292 | +++ b/src/core/dbus-util.c | 292 | +++ b/src/core/dbus-util.c |
293 | @@ -9,6 +9,7 @@ | 293 | @@ -10,6 +10,7 @@ |
294 | #include "unit-printf.h" | 294 | #include "unit-printf.h" |
295 | #include "user-util.h" | 295 | #include "user-util.h" |
296 | #include "unit.h" | 296 | #include "unit.h" |
@@ -299,7 +299,7 @@ index d680a64268..e59f48103e 100644 | |||
299 | int bus_property_get_triggered_unit( | 299 | int bus_property_get_triggered_unit( |
300 | sd_bus *bus, | 300 | sd_bus *bus, |
301 | diff --git a/src/core/execute.c b/src/core/execute.c | 301 | diff --git a/src/core/execute.c b/src/core/execute.c |
302 | index ef0bf88687..bd3da0c401 100644 | 302 | index 8dbdfcf369..531bec4c92 100644 |
303 | --- a/src/core/execute.c | 303 | --- a/src/core/execute.c |
304 | +++ b/src/core/execute.c | 304 | +++ b/src/core/execute.c |
305 | @@ -72,6 +72,7 @@ | 305 | @@ -72,6 +72,7 @@ |
@@ -323,7 +323,7 @@ index b8e3f7aadd..8ce8ca68d8 100644 | |||
323 | #if HAVE_KMOD | 323 | #if HAVE_KMOD |
324 | #include "module-util.h" | 324 | #include "module-util.h" |
325 | diff --git a/src/core/service.c b/src/core/service.c | 325 | diff --git a/src/core/service.c b/src/core/service.c |
326 | index b9eb40c555..268fe7573b 100644 | 326 | index 42fffbbd67..80a8d05a19 100644 |
327 | --- a/src/core/service.c | 327 | --- a/src/core/service.c |
328 | +++ b/src/core/service.c | 328 | +++ b/src/core/service.c |
329 | @@ -45,6 +45,7 @@ | 329 | @@ -45,6 +45,7 @@ |
@@ -359,7 +359,7 @@ index 016f3baa7f..b1def81313 100644 | |||
359 | typedef enum MountPointFlags { | 359 | typedef enum MountPointFlags { |
360 | MOUNT_NOAUTO = 1 << 0, | 360 | MOUNT_NOAUTO = 1 << 0, |
361 | diff --git a/src/journal-remote/journal-remote-main.c b/src/journal-remote/journal-remote-main.c | 361 | diff --git a/src/journal-remote/journal-remote-main.c b/src/journal-remote/journal-remote-main.c |
362 | index da0f20d3ce..f22ce41908 100644 | 362 | index 2d380bc7a7..d3f5612728 100644 |
363 | --- a/src/journal-remote/journal-remote-main.c | 363 | --- a/src/journal-remote/journal-remote-main.c |
364 | +++ b/src/journal-remote/journal-remote-main.c | 364 | +++ b/src/journal-remote/journal-remote-main.c |
365 | @@ -27,6 +27,7 @@ | 365 | @@ -27,6 +27,7 @@ |
@@ -371,7 +371,7 @@ index da0f20d3ce..f22ce41908 100644 | |||
371 | #define PRIV_KEY_FILE CERTIFICATE_ROOT "/private/journal-remote.pem" | 371 | #define PRIV_KEY_FILE CERTIFICATE_ROOT "/private/journal-remote.pem" |
372 | #define CERT_FILE CERTIFICATE_ROOT "/certs/journal-remote.pem" | 372 | #define CERT_FILE CERTIFICATE_ROOT "/certs/journal-remote.pem" |
373 | diff --git a/src/journal/journalctl.c b/src/journal/journalctl.c | 373 | diff --git a/src/journal/journalctl.c b/src/journal/journalctl.c |
374 | index 7f3dcd56a4..41b7cbaaf1 100644 | 374 | index 87e2f28841..58275f41f1 100644 |
375 | --- a/src/journal/journalctl.c | 375 | --- a/src/journal/journalctl.c |
376 | +++ b/src/journal/journalctl.c | 376 | +++ b/src/journal/journalctl.c |
377 | @@ -77,6 +77,7 @@ | 377 | @@ -77,6 +77,7 @@ |
@@ -383,7 +383,7 @@ index 7f3dcd56a4..41b7cbaaf1 100644 | |||
383 | #define DEFAULT_FSS_INTERVAL_USEC (15*USEC_PER_MINUTE) | 383 | #define DEFAULT_FSS_INTERVAL_USEC (15*USEC_PER_MINUTE) |
384 | #define PROCESS_INOTIFY_INTERVAL 1024 /* Every 1,024 messages processed */ | 384 | #define PROCESS_INOTIFY_INTERVAL 1024 /* Every 1,024 messages processed */ |
385 | diff --git a/src/libsystemd/sd-bus/bus-message.c b/src/libsystemd/sd-bus/bus-message.c | 385 | diff --git a/src/libsystemd/sd-bus/bus-message.c b/src/libsystemd/sd-bus/bus-message.c |
386 | index ff0228081f..9066fcb133 100644 | 386 | index ab8b06896d..43f9131205 100644 |
387 | --- a/src/libsystemd/sd-bus/bus-message.c | 387 | --- a/src/libsystemd/sd-bus/bus-message.c |
388 | +++ b/src/libsystemd/sd-bus/bus-message.c | 388 | +++ b/src/libsystemd/sd-bus/bus-message.c |
389 | @@ -19,6 +19,7 @@ | 389 | @@ -19,6 +19,7 @@ |
@@ -407,7 +407,7 @@ index c25c40ff37..57a5da704f 100644 | |||
407 | static int node_vtable_get_userdata( | 407 | static int node_vtable_get_userdata( |
408 | sd_bus *bus, | 408 | sd_bus *bus, |
409 | diff --git a/src/libsystemd/sd-bus/bus-socket.c b/src/libsystemd/sd-bus/bus-socket.c | 409 | diff --git a/src/libsystemd/sd-bus/bus-socket.c b/src/libsystemd/sd-bus/bus-socket.c |
410 | index 3c59d0d615..746922d46f 100644 | 410 | index 5ade8e99aa..7553cf319d 100644 |
411 | --- a/src/libsystemd/sd-bus/bus-socket.c | 411 | --- a/src/libsystemd/sd-bus/bus-socket.c |
412 | +++ b/src/libsystemd/sd-bus/bus-socket.c | 412 | +++ b/src/libsystemd/sd-bus/bus-socket.c |
413 | @@ -29,6 +29,7 @@ | 413 | @@ -29,6 +29,7 @@ |
@@ -419,7 +419,7 @@ index 3c59d0d615..746922d46f 100644 | |||
419 | #define SNDBUF_SIZE (8*1024*1024) | 419 | #define SNDBUF_SIZE (8*1024*1024) |
420 | 420 | ||
421 | diff --git a/src/libsystemd/sd-bus/sd-bus.c b/src/libsystemd/sd-bus/sd-bus.c | 421 | diff --git a/src/libsystemd/sd-bus/sd-bus.c b/src/libsystemd/sd-bus/sd-bus.c |
422 | index 4a0259f8bb..aaa90d2223 100644 | 422 | index 8befc97460..6ee4d4f595 100644 |
423 | --- a/src/libsystemd/sd-bus/sd-bus.c | 423 | --- a/src/libsystemd/sd-bus/sd-bus.c |
424 | +++ b/src/libsystemd/sd-bus/sd-bus.c | 424 | +++ b/src/libsystemd/sd-bus/sd-bus.c |
425 | @@ -46,6 +46,7 @@ | 425 | @@ -46,6 +46,7 @@ |
@@ -443,7 +443,7 @@ index d988588de0..458df8df9a 100644 | |||
443 | #define MAX_SIZE (2*1024*1024) | 443 | #define MAX_SIZE (2*1024*1024) |
444 | 444 | ||
445 | diff --git a/src/libsystemd/sd-journal/sd-journal.c b/src/libsystemd/sd-journal/sd-journal.c | 445 | diff --git a/src/libsystemd/sd-journal/sd-journal.c b/src/libsystemd/sd-journal/sd-journal.c |
446 | index 6b9ff0a4ed..4a5027ad0f 100644 | 446 | index 7a1dd2569f..d187baad47 100644 |
447 | --- a/src/libsystemd/sd-journal/sd-journal.c | 447 | --- a/src/libsystemd/sd-journal/sd-journal.c |
448 | +++ b/src/libsystemd/sd-journal/sd-journal.c | 448 | +++ b/src/libsystemd/sd-journal/sd-journal.c |
449 | @@ -44,6 +44,7 @@ | 449 | @@ -44,6 +44,7 @@ |
@@ -455,7 +455,7 @@ index 6b9ff0a4ed..4a5027ad0f 100644 | |||
455 | #define JOURNAL_FILES_RECHECK_USEC (2 * USEC_PER_SEC) | 455 | #define JOURNAL_FILES_RECHECK_USEC (2 * USEC_PER_SEC) |
456 | 456 | ||
457 | diff --git a/src/login/pam_systemd.c b/src/login/pam_systemd.c | 457 | diff --git a/src/login/pam_systemd.c b/src/login/pam_systemd.c |
458 | index b8da266e27..4bb8dd9496 100644 | 458 | index bf45974ca5..2cb7e930c0 100644 |
459 | --- a/src/login/pam_systemd.c | 459 | --- a/src/login/pam_systemd.c |
460 | +++ b/src/login/pam_systemd.c | 460 | +++ b/src/login/pam_systemd.c |
461 | @@ -35,6 +35,7 @@ | 461 | @@ -35,6 +35,7 @@ |
@@ -503,10 +503,10 @@ index c64e79bdff..eda26b0b9a 100644 | |||
503 | static void setup_logging_once(void) { | 503 | static void setup_logging_once(void) { |
504 | static pthread_once_t once = PTHREAD_ONCE_INIT; | 504 | static pthread_once_t once = PTHREAD_ONCE_INIT; |
505 | diff --git a/src/portable/portable.c b/src/portable/portable.c | 505 | diff --git a/src/portable/portable.c b/src/portable/portable.c |
506 | index d4b448a627..bb26623565 100644 | 506 | index 3b2a37912f..835a3fdeaa 100644 |
507 | --- a/src/portable/portable.c | 507 | --- a/src/portable/portable.c |
508 | +++ b/src/portable/portable.c | 508 | +++ b/src/portable/portable.c |
509 | @@ -40,6 +40,7 @@ | 509 | @@ -42,6 +42,7 @@ |
510 | #include "strv.h" | 510 | #include "strv.h" |
511 | #include "tmpfile-util.h" | 511 | #include "tmpfile-util.h" |
512 | #include "user-util.h" | 512 | #include "user-util.h" |
@@ -551,7 +551,7 @@ index 8b462b5627..183ce1c18e 100644 | |||
551 | struct CGroupInfo { | 551 | struct CGroupInfo { |
552 | char *cgroup_path; | 552 | char *cgroup_path; |
553 | diff --git a/src/shared/bus-unit-util.c b/src/shared/bus-unit-util.c | 553 | diff --git a/src/shared/bus-unit-util.c b/src/shared/bus-unit-util.c |
554 | index 4ee9706847..30c8084847 100644 | 554 | index 50de98941f..d47beccb0b 100644 |
555 | --- a/src/shared/bus-unit-util.c | 555 | --- a/src/shared/bus-unit-util.c |
556 | +++ b/src/shared/bus-unit-util.c | 556 | +++ b/src/shared/bus-unit-util.c |
557 | @@ -50,6 +50,7 @@ | 557 | @@ -50,6 +50,7 @@ |
@@ -599,7 +599,7 @@ index 83e9834bbf..74eaae6f5e 100644 | |||
599 | enum { | 599 | enum { |
600 | IMPORTER_STATE_LINE = 0, /* waiting to read, or reading line */ | 600 | IMPORTER_STATE_LINE = 0, /* waiting to read, or reading line */ |
601 | diff --git a/src/shared/logs-show.c b/src/shared/logs-show.c | 601 | diff --git a/src/shared/logs-show.c b/src/shared/logs-show.c |
602 | index a5d04003bd..10392c132d 100644 | 602 | index 0a31be382f..92d629e7e0 100644 |
603 | --- a/src/shared/logs-show.c | 603 | --- a/src/shared/logs-show.c |
604 | +++ b/src/shared/logs-show.c | 604 | +++ b/src/shared/logs-show.c |
605 | @@ -41,6 +41,7 @@ | 605 | @@ -41,6 +41,7 @@ |
@@ -683,7 +683,7 @@ index ed22c8b679..19ebe20237 100644 | |||
683 | UdevEvent *udev_event_new(sd_device *dev, usec_t exec_delay_usec, sd_netlink *rtnl, int log_level) { | 683 | UdevEvent *udev_event_new(sd_device *dev, usec_t exec_delay_usec, sd_netlink *rtnl, int log_level) { |
684 | UdevEvent *event; | 684 | UdevEvent *event; |
685 | diff --git a/src/udev/udev-rules.c b/src/udev/udev-rules.c | 685 | diff --git a/src/udev/udev-rules.c b/src/udev/udev-rules.c |
686 | index 5f12002394..febe345b4c 100644 | 686 | index c242549654..e5b8df5c2d 100644 |
687 | --- a/src/udev/udev-rules.c | 687 | --- a/src/udev/udev-rules.c |
688 | +++ b/src/udev/udev-rules.c | 688 | +++ b/src/udev/udev-rules.c |
689 | @@ -41,6 +41,7 @@ | 689 | @@ -41,6 +41,7 @@ |
@@ -694,6 +694,3 @@ index 5f12002394..febe345b4c 100644 | |||
694 | 694 | ||
695 | #define RULES_DIRS ((const char* const*) CONF_PATHS_STRV("udev/rules.d")) | 695 | #define RULES_DIRS ((const char* const*) CONF_PATHS_STRV("udev/rules.d")) |
696 | 696 | ||
697 | -- | ||
698 | 2.34.1 | ||
699 | |||
diff --git a/meta/recipes-core/systemd/systemd/0004-don-t-fail-if-GLOB_BRACE-and-GLOB_ALTDIRFUNC-is-not-.patch b/meta/recipes-core/systemd/systemd/0004-don-t-fail-if-GLOB_BRACE-and-GLOB_ALTDIRFUNC-is-not-.patch index 15877bea88..15d25e9f19 100644 --- a/meta/recipes-core/systemd/systemd/0004-don-t-fail-if-GLOB_BRACE-and-GLOB_ALTDIRFUNC-is-not-.patch +++ b/meta/recipes-core/systemd/systemd/0004-don-t-fail-if-GLOB_BRACE-and-GLOB_ALTDIRFUNC-is-not-.patch | |||
@@ -1,8 +1,7 @@ | |||
1 | From 5325ab5813617f35f03806ec420829dde7104387 Mon Sep 17 00:00:00 2001 | 1 | From 2903e42c7cd727a2374d21b78950308de792ebe8 Mon Sep 17 00:00:00 2001 |
2 | From: Chen Qi <Qi.Chen@windriver.com> | 2 | From: Chen Qi <Qi.Chen@windriver.com> |
3 | Date: Mon, 25 Feb 2019 14:56:21 +0800 | 3 | Date: Mon, 25 Feb 2019 14:56:21 +0800 |
4 | Subject: [PATCH 04/22] don't fail if GLOB_BRACE and GLOB_ALTDIRFUNC is not | 4 | Subject: [PATCH] don't fail if GLOB_BRACE and GLOB_ALTDIRFUNC is not defined |
5 | defined | ||
6 | 5 | ||
7 | If the standard library doesn't provide brace | 6 | If the standard library doesn't provide brace |
8 | expansion users just won't get it. | 7 | expansion users just won't get it. |
@@ -115,7 +114,7 @@ index 9b3e73cce0..3790ba3be5 100644 | |||
115 | 114 | ||
116 | (void) rm_rf(template, REMOVE_ROOT|REMOVE_PHYSICAL); | 115 | (void) rm_rf(template, REMOVE_ROOT|REMOVE_PHYSICAL); |
117 | diff --git a/src/tmpfiles/tmpfiles.c b/src/tmpfiles/tmpfiles.c | 116 | diff --git a/src/tmpfiles/tmpfiles.c b/src/tmpfiles/tmpfiles.c |
118 | index 230ec09b97..2cc5f391d7 100644 | 117 | index 6a1c7725a1..138b3ac561 100644 |
119 | --- a/src/tmpfiles/tmpfiles.c | 118 | --- a/src/tmpfiles/tmpfiles.c |
120 | +++ b/src/tmpfiles/tmpfiles.c | 119 | +++ b/src/tmpfiles/tmpfiles.c |
121 | @@ -73,6 +73,12 @@ | 120 | @@ -73,6 +73,12 @@ |
@@ -131,7 +130,7 @@ index 230ec09b97..2cc5f391d7 100644 | |||
131 | /* This reads all files listed in /etc/tmpfiles.d/?*.conf and creates | 130 | /* This reads all files listed in /etc/tmpfiles.d/?*.conf and creates |
132 | * them in the file system. This is intended to be used to create | 131 | * them in the file system. This is intended to be used to create |
133 | * properly owned directories beneath /tmp, /var/tmp, /run, which are | 132 | * properly owned directories beneath /tmp, /var/tmp, /run, which are |
134 | @@ -2434,7 +2440,9 @@ finish: | 133 | @@ -2426,7 +2432,9 @@ finish: |
135 | 134 | ||
136 | static int glob_item(Context *c, Item *i, action_t action) { | 135 | static int glob_item(Context *c, Item *i, action_t action) { |
137 | _cleanup_globfree_ glob_t g = { | 136 | _cleanup_globfree_ glob_t g = { |
@@ -141,7 +140,7 @@ index 230ec09b97..2cc5f391d7 100644 | |||
141 | }; | 140 | }; |
142 | int r = 0, k; | 141 | int r = 0, k; |
143 | 142 | ||
144 | @@ -2461,7 +2469,9 @@ static int glob_item_recursively( | 143 | @@ -2453,7 +2461,9 @@ static int glob_item_recursively( |
145 | fdaction_t action) { | 144 | fdaction_t action) { |
146 | 145 | ||
147 | _cleanup_globfree_ glob_t g = { | 146 | _cleanup_globfree_ glob_t g = { |
@@ -151,6 +150,3 @@ index 230ec09b97..2cc5f391d7 100644 | |||
151 | }; | 150 | }; |
152 | int r = 0, k; | 151 | int r = 0, k; |
153 | 152 | ||
154 | -- | ||
155 | 2.34.1 | ||
156 | |||
diff --git a/meta/recipes-core/systemd/systemd/0005-add-missing-FTW_-macros-for-musl.patch b/meta/recipes-core/systemd/systemd/0005-add-missing-FTW_-macros-for-musl.patch index a1dfca22cd..deb0e83b6d 100644 --- a/meta/recipes-core/systemd/systemd/0005-add-missing-FTW_-macros-for-musl.patch +++ b/meta/recipes-core/systemd/systemd/0005-add-missing-FTW_-macros-for-musl.patch | |||
@@ -1,7 +1,7 @@ | |||
1 | From dad7f897c0de654fa5592fda3e90f874639849f9 Mon Sep 17 00:00:00 2001 | 1 | From dcd9639657e4122b7665b01a067219f990ee3ace Mon Sep 17 00:00:00 2001 |
2 | From: Chen Qi <Qi.Chen@windriver.com> | 2 | From: Chen Qi <Qi.Chen@windriver.com> |
3 | Date: Mon, 25 Feb 2019 15:00:06 +0800 | 3 | Date: Mon, 25 Feb 2019 15:00:06 +0800 |
4 | Subject: [PATCH 05/22] add missing FTW_ macros for musl | 4 | Subject: [PATCH] add missing FTW_ macros for musl |
5 | 5 | ||
6 | This is to avoid build failures like below for musl. | 6 | This is to avoid build failures like below for musl. |
7 | 7 | ||
@@ -39,6 +39,3 @@ index 8684d064ec..70fc2b5376 100644 | |||
39 | 39 | ||
40 | static char **list_nftw = NULL; | 40 | static char **list_nftw = NULL; |
41 | 41 | ||
42 | -- | ||
43 | 2.34.1 | ||
44 | |||
diff --git a/meta/recipes-core/systemd/systemd/0006-Use-uintmax_t-for-handling-rlim_t.patch b/meta/recipes-core/systemd/systemd/0006-Use-uintmax_t-for-handling-rlim_t.patch index 4be14b72ec..f02675604d 100644 --- a/meta/recipes-core/systemd/systemd/0006-Use-uintmax_t-for-handling-rlim_t.patch +++ b/meta/recipes-core/systemd/systemd/0006-Use-uintmax_t-for-handling-rlim_t.patch | |||
@@ -1,7 +1,7 @@ | |||
1 | From 96e975a2412a20e5f80bd3ab144057d275eb8597 Mon Sep 17 00:00:00 2001 | 1 | From c58933386caa8e45b5e814ec4ff210b18ab401da Mon Sep 17 00:00:00 2001 |
2 | From: Chen Qi <Qi.Chen@windriver.com> | 2 | From: Chen Qi <Qi.Chen@windriver.com> |
3 | Date: Mon, 25 Feb 2019 15:12:41 +0800 | 3 | Date: Mon, 25 Feb 2019 15:12:41 +0800 |
4 | Subject: [PATCH 06/22] Use uintmax_t for handling rlim_t | 4 | Subject: [PATCH] Use uintmax_t for handling rlim_t |
5 | 5 | ||
6 | PRIu{32,64} is not right format to represent rlim_t type | 6 | PRIu{32,64} is not right format to represent rlim_t type |
7 | therefore use %ju and typecast the rlim_t variables to | 7 | therefore use %ju and typecast the rlim_t variables to |
@@ -86,10 +86,10 @@ index c1f0b2b974..61c5412582 100644 | |||
86 | return 1; | 86 | return 1; |
87 | } | 87 | } |
88 | diff --git a/src/core/execute.c b/src/core/execute.c | 88 | diff --git a/src/core/execute.c b/src/core/execute.c |
89 | index bd3da0c401..df1870fd2f 100644 | 89 | index 531bec4c92..602a95f674 100644 |
90 | --- a/src/core/execute.c | 90 | --- a/src/core/execute.c |
91 | +++ b/src/core/execute.c | 91 | +++ b/src/core/execute.c |
92 | @@ -1045,9 +1045,9 @@ void exec_context_dump(const ExecContext *c, FILE* f, const char *prefix) { | 92 | @@ -1041,9 +1041,9 @@ void exec_context_dump(const ExecContext *c, FILE* f, const char *prefix) { |
93 | for (unsigned i = 0; i < RLIM_NLIMITS; i++) | 93 | for (unsigned i = 0; i < RLIM_NLIMITS; i++) |
94 | if (c->rlimit[i]) { | 94 | if (c->rlimit[i]) { |
95 | fprintf(f, "%sLimit%s: " RLIM_FMT "\n", | 95 | fprintf(f, "%sLimit%s: " RLIM_FMT "\n", |
@@ -101,6 +101,3 @@ index bd3da0c401..df1870fd2f 100644 | |||
101 | } | 101 | } |
102 | 102 | ||
103 | if (c->ioprio_set) { | 103 | if (c->ioprio_set) { |
104 | -- | ||
105 | 2.34.1 | ||
106 | |||
diff --git a/meta/recipes-core/systemd/systemd/0007-don-t-pass-AT_SYMLINK_NOFOLLOW-flag-to-faccessat.patch b/meta/recipes-core/systemd/systemd/0007-don-t-pass-AT_SYMLINK_NOFOLLOW-flag-to-faccessat.patch index 8d6084239e..1d4ce33712 100644 --- a/meta/recipes-core/systemd/systemd/0007-don-t-pass-AT_SYMLINK_NOFOLLOW-flag-to-faccessat.patch +++ b/meta/recipes-core/systemd/systemd/0007-don-t-pass-AT_SYMLINK_NOFOLLOW-flag-to-faccessat.patch | |||
@@ -1,7 +1,7 @@ | |||
1 | From 4842cff4f1329f0b5034b529d56f8ad1f234ac4c Mon Sep 17 00:00:00 2001 | 1 | From 5bf8235bc5c802908aa5d95740350927d87e953a Mon Sep 17 00:00:00 2001 |
2 | From: Andre McCurdy <armccurdy@gmail.com> | 2 | From: Andre McCurdy <armccurdy@gmail.com> |
3 | Date: Tue, 10 Oct 2017 14:33:30 -0700 | 3 | Date: Tue, 10 Oct 2017 14:33:30 -0700 |
4 | Subject: [PATCH 07/22] don't pass AT_SYMLINK_NOFOLLOW flag to faccessat() | 4 | Subject: [PATCH] don't pass AT_SYMLINK_NOFOLLOW flag to faccessat() |
5 | 5 | ||
6 | Avoid using AT_SYMLINK_NOFOLLOW flag. It doesn't seem like the right | 6 | Avoid using AT_SYMLINK_NOFOLLOW flag. It doesn't seem like the right |
7 | thing to do and it's not portable (not supported by musl). See: | 7 | thing to do and it's not portable (not supported by musl). See: |
@@ -31,7 +31,7 @@ Signed-off-by: Andre McCurdy <armccurdy@gmail.com> | |||
31 | 2 files changed, 23 insertions(+), 4 deletions(-) | 31 | 2 files changed, 23 insertions(+), 4 deletions(-) |
32 | 32 | ||
33 | diff --git a/src/basic/fs-util.h b/src/basic/fs-util.h | 33 | diff --git a/src/basic/fs-util.h b/src/basic/fs-util.h |
34 | index 1023ab73ca..c78ff6f27f 100644 | 34 | index 6a1e2e76d1..c3f7235e09 100644 |
35 | --- a/src/basic/fs-util.h | 35 | --- a/src/basic/fs-util.h |
36 | +++ b/src/basic/fs-util.h | 36 | +++ b/src/basic/fs-util.h |
37 | @@ -49,8 +49,27 @@ int futimens_opath(int fd, const struct timespec ts[2]); | 37 | @@ -49,8 +49,27 @@ int futimens_opath(int fd, const struct timespec ts[2]); |
@@ -64,7 +64,7 @@ index 1023ab73ca..c78ff6f27f 100644 | |||
64 | int touch_file(const char *path, bool parents, usec_t stamp, uid_t uid, gid_t gid, mode_t mode); | 64 | int touch_file(const char *path, bool parents, usec_t stamp, uid_t uid, gid_t gid, mode_t mode); |
65 | 65 | ||
66 | diff --git a/src/shared/base-filesystem.c b/src/shared/base-filesystem.c | 66 | diff --git a/src/shared/base-filesystem.c b/src/shared/base-filesystem.c |
67 | index 569ef466c3..7ae921a113 100644 | 67 | index a4e2dae245..67aa8ea1f2 100644 |
68 | --- a/src/shared/base-filesystem.c | 68 | --- a/src/shared/base-filesystem.c |
69 | +++ b/src/shared/base-filesystem.c | 69 | +++ b/src/shared/base-filesystem.c |
70 | @@ -145,7 +145,7 @@ int base_filesystem_create_fd(int fd, const char *root, uid_t uid, gid_t gid) { | 70 | @@ -145,7 +145,7 @@ int base_filesystem_create_fd(int fd, const char *root, uid_t uid, gid_t gid) { |
@@ -94,6 +94,3 @@ index 569ef466c3..7ae921a113 100644 | |||
94 | continue; | 94 | continue; |
95 | } | 95 | } |
96 | 96 | ||
97 | -- | ||
98 | 2.34.1 | ||
99 | |||
diff --git a/meta/recipes-core/systemd/systemd/0008-Define-glibc-compatible-basename-for-non-glibc-syste.patch b/meta/recipes-core/systemd/systemd/0008-Define-glibc-compatible-basename-for-non-glibc-syste.patch index c1a8bb19fe..1ab23553f9 100644 --- a/meta/recipes-core/systemd/systemd/0008-Define-glibc-compatible-basename-for-non-glibc-syste.patch +++ b/meta/recipes-core/systemd/systemd/0008-Define-glibc-compatible-basename-for-non-glibc-syste.patch | |||
@@ -1,8 +1,7 @@ | |||
1 | From bab07e779ff23d5593bb118efaaa31b60a6dce87 Mon Sep 17 00:00:00 2001 | 1 | From d98f6bd4d8acdc4c1a5bff4c699f5004a6ebccce Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Sun, 27 May 2018 08:36:44 -0700 | 3 | Date: Sun, 27 May 2018 08:36:44 -0700 |
4 | Subject: [PATCH 08/22] Define glibc compatible basename() for non-glibc | 4 | Subject: [PATCH] Define glibc compatible basename() for non-glibc systems |
5 | systems | ||
6 | 5 | ||
7 | Fixes builds with musl, even though systemd is adamant about | 6 | Fixes builds with musl, even though systemd is adamant about |
8 | using non-posix basename implementation, we have a way out | 7 | using non-posix basename implementation, we have a way out |
@@ -29,6 +28,3 @@ index b6d8be3083..0a29036c4c 100644 | |||
29 | static inline char* strstr_ptr(const char *haystack, const char *needle) { | 28 | static inline char* strstr_ptr(const char *haystack, const char *needle) { |
30 | if (!haystack || !needle) | 29 | if (!haystack || !needle) |
31 | return NULL; | 30 | return NULL; |
32 | -- | ||
33 | 2.34.1 | ||
34 | |||
diff --git a/meta/recipes-core/systemd/systemd/0008-implment-systemd-sysv-install-for-OE.patch b/meta/recipes-core/systemd/systemd/0008-implment-systemd-sysv-install-for-OE.patch index acff18dc43..8b833ce073 100644 --- a/meta/recipes-core/systemd/systemd/0008-implment-systemd-sysv-install-for-OE.patch +++ b/meta/recipes-core/systemd/systemd/0008-implment-systemd-sysv-install-for-OE.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 5712d56f1cd654d2e5d2e9117ff77fe4c299f76b Mon Sep 17 00:00:00 2001 | 1 | From 5f89867f2b15f9bd411564d04e660cdeedd370ca Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Sat, 5 Sep 2015 06:31:47 +0000 | 3 | Date: Sat, 5 Sep 2015 06:31:47 +0000 |
4 | Subject: [PATCH] implment systemd-sysv-install for OE | 4 | Subject: [PATCH] implment systemd-sysv-install for OE |
@@ -38,6 +38,3 @@ index cb58d8243b..000bdf6165 100755 | |||
38 | ;; | 38 | ;; |
39 | *) | 39 | *) |
40 | usage ;; | 40 | usage ;; |
41 | -- | ||
42 | 2.39.2 | ||
43 | |||
diff --git a/meta/recipes-core/systemd/systemd/0009-Do-not-disable-buffering-when-writing-to-oom_score_a.patch b/meta/recipes-core/systemd/systemd/0009-Do-not-disable-buffering-when-writing-to-oom_score_a.patch index 3ff0177ae3..98ab397eff 100644 --- a/meta/recipes-core/systemd/systemd/0009-Do-not-disable-buffering-when-writing-to-oom_score_a.patch +++ b/meta/recipes-core/systemd/systemd/0009-Do-not-disable-buffering-when-writing-to-oom_score_a.patch | |||
@@ -1,7 +1,7 @@ | |||
1 | From 25093c5017725b8577c444dfea0f42ad85b43522 Mon Sep 17 00:00:00 2001 | 1 | From f3943f58f3c8a9d20dcada56eb5ca5f673a49b3d Mon Sep 17 00:00:00 2001 |
2 | From: Chen Qi <Qi.Chen@windriver.com> | 2 | From: Chen Qi <Qi.Chen@windriver.com> |
3 | Date: Wed, 4 Jul 2018 15:00:44 +0800 | 3 | Date: Wed, 4 Jul 2018 15:00:44 +0800 |
4 | Subject: [PATCH 09/22] Do not disable buffering when writing to oom_score_adj | 4 | Subject: [PATCH] Do not disable buffering when writing to oom_score_adj |
5 | 5 | ||
6 | On musl, disabling buffering when writing to oom_score_adj will | 6 | On musl, disabling buffering when writing to oom_score_adj will |
7 | cause the following error. | 7 | cause the following error. |
@@ -24,7 +24,7 @@ Signed-off-by: Scott Murray <scott.murray@konsulko.com> | |||
24 | 1 file changed, 1 insertion(+), 1 deletion(-) | 24 | 1 file changed, 1 insertion(+), 1 deletion(-) |
25 | 25 | ||
26 | diff --git a/src/basic/process-util.c b/src/basic/process-util.c | 26 | diff --git a/src/basic/process-util.c b/src/basic/process-util.c |
27 | index 201c5596ae..ea51595b6c 100644 | 27 | index 4492e7ded2..b61a2aba74 100644 |
28 | --- a/src/basic/process-util.c | 28 | --- a/src/basic/process-util.c |
29 | +++ b/src/basic/process-util.c | 29 | +++ b/src/basic/process-util.c |
30 | @@ -1716,7 +1716,7 @@ int set_oom_score_adjust(int value) { | 30 | @@ -1716,7 +1716,7 @@ int set_oom_score_adjust(int value) { |
@@ -36,6 +36,3 @@ index 201c5596ae..ea51595b6c 100644 | |||
36 | } | 36 | } |
37 | 37 | ||
38 | int get_oom_score_adjust(int *ret) { | 38 | int get_oom_score_adjust(int *ret) { |
39 | -- | ||
40 | 2.34.1 | ||
41 | |||
diff --git a/meta/recipes-core/systemd/systemd/0010-distinguish-XSI-compliant-strerror_r-from-GNU-specif.patch b/meta/recipes-core/systemd/systemd/0010-distinguish-XSI-compliant-strerror_r-from-GNU-specif.patch index cf59ac7d06..e1ffe4ddb5 100644 --- a/meta/recipes-core/systemd/systemd/0010-distinguish-XSI-compliant-strerror_r-from-GNU-specif.patch +++ b/meta/recipes-core/systemd/systemd/0010-distinguish-XSI-compliant-strerror_r-from-GNU-specif.patch | |||
@@ -1,7 +1,7 @@ | |||
1 | From 2adbe9773cd65c48eec9df96868d4a738927c8d9 Mon Sep 17 00:00:00 2001 | 1 | From 5305c5f18236a9d7f2e633ea6ad317e09e8f29d5 Mon Sep 17 00:00:00 2001 |
2 | From: Chen Qi <Qi.Chen@windriver.com> | 2 | From: Chen Qi <Qi.Chen@windriver.com> |
3 | Date: Tue, 10 Jul 2018 15:40:17 +0800 | 3 | Date: Tue, 10 Jul 2018 15:40:17 +0800 |
4 | Subject: [PATCH 10/22] distinguish XSI-compliant strerror_r from GNU-specifi | 4 | Subject: [PATCH] distinguish XSI-compliant strerror_r from GNU-specifi |
5 | strerror_r | 5 | strerror_r |
6 | 6 | ||
7 | XSI-compliant strerror_r and GNU-specifi strerror_r are different. | 7 | XSI-compliant strerror_r and GNU-specifi strerror_r are different. |
@@ -24,10 +24,10 @@ Signed-off-by: Chen Qi <Qi.Chen@windriver.com> | |||
24 | 2 files changed, 15 insertions(+), 1 deletion(-) | 24 | 2 files changed, 15 insertions(+), 1 deletion(-) |
25 | 25 | ||
26 | diff --git a/src/libsystemd/sd-bus/bus-error.c b/src/libsystemd/sd-bus/bus-error.c | 26 | diff --git a/src/libsystemd/sd-bus/bus-error.c b/src/libsystemd/sd-bus/bus-error.c |
27 | index 77b2e1a0fd..fdba0e0142 100644 | 27 | index f415797700..a5c6e9a3bd 100644 |
28 | --- a/src/libsystemd/sd-bus/bus-error.c | 28 | --- a/src/libsystemd/sd-bus/bus-error.c |
29 | +++ b/src/libsystemd/sd-bus/bus-error.c | 29 | +++ b/src/libsystemd/sd-bus/bus-error.c |
30 | @@ -408,7 +408,12 @@ static void bus_error_strerror(sd_bus_error *e, int error) { | 30 | @@ -410,7 +410,12 @@ static void bus_error_strerror(sd_bus_error *e, int error) { |
31 | return; | 31 | return; |
32 | 32 | ||
33 | errno = 0; | 33 | errno = 0; |
@@ -40,7 +40,7 @@ index 77b2e1a0fd..fdba0e0142 100644 | |||
40 | if (errno == ERANGE || strlen(x) >= k - 1) { | 40 | if (errno == ERANGE || strlen(x) >= k - 1) { |
41 | free(m); | 41 | free(m); |
42 | k *= 2; | 42 | k *= 2; |
43 | @@ -593,8 +598,12 @@ const char* _bus_error_message(const sd_bus_error *e, int error, char buf[static | 43 | @@ -595,8 +600,12 @@ const char* _bus_error_message(const sd_bus_error *e, int error, char buf[static |
44 | 44 | ||
45 | if (e && e->message) | 45 | if (e && e->message) |
46 | return e->message; | 46 | return e->message; |
@@ -71,6 +71,3 @@ index 69a2eb6404..1561859650 100644 | |||
71 | if (errno == 0) { | 71 | if (errno == 0) { |
72 | char error[STRLEN("ERRNO=") + DECIMAL_STR_MAX(int) + 1]; | 72 | char error[STRLEN("ERRNO=") + DECIMAL_STR_MAX(int) + 1]; |
73 | 73 | ||
74 | -- | ||
75 | 2.34.1 | ||
76 | |||
diff --git a/meta/recipes-core/systemd/systemd/0011-avoid-redefinition-of-prctl_mm_map-structure.patch b/meta/recipes-core/systemd/systemd/0011-avoid-redefinition-of-prctl_mm_map-structure.patch index e481b2e2e4..3075f9d003 100644 --- a/meta/recipes-core/systemd/systemd/0011-avoid-redefinition-of-prctl_mm_map-structure.patch +++ b/meta/recipes-core/systemd/systemd/0011-avoid-redefinition-of-prctl_mm_map-structure.patch | |||
@@ -1,7 +1,7 @@ | |||
1 | From 49c446cfb78cf74a909bed8c3798b77a5469866a Mon Sep 17 00:00:00 2001 | 1 | From c866c8652f06af4cd0b9bd7eedc606449f432b95 Mon Sep 17 00:00:00 2001 |
2 | From: Chen Qi <Qi.Chen@windriver.com> | 2 | From: Chen Qi <Qi.Chen@windriver.com> |
3 | Date: Mon, 25 Feb 2019 15:44:54 +0800 | 3 | Date: Mon, 25 Feb 2019 15:44:54 +0800 |
4 | Subject: [PATCH 11/22] avoid redefinition of prctl_mm_map structure | 4 | Subject: [PATCH] avoid redefinition of prctl_mm_map structure |
5 | 5 | ||
6 | Fix the following compile failure: | 6 | Fix the following compile failure: |
7 | error: redefinition of 'struct prctl_mm_map' | 7 | error: redefinition of 'struct prctl_mm_map' |
@@ -27,6 +27,3 @@ index 7d9e395c92..88c2d7dfac 100644 | |||
27 | 27 | ||
28 | /* 58319057b7847667f0c9585b9de0e8932b0fdb08 (4.3) */ | 28 | /* 58319057b7847667f0c9585b9de0e8932b0fdb08 (4.3) */ |
29 | #ifndef PR_CAP_AMBIENT | 29 | #ifndef PR_CAP_AMBIENT |
30 | -- | ||
31 | 2.34.1 | ||
32 | |||
diff --git a/meta/recipes-core/systemd/systemd/0012-do-not-disable-buffer-in-writing-files.patch b/meta/recipes-core/systemd/systemd/0012-do-not-disable-buffer-in-writing-files.patch index 66be79077e..0fec84724d 100644 --- a/meta/recipes-core/systemd/systemd/0012-do-not-disable-buffer-in-writing-files.patch +++ b/meta/recipes-core/systemd/systemd/0012-do-not-disable-buffer-in-writing-files.patch | |||
@@ -1,7 +1,7 @@ | |||
1 | From e4885a8e60f883d9217e26e1db3754c2906aca31 Mon Sep 17 00:00:00 2001 | 1 | From 8464b845e6df1be303e09274c13e653072701671 Mon Sep 17 00:00:00 2001 |
2 | From: Chen Qi <Qi.Chen@windriver.com> | 2 | From: Chen Qi <Qi.Chen@windriver.com> |
3 | Date: Fri, 1 Mar 2019 15:22:15 +0800 | 3 | Date: Fri, 1 Mar 2019 15:22:15 +0800 |
4 | Subject: [PATCH 12/22] do not disable buffer in writing files | 4 | Subject: [PATCH] do not disable buffer in writing files |
5 | 5 | ||
6 | Do not disable buffer in writing files, otherwise we get | 6 | Do not disable buffer in writing files, otherwise we get |
7 | failure at boot for musl like below. | 7 | failure at boot for musl like below. |
@@ -201,7 +201,7 @@ index 61ac4df1a6..ea18970196 100644 | |||
201 | return r; | 201 | return r; |
202 | 202 | ||
203 | diff --git a/src/core/main.c b/src/core/main.c | 203 | diff --git a/src/core/main.c b/src/core/main.c |
204 | index 3f71cc0947..0e5aec3e9e 100644 | 204 | index 1c0030a75f..7108a87d46 100644 |
205 | --- a/src/core/main.c | 205 | --- a/src/core/main.c |
206 | +++ b/src/core/main.c | 206 | +++ b/src/core/main.c |
207 | @@ -1678,7 +1678,7 @@ static void initialize_core_pattern(bool skip_setup) { | 207 | @@ -1678,7 +1678,7 @@ static void initialize_core_pattern(bool skip_setup) { |
@@ -253,7 +253,7 @@ index 066483e342..5f92dd7064 100644 | |||
253 | log_warning_errno(r, "Failed to drop caches, ignoring: %m"); | 253 | log_warning_errno(r, "Failed to drop caches, ignoring: %m"); |
254 | else | 254 | else |
255 | diff --git a/src/libsystemd/sd-device/sd-device.c b/src/libsystemd/sd-device/sd-device.c | 255 | diff --git a/src/libsystemd/sd-device/sd-device.c b/src/libsystemd/sd-device/sd-device.c |
256 | index 2fbc619a34..09d9591e37 100644 | 256 | index 01e66b4658..f3ea82ca1b 100644 |
257 | --- a/src/libsystemd/sd-device/sd-device.c | 257 | --- a/src/libsystemd/sd-device/sd-device.c |
258 | +++ b/src/libsystemd/sd-device/sd-device.c | 258 | +++ b/src/libsystemd/sd-device/sd-device.c |
259 | @@ -2516,7 +2516,7 @@ _public_ int sd_device_set_sysattr_value(sd_device *device, const char *sysattr, | 259 | @@ -2516,7 +2516,7 @@ _public_ int sd_device_set_sysattr_value(sd_device *device, const char *sysattr, |
@@ -279,10 +279,10 @@ index a5002437c6..b12e6cd9c9 100644 | |||
279 | log_error_errno(r, "Failed to move process: %m"); | 279 | log_error_errno(r, "Failed to move process: %m"); |
280 | goto finish; | 280 | goto finish; |
281 | diff --git a/src/nspawn/nspawn.c b/src/nspawn/nspawn.c | 281 | diff --git a/src/nspawn/nspawn.c b/src/nspawn/nspawn.c |
282 | index 6ab604d3dc..bbec6b686c 100644 | 282 | index 0600f3e014..ea369c32b2 100644 |
283 | --- a/src/nspawn/nspawn.c | 283 | --- a/src/nspawn/nspawn.c |
284 | +++ b/src/nspawn/nspawn.c | 284 | +++ b/src/nspawn/nspawn.c |
285 | @@ -2688,7 +2688,7 @@ static int reset_audit_loginuid(void) { | 285 | @@ -2690,7 +2690,7 @@ static int reset_audit_loginuid(void) { |
286 | if (streq(p, "4294967295")) | 286 | if (streq(p, "4294967295")) |
287 | return 0; | 287 | return 0; |
288 | 288 | ||
@@ -291,7 +291,7 @@ index 6ab604d3dc..bbec6b686c 100644 | |||
291 | if (r < 0) { | 291 | if (r < 0) { |
292 | log_error_errno(r, | 292 | log_error_errno(r, |
293 | "Failed to reset audit login UID. This probably means that your kernel is too\n" | 293 | "Failed to reset audit login UID. This probably means that your kernel is too\n" |
294 | @@ -4141,7 +4141,7 @@ static int setup_uid_map( | 294 | @@ -4143,7 +4143,7 @@ static int setup_uid_map( |
295 | return log_oom(); | 295 | return log_oom(); |
296 | 296 | ||
297 | xsprintf(uid_map, "/proc/" PID_FMT "/uid_map", pid); | 297 | xsprintf(uid_map, "/proc/" PID_FMT "/uid_map", pid); |
@@ -300,7 +300,7 @@ index 6ab604d3dc..bbec6b686c 100644 | |||
300 | if (r < 0) | 300 | if (r < 0) |
301 | return log_error_errno(r, "Failed to write UID map: %m"); | 301 | return log_error_errno(r, "Failed to write UID map: %m"); |
302 | 302 | ||
303 | @@ -4151,7 +4151,7 @@ static int setup_uid_map( | 303 | @@ -4153,7 +4153,7 @@ static int setup_uid_map( |
304 | return log_oom(); | 304 | return log_oom(); |
305 | 305 | ||
306 | xsprintf(uid_map, "/proc/" PID_FMT "/gid_map", pid); | 306 | xsprintf(uid_map, "/proc/" PID_FMT "/gid_map", pid); |
@@ -367,10 +367,10 @@ index 805503f366..01a7ccb291 100644 | |||
367 | log_debug_errno(r, "Failed to turn off coredumps, ignoring: %m"); | 367 | log_debug_errno(r, "Failed to turn off coredumps, ignoring: %m"); |
368 | } | 368 | } |
369 | diff --git a/src/shared/hibernate-util.c b/src/shared/hibernate-util.c | 369 | diff --git a/src/shared/hibernate-util.c b/src/shared/hibernate-util.c |
370 | index 3eb13d48f6..d09b901be1 100644 | 370 | index c3991cfa4c..7d522d8d1f 100644 |
371 | --- a/src/shared/hibernate-util.c | 371 | --- a/src/shared/hibernate-util.c |
372 | +++ b/src/shared/hibernate-util.c | 372 | +++ b/src/shared/hibernate-util.c |
373 | @@ -481,7 +481,7 @@ int write_resume_config(dev_t devno, uint64_t offset, const char *device) { | 373 | @@ -501,7 +501,7 @@ int write_resume_config(dev_t devno, uint64_t offset, const char *device) { |
374 | 374 | ||
375 | /* We write the offset first since it's safer. Note that this file is only available in 4.17+, so | 375 | /* We write the offset first since it's safer. Note that this file is only available in 4.17+, so |
376 | * fail gracefully if it doesn't exist and we're only overwriting it with 0. */ | 376 | * fail gracefully if it doesn't exist and we're only overwriting it with 0. */ |
@@ -379,7 +379,7 @@ index 3eb13d48f6..d09b901be1 100644 | |||
379 | if (r == -ENOENT) { | 379 | if (r == -ENOENT) { |
380 | if (offset != 0) | 380 | if (offset != 0) |
381 | return log_error_errno(SYNTHETIC_ERRNO(EOPNOTSUPP), | 381 | return log_error_errno(SYNTHETIC_ERRNO(EOPNOTSUPP), |
382 | @@ -497,7 +497,7 @@ int write_resume_config(dev_t devno, uint64_t offset, const char *device) { | 382 | @@ -517,7 +517,7 @@ int write_resume_config(dev_t devno, uint64_t offset, const char *device) { |
383 | log_debug("Wrote resume_offset=%s for device '%s' to /sys/power/resume_offset.", | 383 | log_debug("Wrote resume_offset=%s for device '%s' to /sys/power/resume_offset.", |
384 | offset_str, device); | 384 | offset_str, device); |
385 | 385 | ||
@@ -402,7 +402,7 @@ index 1f88e724d0..feb18b320a 100644 | |||
402 | return r; | 402 | return r; |
403 | 403 | ||
404 | diff --git a/src/shared/watchdog.c b/src/shared/watchdog.c | 404 | diff --git a/src/shared/watchdog.c b/src/shared/watchdog.c |
405 | index 4c1a968718..6faf6806a5 100644 | 405 | index 99ccefb227..e4975018ab 100644 |
406 | --- a/src/shared/watchdog.c | 406 | --- a/src/shared/watchdog.c |
407 | +++ b/src/shared/watchdog.c | 407 | +++ b/src/shared/watchdog.c |
408 | @@ -93,7 +93,7 @@ static int set_pretimeout_governor(const char *governor) { | 408 | @@ -93,7 +93,7 @@ static int set_pretimeout_governor(const char *governor) { |
@@ -412,10 +412,10 @@ index 4c1a968718..6faf6806a5 100644 | |||
412 | - WRITE_STRING_FILE_DISABLE_BUFFER | WRITE_STRING_FILE_VERIFY_ON_FAILURE | WRITE_STRING_FILE_VERIFY_IGNORE_NEWLINE); | 412 | - WRITE_STRING_FILE_DISABLE_BUFFER | WRITE_STRING_FILE_VERIFY_ON_FAILURE | WRITE_STRING_FILE_VERIFY_IGNORE_NEWLINE); |
413 | + WRITE_STRING_FILE_VERIFY_ON_FAILURE | WRITE_STRING_FILE_VERIFY_IGNORE_NEWLINE); | 413 | + WRITE_STRING_FILE_VERIFY_ON_FAILURE | WRITE_STRING_FILE_VERIFY_IGNORE_NEWLINE); |
414 | if (r < 0) | 414 | if (r < 0) |
415 | return log_error_errno(r, "Failed to set pretimeout_governor to '%s': %m", governor); | 415 | return log_error_errno(r, "Failed to set watchdog pretimeout_governor to '%s': %m", governor); |
416 | 416 | ||
417 | diff --git a/src/sleep/sleep.c b/src/sleep/sleep.c | 417 | diff --git a/src/sleep/sleep.c b/src/sleep/sleep.c |
418 | index 21af3e9e52..6d4b84b5d5 100644 | 418 | index 21062b24e0..262dd71d72 100644 |
419 | --- a/src/sleep/sleep.c | 419 | --- a/src/sleep/sleep.c |
420 | +++ b/src/sleep/sleep.c | 420 | +++ b/src/sleep/sleep.c |
421 | @@ -137,7 +137,7 @@ static int write_state(int fd, char * const *states) { | 421 | @@ -137,7 +137,7 @@ static int write_state(int fd, char * const *states) { |
@@ -437,7 +437,7 @@ index 21af3e9e52..6d4b84b5d5 100644 | |||
437 | log_debug("Using sleep disk mode '%s'.", *mode); | 437 | log_debug("Using sleep disk mode '%s'.", *mode); |
438 | return 0; | 438 | return 0; |
439 | diff --git a/src/storagetm/storagetm.c b/src/storagetm/storagetm.c | 439 | diff --git a/src/storagetm/storagetm.c b/src/storagetm/storagetm.c |
440 | index ae63baaf79..82eeca479a 100644 | 440 | index 16d4fb07d4..aca7506463 100644 |
441 | --- a/src/storagetm/storagetm.c | 441 | --- a/src/storagetm/storagetm.c |
442 | +++ b/src/storagetm/storagetm.c | 442 | +++ b/src/storagetm/storagetm.c |
443 | @@ -186,7 +186,7 @@ static int nvme_subsystem_unlink(NvmeSubsystem *s) { | 443 | @@ -186,7 +186,7 @@ static int nvme_subsystem_unlink(NvmeSubsystem *s) { |
@@ -533,7 +533,7 @@ index ae63baaf79..82eeca479a 100644 | |||
533 | return log_error_errno(r, "Failed to set IP address on NVME port %" PRIu16 ": %m", portnr); | 533 | return log_error_errno(r, "Failed to set IP address on NVME port %" PRIu16 ": %m", portnr); |
534 | 534 | ||
535 | diff --git a/src/udev/udev-rules.c b/src/udev/udev-rules.c | 535 | diff --git a/src/udev/udev-rules.c b/src/udev/udev-rules.c |
536 | index febe345b4c..a90b610ba1 100644 | 536 | index e5b8df5c2d..63ca15628c 100644 |
537 | --- a/src/udev/udev-rules.c | 537 | --- a/src/udev/udev-rules.c |
538 | +++ b/src/udev/udev-rules.c | 538 | +++ b/src/udev/udev-rules.c |
539 | @@ -2711,7 +2711,6 @@ static int udev_rule_apply_token_to_event( | 539 | @@ -2711,7 +2711,6 @@ static int udev_rule_apply_token_to_event( |
@@ -557,6 +557,3 @@ index 4d82c65f0a..3a3d861b83 100644 | |||
557 | if (r < 0) | 557 | if (r < 0) |
558 | return log_warning_errno(r, "Failed to %s sysfs UTF-8 flag: %m", enable_disable(utf8)); | 558 | return log_warning_errno(r, "Failed to %s sysfs UTF-8 flag: %m", enable_disable(utf8)); |
559 | 559 | ||
560 | -- | ||
561 | 2.34.1 | ||
562 | |||
diff --git a/meta/recipes-core/systemd/systemd/0013-Handle-__cpu_mask-usage.patch b/meta/recipes-core/systemd/systemd/0013-Handle-__cpu_mask-usage.patch index 43f75373a6..6d7e002d61 100644 --- a/meta/recipes-core/systemd/systemd/0013-Handle-__cpu_mask-usage.patch +++ b/meta/recipes-core/systemd/systemd/0013-Handle-__cpu_mask-usage.patch | |||
@@ -1,7 +1,7 @@ | |||
1 | From 2f90f8463423cfbb7e83fcef42f1071018c3b56e Mon Sep 17 00:00:00 2001 | 1 | From f6b69f28657c09b1181552d85e52ced0e01182e3 Mon Sep 17 00:00:00 2001 |
2 | From: Scott Murray <scott.murray@konsulko.com> | 2 | From: Scott Murray <scott.murray@konsulko.com> |
3 | Date: Fri, 13 Sep 2019 19:26:27 -0400 | 3 | Date: Fri, 13 Sep 2019 19:26:27 -0400 |
4 | Subject: [PATCH 13/22] Handle __cpu_mask usage | 4 | Subject: [PATCH] Handle __cpu_mask usage |
5 | 5 | ||
6 | Fixes errors: | 6 | Fixes errors: |
7 | 7 | ||
@@ -55,6 +55,3 @@ index ea0c58770e..b65c0bd370 100644 | |||
55 | 55 | ||
56 | /* Print information about various types. Useful when diagnosing | 56 | /* Print information about various types. Useful when diagnosing |
57 | * gcc diagnostics on an unfamiliar architecture. */ | 57 | * gcc diagnostics on an unfamiliar architecture. */ |
58 | -- | ||
59 | 2.34.1 | ||
60 | |||
diff --git a/meta/recipes-core/systemd/systemd/0014-Handle-missing-gshadow.patch b/meta/recipes-core/systemd/systemd/0014-Handle-missing-gshadow.patch index a751e1ba6f..40b60399aa 100644 --- a/meta/recipes-core/systemd/systemd/0014-Handle-missing-gshadow.patch +++ b/meta/recipes-core/systemd/systemd/0014-Handle-missing-gshadow.patch | |||
@@ -1,7 +1,7 @@ | |||
1 | From b7c827bb44edbb6251c9fcdb80aa03982c0e7bf3 Mon Sep 17 00:00:00 2001 | 1 | From 5442d8ddb285d33b459aefc76962640afd131280 Mon Sep 17 00:00:00 2001 |
2 | From: Alex Kiernan <alex.kiernan@gmail.com> | 2 | From: Alex Kiernan <alex.kiernan@gmail.com> |
3 | Date: Tue, 10 Mar 2020 11:05:20 +0000 | 3 | Date: Tue, 10 Mar 2020 11:05:20 +0000 |
4 | Subject: [PATCH 14/22] Handle missing gshadow | 4 | Subject: [PATCH] Handle missing gshadow |
5 | 5 | ||
6 | gshadow usage is now present in the userdb code. Mask all uses of it to | 6 | gshadow usage is now present in the userdb code. Mask all uses of it to |
7 | allow compilation on musl | 7 | allow compilation on musl |
@@ -168,6 +168,3 @@ index f60d48ace4..e878199a28 100644 | |||
168 | if (r < 0) | 168 | if (r < 0) |
169 | return r; | 169 | return r; |
170 | 170 | ||
171 | -- | ||
172 | 2.34.1 | ||
173 | |||
diff --git a/meta/recipes-core/systemd/systemd/0015-missing_syscall.h-Define-MIPS-ABI-defines-for-musl.patch b/meta/recipes-core/systemd/systemd/0015-missing_syscall.h-Define-MIPS-ABI-defines-for-musl.patch index e112766a9b..51f0b048fa 100644 --- a/meta/recipes-core/systemd/systemd/0015-missing_syscall.h-Define-MIPS-ABI-defines-for-musl.patch +++ b/meta/recipes-core/systemd/systemd/0015-missing_syscall.h-Define-MIPS-ABI-defines-for-musl.patch | |||
@@ -1,7 +1,7 @@ | |||
1 | From 3dc9d9d410bcce54fddfd94f43f7f77f3aa8e281 Mon Sep 17 00:00:00 2001 | 1 | From 863dd6f502d53a4a2644e031eee2e8dfa3fd05aa Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Mon, 12 Apr 2021 23:44:53 -0700 | 3 | Date: Mon, 12 Apr 2021 23:44:53 -0700 |
4 | Subject: [PATCH 15/22] missing_syscall.h: Define MIPS ABI defines for musl | 4 | Subject: [PATCH] missing_syscall.h: Define MIPS ABI defines for musl |
5 | 5 | ||
6 | musl does not define _MIPS_SIM_ABI32, _MIPS_SIM_NABI32, _MIPS_SIM_ABI64 | 6 | musl does not define _MIPS_SIM_ABI32, _MIPS_SIM_NABI32, _MIPS_SIM_ABI64 |
7 | unlike glibc where these are provided by libc headers, therefore define | 7 | unlike glibc where these are provided by libc headers, therefore define |
@@ -16,7 +16,7 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com> | |||
16 | 2 files changed, 7 insertions(+) | 16 | 2 files changed, 7 insertions(+) |
17 | 17 | ||
18 | diff --git a/src/basic/missing_syscall.h b/src/basic/missing_syscall.h | 18 | diff --git a/src/basic/missing_syscall.h b/src/basic/missing_syscall.h |
19 | index d795efd8f2..d6729d3c1d 100644 | 19 | index 86280771c4..20d05db506 100644 |
20 | --- a/src/basic/missing_syscall.h | 20 | --- a/src/basic/missing_syscall.h |
21 | +++ b/src/basic/missing_syscall.h | 21 | +++ b/src/basic/missing_syscall.h |
22 | @@ -20,6 +20,12 @@ | 22 | @@ -20,6 +20,12 @@ |
@@ -33,7 +33,7 @@ index d795efd8f2..d6729d3c1d 100644 | |||
33 | #include "missing_keyctl.h" | 33 | #include "missing_keyctl.h" |
34 | #include "missing_stat.h" | 34 | #include "missing_stat.h" |
35 | diff --git a/src/shared/base-filesystem.c b/src/shared/base-filesystem.c | 35 | diff --git a/src/shared/base-filesystem.c b/src/shared/base-filesystem.c |
36 | index 7ae921a113..0ef9d1fd39 100644 | 36 | index 67aa8ea1f2..8d9824495e 100644 |
37 | --- a/src/shared/base-filesystem.c | 37 | --- a/src/shared/base-filesystem.c |
38 | +++ b/src/shared/base-filesystem.c | 38 | +++ b/src/shared/base-filesystem.c |
39 | @@ -20,6 +20,7 @@ | 39 | @@ -20,6 +20,7 @@ |
@@ -44,6 +44,3 @@ index 7ae921a113..0ef9d1fd39 100644 | |||
44 | 44 | ||
45 | typedef struct BaseFilesystem { | 45 | typedef struct BaseFilesystem { |
46 | const char *dir; /* directory or symlink to create */ | 46 | const char *dir; /* directory or symlink to create */ |
47 | -- | ||
48 | 2.34.1 | ||
49 | |||
diff --git a/meta/recipes-core/systemd/systemd/0016-pass-correct-parameters-to-getdents64.patch b/meta/recipes-core/systemd/systemd/0016-pass-correct-parameters-to-getdents64.patch index 0be817e62d..66b8f66806 100644 --- a/meta/recipes-core/systemd/systemd/0016-pass-correct-parameters-to-getdents64.patch +++ b/meta/recipes-core/systemd/systemd/0016-pass-correct-parameters-to-getdents64.patch | |||
@@ -1,7 +1,7 @@ | |||
1 | From 0994b59dba9f248ad31cb7087046dc00b72cb4ea Mon Sep 17 00:00:00 2001 | 1 | From 01de9e5a6e4ad2f361d3eb4d9839c7afd91cc677 Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Fri, 21 Jan 2022 15:15:11 -0800 | 3 | Date: Fri, 21 Jan 2022 15:15:11 -0800 |
4 | Subject: [PATCH 16/22] pass correct parameters to getdents64 | 4 | Subject: [PATCH] pass correct parameters to getdents64 |
5 | 5 | ||
6 | Fixes | 6 | Fixes |
7 | ../git/src/basic/recurse-dir.c:57:40: error: incompatible pointer types passing 'uint8_t *' (aka 'unsigned char *') to parameter of type 'struct dirent *' [-Werror,-Wincompatible-pointer-types] | 7 | ../git/src/basic/recurse-dir.c:57:40: error: incompatible pointer types passing 'uint8_t *' (aka 'unsigned char *') to parameter of type 'struct dirent *' [-Werror,-Wincompatible-pointer-types] |
@@ -32,6 +32,3 @@ index 5e98b7a5d8..aef065047b 100644 | |||
32 | if (n < 0) | 32 | if (n < 0) |
33 | return -errno; | 33 | return -errno; |
34 | if (n == 0) | 34 | if (n == 0) |
35 | -- | ||
36 | 2.34.1 | ||
37 | |||
diff --git a/meta/recipes-core/systemd/systemd/0017-Adjust-for-musl-headers.patch b/meta/recipes-core/systemd/systemd/0017-Adjust-for-musl-headers.patch index 4176522a1c..f86b8c889c 100644 --- a/meta/recipes-core/systemd/systemd/0017-Adjust-for-musl-headers.patch +++ b/meta/recipes-core/systemd/systemd/0017-Adjust-for-musl-headers.patch | |||
@@ -1,7 +1,7 @@ | |||
1 | From 3c094d443ca30f19114392fd8ef274af6eabc12d Mon Sep 17 00:00:00 2001 | 1 | From a45cf93772ac5055665cc55454509747b9c68ad2 Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Fri, 21 Jan 2022 22:19:37 -0800 | 3 | Date: Fri, 21 Jan 2022 22:19:37 -0800 |
4 | Subject: [PATCH 17/22] Adjust for musl headers | 4 | Subject: [PATCH] Adjust for musl headers |
5 | 5 | ||
6 | Upstream-Status: Inappropriate [musl specific] | 6 | Upstream-Status: Inappropriate [musl specific] |
7 | 7 | ||
@@ -425,7 +425,7 @@ index 607fe0053c..9ce4005874 100644 | |||
425 | 425 | ||
426 | #include "sd-dhcp-server.h" | 426 | #include "sd-dhcp-server.h" |
427 | diff --git a/src/network/networkd-dhcp4.c b/src/network/networkd-dhcp4.c | 427 | diff --git a/src/network/networkd-dhcp4.c b/src/network/networkd-dhcp4.c |
428 | index efbae6d868..1ea2151d50 100644 | 428 | index 49c452da7c..98aad1f3cd 100644 |
429 | --- a/src/network/networkd-dhcp4.c | 429 | --- a/src/network/networkd-dhcp4.c |
430 | +++ b/src/network/networkd-dhcp4.c | 430 | +++ b/src/network/networkd-dhcp4.c |
431 | @@ -3,7 +3,7 @@ | 431 | @@ -3,7 +3,7 @@ |
@@ -451,7 +451,7 @@ index 32229a3fc7..662a345d6e 100644 | |||
451 | #include "in-addr-util.h" | 451 | #include "in-addr-util.h" |
452 | #include "networkd-address.h" | 452 | #include "networkd-address.h" |
453 | diff --git a/src/network/networkd-link.c b/src/network/networkd-link.c | 453 | diff --git a/src/network/networkd-link.c b/src/network/networkd-link.c |
454 | index ee5f0f2c0a..ea5269a2de 100644 | 454 | index 4ef1be4bad..e5aa53604d 100644 |
455 | --- a/src/network/networkd-link.c | 455 | --- a/src/network/networkd-link.c |
456 | +++ b/src/network/networkd-link.c | 456 | +++ b/src/network/networkd-link.c |
457 | @@ -3,7 +3,7 @@ | 457 | @@ -3,7 +3,7 @@ |
@@ -464,7 +464,7 @@ index ee5f0f2c0a..ea5269a2de 100644 | |||
464 | #include <linux/netdevice.h> | 464 | #include <linux/netdevice.h> |
465 | #include <sys/socket.h> | 465 | #include <sys/socket.h> |
466 | diff --git a/src/network/networkd-ndisc.c b/src/network/networkd-ndisc.c | 466 | diff --git a/src/network/networkd-ndisc.c b/src/network/networkd-ndisc.c |
467 | index ab9eeb13a5..dd96fe7483 100644 | 467 | index 840ccb158d..9f2e85e32f 100644 |
468 | --- a/src/network/networkd-ndisc.c | 468 | --- a/src/network/networkd-ndisc.c |
469 | +++ b/src/network/networkd-ndisc.c | 469 | +++ b/src/network/networkd-ndisc.c |
470 | @@ -6,7 +6,7 @@ | 470 | @@ -6,7 +6,7 @@ |
@@ -477,7 +477,7 @@ index ab9eeb13a5..dd96fe7483 100644 | |||
477 | #include "sd-ndisc.h" | 477 | #include "sd-ndisc.h" |
478 | 478 | ||
479 | diff --git a/src/network/networkd-route.c b/src/network/networkd-route.c | 479 | diff --git a/src/network/networkd-route.c b/src/network/networkd-route.c |
480 | index 7218d799fc..30d5574eae 100644 | 480 | index eb502ae2cf..5b25ee4523 100644 |
481 | --- a/src/network/networkd-route.c | 481 | --- a/src/network/networkd-route.c |
482 | +++ b/src/network/networkd-route.c | 482 | +++ b/src/network/networkd-route.c |
483 | @@ -1,9 +1,5 @@ | 483 | @@ -1,9 +1,5 @@ |
@@ -502,7 +502,7 @@ index 7218d799fc..30d5574eae 100644 | |||
502 | _cleanup_(route_freep) Route *route = NULL; | 502 | _cleanup_(route_freep) Route *route = NULL; |
503 | 503 | ||
504 | diff --git a/src/network/networkd-setlink.c b/src/network/networkd-setlink.c | 504 | diff --git a/src/network/networkd-setlink.c b/src/network/networkd-setlink.c |
505 | index 2298f9ea3a..7d5f87de53 100644 | 505 | index 011ea1fe6e..59dfe733eb 100644 |
506 | --- a/src/network/networkd-setlink.c | 506 | --- a/src/network/networkd-setlink.c |
507 | +++ b/src/network/networkd-setlink.c | 507 | +++ b/src/network/networkd-setlink.c |
508 | @@ -2,7 +2,7 @@ | 508 | @@ -2,7 +2,7 @@ |
@@ -567,6 +567,3 @@ index f528a46b8e..830318cda5 100644 | |||
567 | #include <linux/netdevice.h> | 567 | #include <linux/netdevice.h> |
568 | #include <linux/pci_regs.h> | 568 | #include <linux/pci_regs.h> |
569 | 569 | ||
570 | -- | ||
571 | 2.34.1 | ||
572 | |||
diff --git a/meta/recipes-core/systemd/systemd/0018-test-bus-error-strerror-is-assumed-to-be-GNU-specifi.patch b/meta/recipes-core/systemd/systemd/0018-test-bus-error-strerror-is-assumed-to-be-GNU-specifi.patch index 75f6b9094a..67388e6807 100644 --- a/meta/recipes-core/systemd/systemd/0018-test-bus-error-strerror-is-assumed-to-be-GNU-specifi.patch +++ b/meta/recipes-core/systemd/systemd/0018-test-bus-error-strerror-is-assumed-to-be-GNU-specifi.patch | |||
@@ -1,8 +1,8 @@ | |||
1 | From be02bd0876a061728661535a709d313e39fe1ac3 Mon Sep 17 00:00:00 2001 | 1 | From 20dfb0b5e7113c2e7f82c01488b419dffe8714ab Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Tue, 8 Nov 2022 13:31:34 -0800 | 3 | Date: Tue, 8 Nov 2022 13:31:34 -0800 |
4 | Subject: [PATCH 18/22] test-bus-error: strerror() is assumed to be GNU | 4 | Subject: [PATCH] test-bus-error: strerror() is assumed to be GNU specific |
5 | specific version mark it so | 5 | version mark it so |
6 | 6 | ||
7 | Upstream-Status: Inappropriate [Upstream systemd only supports glibc] | 7 | Upstream-Status: Inappropriate [Upstream systemd only supports glibc] |
8 | 8 | ||
@@ -47,6 +47,3 @@ index 376d532281..967cfd4d67 100644 | |||
47 | 47 | ||
48 | TEST(PROTECT_ERRNO) { | 48 | TEST(PROTECT_ERRNO) { |
49 | errno = 12; | 49 | errno = 12; |
50 | -- | ||
51 | 2.34.1 | ||
52 | |||
diff --git a/meta/recipes-core/systemd/systemd/0019-errno-util-Make-STRERROR-portable-for-musl.patch b/meta/recipes-core/systemd/systemd/0019-errno-util-Make-STRERROR-portable-for-musl.patch index e038b73678..36d468959a 100644 --- a/meta/recipes-core/systemd/systemd/0019-errno-util-Make-STRERROR-portable-for-musl.patch +++ b/meta/recipes-core/systemd/systemd/0019-errno-util-Make-STRERROR-portable-for-musl.patch | |||
@@ -1,7 +1,7 @@ | |||
1 | From 46d80840bfe37e67d4f18c37a77751ea1fe63a07 Mon Sep 17 00:00:00 2001 | 1 | From 32371040c2aa649e23c3b0f2c5ee27995a16526a Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Mon, 23 Jan 2023 23:39:46 -0800 | 3 | Date: Mon, 23 Jan 2023 23:39:46 -0800 |
4 | Subject: [PATCH 19/22] errno-util: Make STRERROR portable for musl | 4 | Subject: [PATCH] errno-util: Make STRERROR portable for musl |
5 | 5 | ||
6 | Sadly, systemd has decided to use yet another GNU extention in a macro | 6 | Sadly, systemd has decided to use yet another GNU extention in a macro |
7 | lets make this such that we can use XSI compliant strerror_r() for | 7 | lets make this such that we can use XSI compliant strerror_r() for |
@@ -37,6 +37,3 @@ index 27804e6382..274c1c6ef1 100644 | |||
37 | /* A helper to print an error message or message for functions that return 0 on EOF. | 37 | /* A helper to print an error message or message for functions that return 0 on EOF. |
38 | * Note that we can't use ({ … }) to define a temporary variable, so errnum is | 38 | * Note that we can't use ({ … }) to define a temporary variable, so errnum is |
39 | * evaluated twice. */ | 39 | * evaluated twice. */ |
40 | -- | ||
41 | 2.34.1 | ||
42 | |||
diff --git a/meta/recipes-core/systemd/systemd/0020-sd-event-Make-malloc_trim-conditional-on-glibc.patch b/meta/recipes-core/systemd/systemd/0020-sd-event-Make-malloc_trim-conditional-on-glibc.patch index b83fffe793..f860b5f542 100644 --- a/meta/recipes-core/systemd/systemd/0020-sd-event-Make-malloc_trim-conditional-on-glibc.patch +++ b/meta/recipes-core/systemd/systemd/0020-sd-event-Make-malloc_trim-conditional-on-glibc.patch | |||
@@ -1,7 +1,7 @@ | |||
1 | From 9eb4867b4e2dbdb2484ae854022aff97e2f0feb3 Mon Sep 17 00:00:00 2001 | 1 | From 751fb8c9a05115f5329cfa25e69afe7657124c20 Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Wed, 2 Aug 2023 12:06:27 -0700 | 3 | Date: Wed, 2 Aug 2023 12:06:27 -0700 |
4 | Subject: [PATCH 20/22] sd-event: Make malloc_trim() conditional on glibc | 4 | Subject: [PATCH] sd-event: Make malloc_trim() conditional on glibc |
5 | 5 | ||
6 | musl does not have this API | 6 | musl does not have this API |
7 | 7 | ||
@@ -12,7 +12,7 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com> | |||
12 | 1 file changed, 3 insertions(+), 1 deletion(-) | 12 | 1 file changed, 3 insertions(+), 1 deletion(-) |
13 | 13 | ||
14 | diff --git a/src/libsystemd/sd-event/sd-event.c b/src/libsystemd/sd-event/sd-event.c | 14 | diff --git a/src/libsystemd/sd-event/sd-event.c b/src/libsystemd/sd-event/sd-event.c |
15 | index 288798a0dc..6419a7f216 100644 | 15 | index b6899df192..0c03287004 100644 |
16 | --- a/src/libsystemd/sd-event/sd-event.c | 16 | --- a/src/libsystemd/sd-event/sd-event.c |
17 | +++ b/src/libsystemd/sd-event/sd-event.c | 17 | +++ b/src/libsystemd/sd-event/sd-event.c |
18 | @@ -1874,7 +1874,7 @@ _public_ int sd_event_add_exit( | 18 | @@ -1874,7 +1874,7 @@ _public_ int sd_event_add_exit( |
@@ -34,6 +34,3 @@ index 288798a0dc..6419a7f216 100644 | |||
34 | usec_t after_timestamp = now(CLOCK_MONOTONIC); | 34 | usec_t after_timestamp = now(CLOCK_MONOTONIC); |
35 | 35 | ||
36 | if (r > 0) | 36 | if (r > 0) |
37 | -- | ||
38 | 2.34.1 | ||
39 | |||
diff --git a/meta/recipes-core/systemd/systemd/0021-shared-Do-not-use-malloc_info-on-musl.patch b/meta/recipes-core/systemd/systemd/0021-shared-Do-not-use-malloc_info-on-musl.patch index 7eff069bb7..ece9873bbc 100644 --- a/meta/recipes-core/systemd/systemd/0021-shared-Do-not-use-malloc_info-on-musl.patch +++ b/meta/recipes-core/systemd/systemd/0021-shared-Do-not-use-malloc_info-on-musl.patch | |||
@@ -1,7 +1,7 @@ | |||
1 | From 502597b9ddd6b145541b23fadca0b1d3ca9f6367 Mon Sep 17 00:00:00 2001 | 1 | From 4fe2a8b1aab7297beac50f6ce46f3df4169de218 Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Wed, 2 Aug 2023 12:20:40 -0700 | 3 | Date: Wed, 2 Aug 2023 12:20:40 -0700 |
4 | Subject: [PATCH 21/22] shared: Do not use malloc_info on musl | 4 | Subject: [PATCH] shared: Do not use malloc_info on musl |
5 | 5 | ||
6 | Upstream-Status: Inappropriate [musl-specific] | 6 | Upstream-Status: Inappropriate [musl-specific] |
7 | Signed-off-by: Khem Raj <raj.khem@gmail.com> | 7 | Signed-off-by: Khem Raj <raj.khem@gmail.com> |
@@ -52,6 +52,3 @@ index 8e70e365dd..9e782caec9 100644 | |||
52 | (void) memstream_dump(LOG_INFO, &m); | 52 | (void) memstream_dump(LOG_INFO, &m); |
53 | break; | 53 | break; |
54 | } | 54 | } |
55 | -- | ||
56 | 2.34.1 | ||
57 | |||
diff --git a/meta/recipes-core/systemd/systemd/0022-avoid-missing-LOCK_EX-declaration.patch b/meta/recipes-core/systemd/systemd/0022-avoid-missing-LOCK_EX-declaration.patch index 24f3bf74a0..f517392e58 100644 --- a/meta/recipes-core/systemd/systemd/0022-avoid-missing-LOCK_EX-declaration.patch +++ b/meta/recipes-core/systemd/systemd/0022-avoid-missing-LOCK_EX-declaration.patch | |||
@@ -1,7 +1,7 @@ | |||
1 | From fd52f1764647e03a35e8f0ed0ef952049073ccbd Mon Sep 17 00:00:00 2001 | 1 | From 97d3ed7834bd86615ba4acdbef984c677b7e7791 Mon Sep 17 00:00:00 2001 |
2 | From: Chen Qi <Qi.Chen@windriver.com> | 2 | From: Chen Qi <Qi.Chen@windriver.com> |
3 | Date: Tue, 2 Jan 2024 11:03:27 +0800 | 3 | Date: Tue, 2 Jan 2024 11:03:27 +0800 |
4 | Subject: [PATCH 22/22] avoid missing LOCK_EX declaration | 4 | Subject: [PATCH] avoid missing LOCK_EX declaration |
5 | 5 | ||
6 | This only happens on MUSL. Include sys/file.h to avoid compilation | 6 | This only happens on MUSL. Include sys/file.h to avoid compilation |
7 | error about missing LOCK_EX declaration. | 7 | error about missing LOCK_EX declaration. |
@@ -15,7 +15,7 @@ Signed-off-by: Chen Qi <Qi.Chen@windriver.com> | |||
15 | 2 files changed, 2 insertions(+) | 15 | 2 files changed, 2 insertions(+) |
16 | 16 | ||
17 | diff --git a/src/core/exec-invoke.c b/src/core/exec-invoke.c | 17 | diff --git a/src/core/exec-invoke.c b/src/core/exec-invoke.c |
18 | index 70d963e269..7084811439 100644 | 18 | index fe14ceeb31..521e7b87ff 100644 |
19 | --- a/src/core/exec-invoke.c | 19 | --- a/src/core/exec-invoke.c |
20 | +++ b/src/core/exec-invoke.c | 20 | +++ b/src/core/exec-invoke.c |
21 | @@ -4,6 +4,7 @@ | 21 | @@ -4,6 +4,7 @@ |
@@ -38,6 +38,3 @@ index 5339bc4e5e..0697495f23 100644 | |||
38 | 38 | ||
39 | int lock_dev_console(void); | 39 | int lock_dev_console(void); |
40 | 40 | ||
41 | -- | ||
42 | 2.34.1 | ||
43 | |||
diff --git a/meta/recipes-core/systemd/systemd_255.4.bb b/meta/recipes-core/systemd/systemd_255.6.bb index 62842d43c8..62842d43c8 100644 --- a/meta/recipes-core/systemd/systemd_255.4.bb +++ b/meta/recipes-core/systemd/systemd_255.6.bb | |||
diff --git a/meta/recipes-core/sysvinit/sysvinit-inittab_2.88dsf.bb b/meta/recipes-core/sysvinit/sysvinit-inittab_2.88dsf.bb index 17a01e8640..6ff2ca1bf4 100644 --- a/meta/recipes-core/sysvinit/sysvinit-inittab_2.88dsf.bb +++ b/meta/recipes-core/sysvinit/sysvinit-inittab_2.88dsf.bb | |||
@@ -6,7 +6,8 @@ LIC_FILES_CHKSUM = "file://${COREBASE}/meta/files/common-licenses/GPL-2.0-only;m | |||
6 | SRC_URI = "file://inittab \ | 6 | SRC_URI = "file://inittab \ |
7 | file://start_getty" | 7 | file://start_getty" |
8 | 8 | ||
9 | S = "${WORKDIR}" | 9 | S = "${WORKDIR}/sources" |
10 | UNPACKDIR = "${S}" | ||
10 | 11 | ||
11 | INHIBIT_DEFAULT_DEPS = "1" | 12 | INHIBIT_DEFAULT_DEPS = "1" |
12 | 13 | ||
diff --git a/meta/recipes-core/udev/udev-extraconf_1.1.bb b/meta/recipes-core/udev/udev-extraconf_1.1.bb index a3e5d12b81..0e2abcd424 100644 --- a/meta/recipes-core/udev/udev-extraconf_1.1.bb +++ b/meta/recipes-core/udev/udev-extraconf_1.1.bb | |||
@@ -13,7 +13,8 @@ SRC_URI = " \ | |||
13 | file://localextra.rules \ | 13 | file://localextra.rules \ |
14 | " | 14 | " |
15 | 15 | ||
16 | S = "${WORKDIR}" | 16 | S = "${WORKDIR}/sources" |
17 | UNPACKDIR = "${S}" | ||
17 | 18 | ||
18 | MOUNT_BASE = "/run/media" | 19 | MOUNT_BASE = "/run/media" |
19 | 20 | ||
diff --git a/meta/recipes-core/volatile-binds/volatile-binds.bb b/meta/recipes-core/volatile-binds/volatile-binds.bb index cca8a65fb4..3597ec7356 100644 --- a/meta/recipes-core/volatile-binds/volatile-binds.bb +++ b/meta/recipes-core/volatile-binds/volatile-binds.bb | |||
@@ -9,7 +9,8 @@ SRC_URI = "\ | |||
9 | file://volatile-binds.service.in \ | 9 | file://volatile-binds.service.in \ |
10 | " | 10 | " |
11 | 11 | ||
12 | S = "${WORKDIR}" | 12 | S = "${WORKDIR}/sources" |
13 | UNPACKDIR = "${S}" | ||
13 | 14 | ||
14 | inherit allarch systemd features_check | 15 | inherit allarch systemd features_check |
15 | 16 | ||
@@ -64,7 +65,6 @@ END | |||
64 | "$var_lib_servicefile" | 65 | "$var_lib_servicefile" |
65 | fi | 66 | fi |
66 | } | 67 | } |
67 | do_compile[dirs] = "${WORKDIR}" | ||
68 | 68 | ||
69 | do_install () { | 69 | do_install () { |
70 | install -d ${D}${base_sbindir} | 70 | install -d ${D}${base_sbindir} |
@@ -82,4 +82,3 @@ do_install () { | |||
82 | ln -s /dev/null ${D}${sysconfdir}/tmpfiles.d/etc.conf | 82 | ln -s /dev/null ${D}${sysconfdir}/tmpfiles.d/etc.conf |
83 | ln -s /dev/null ${D}${sysconfdir}/tmpfiles.d/home.conf | 83 | ln -s /dev/null ${D}${sysconfdir}/tmpfiles.d/home.conf |
84 | } | 84 | } |
85 | do_install[dirs] = "${WORKDIR}" | ||
diff --git a/meta/recipes-core/zlib/site_config/headers b/meta/recipes-core/zlib/site_config/headers deleted file mode 100644 index 50268918aa..0000000000 --- a/meta/recipes-core/zlib/site_config/headers +++ /dev/null | |||
@@ -1 +0,0 @@ | |||
1 | zlib.h | ||
diff --git a/meta/recipes-devtools/apt/apt_2.6.1.bb b/meta/recipes-devtools/apt/apt_2.6.1.bb index fb4ff899d2..68bd7c5407 100644 --- a/meta/recipes-devtools/apt/apt_2.6.1.bb +++ b/meta/recipes-devtools/apt/apt_2.6.1.bb | |||
@@ -49,7 +49,7 @@ DEPENDS += "db gnutls lz4 zlib bzip2 xz libgcrypt xxhash" | |||
49 | 49 | ||
50 | EXTRA_OECMAKE:append = " -DCURRENT_VENDOR=debian -DWITH_DOC=False \ | 50 | EXTRA_OECMAKE:append = " -DCURRENT_VENDOR=debian -DWITH_DOC=False \ |
51 | -DDPKG_DATADIR=${datadir}/dpkg \ | 51 | -DDPKG_DATADIR=${datadir}/dpkg \ |
52 | -DTRIEHASH_EXECUTABLE=${WORKDIR}/triehash \ | 52 | -DTRIEHASH_EXECUTABLE=${UNPACKDIR}/triehash \ |
53 | -DCMAKE_DISABLE_FIND_PACKAGE_ZSTD=True \ | 53 | -DCMAKE_DISABLE_FIND_PACKAGE_ZSTD=True \ |
54 | -DCMAKE_DISABLE_FIND_PACKAGE_SECCOMP=True \ | 54 | -DCMAKE_DISABLE_FIND_PACKAGE_SECCOMP=True \ |
55 | -DWITH_TESTS=False \ | 55 | -DWITH_TESTS=False \ |
diff --git a/meta/recipes-devtools/binutils/binutils-2.42.inc b/meta/recipes-devtools/binutils/binutils-2.42.inc index 3b6f47d4ce..d4e94d0162 100644 --- a/meta/recipes-devtools/binutils/binutils-2.42.inc +++ b/meta/recipes-devtools/binutils/binutils-2.42.inc | |||
@@ -20,7 +20,7 @@ UPSTREAM_CHECK_GITTAGREGEX = "binutils-(?P<pver>\d+_(\d_?)*)" | |||
20 | 20 | ||
21 | CVE_STATUS[CVE-2023-25584] = "cpe-incorrect: Applies only for version 2.40 and earlier" | 21 | CVE_STATUS[CVE-2023-25584] = "cpe-incorrect: Applies only for version 2.40 and earlier" |
22 | 22 | ||
23 | SRCREV ?= "553c7f61b74badf91df484450944675efd9cd485" | 23 | SRCREV ?= "73b22b4481e85635eb978585f405c4433bcc0174" |
24 | BINUTILS_GIT_URI ?= "git://sourceware.org/git/binutils-gdb.git;branch=${SRCBRANCH};protocol=https" | 24 | BINUTILS_GIT_URI ?= "git://sourceware.org/git/binutils-gdb.git;branch=${SRCBRANCH};protocol=https" |
25 | SRC_URI = "\ | 25 | SRC_URI = "\ |
26 | ${BINUTILS_GIT_URI} \ | 26 | ${BINUTILS_GIT_URI} \ |
@@ -36,5 +36,6 @@ SRC_URI = "\ | |||
36 | file://0013-Define-alignof-using-_Alignof-when-using-C11-or-newe.patch \ | 36 | file://0013-Define-alignof-using-_Alignof-when-using-C11-or-newe.patch \ |
37 | file://0014-Remove-duplicate-pe-dll.o-entry-deom-targ_extra_ofil.patch \ | 37 | file://0014-Remove-duplicate-pe-dll.o-entry-deom-targ_extra_ofil.patch \ |
38 | file://0015-gprofng-change-use-of-bignum-to-bigint.patch \ | 38 | file://0015-gprofng-change-use-of-bignum-to-bigint.patch \ |
39 | file://0016-aarch64-Remove-asserts-from-operand-qualifier-decode.patch \ | ||
39 | " | 40 | " |
40 | S = "${WORKDIR}/git" | 41 | S = "${WORKDIR}/git" |
diff --git a/meta/recipes-devtools/binutils/binutils/0016-aarch64-Remove-asserts-from-operand-qualifier-decode.patch b/meta/recipes-devtools/binutils/binutils/0016-aarch64-Remove-asserts-from-operand-qualifier-decode.patch new file mode 100644 index 0000000000..7b52425a38 --- /dev/null +++ b/meta/recipes-devtools/binutils/binutils/0016-aarch64-Remove-asserts-from-operand-qualifier-decode.patch | |||
@@ -0,0 +1,382 @@ | |||
1 | From 5b1c70bfe0d8f84dc28237d6150b7b9d57c791a8 Mon Sep 17 00:00:00 2001 | ||
2 | From: Victor Do Nascimento <victor.donascimento@arm.com> | ||
3 | Date: Tue, 16 Apr 2024 11:49:15 +0100 | ||
4 | Subject: [PATCH] aarch64: Remove asserts from operand qualifier decoders | ||
5 | [PR31595] | ||
6 | |||
7 | Given that the disassembler should never abort when decoding | ||
8 | (potentially random) data, assertion statements in the | ||
9 | `get_*reg_qualifier_from_value' function family prove problematic. | ||
10 | |||
11 | Consider the random 32-bit word W, encoded in a data segment and | ||
12 | encountered on execution of `objdump -D <obj_name>'. | ||
13 | |||
14 | If: | ||
15 | |||
16 | (W & ~opcode_mask) == valid instruction | ||
17 | |||
18 | Then before `print_insn_aarch64_word' has a chance to report the | ||
19 | instruction as potentially undefined, an attempt will be made to have | ||
20 | the qualifiers for the instruction's register operands (if any) | ||
21 | decoded. If the relevant bits do not map onto a valid qualifier for | ||
22 | the matched instruction-like word, an abort will be triggered and the | ||
23 | execution of objdump aborted. | ||
24 | |||
25 | As this scenario is perfectly feasible and, in light of the fact that | ||
26 | objdump must successfully decode all sections of a given object file, | ||
27 | it is not appropriate to assert in this family of functions. | ||
28 | |||
29 | Therefore, we add a new pseudo-qualifier `AARCH64_OPND_QLF_ERR' for | ||
30 | handling invalid qualifier-associated values and re-purpose the | ||
31 | assertion conditions in qualifier-retrieving functions to be the | ||
32 | predicate guarding the returning of the calculated qualifier type. | ||
33 | If the predicate fails, we return this new qualifier and allow the | ||
34 | caller to handle the error as appropriate. | ||
35 | |||
36 | As these functions are called either from within | ||
37 | `aarch64_extract_operand' or `do_special_decoding', both of which are | ||
38 | expected to return non-zero values, it suffices that callers return | ||
39 | zero upon encountering `AARCH64_OPND_QLF_ERR'. | ||
40 | |||
41 | Ar present the error presented in the hypothetical scenario has been | ||
42 | encountered in `get_sreg_qualifier_from_value', but the change is made | ||
43 | to the whole family to keep the interface consistent. | ||
44 | |||
45 | Bug: https://sourceware.org/PR31595 | ||
46 | |||
47 | Upstream-Status: Backport [commit 2601b201e95ea0edab89342ee7137c74e88a8a79] | ||
48 | |||
49 | Signed-off-by: Mark Hatle <mark.hatle@amd.com> | ||
50 | --- | ||
51 | .../testsuite/binutils-all/aarch64/illegal.d | 1 + | ||
52 | .../testsuite/binutils-all/aarch64/illegal.s | 3 + | ||
53 | include/opcode/aarch64.h | 3 + | ||
54 | opcodes/aarch64-dis.c | 98 +++++++++++++++---- | ||
55 | 4 files changed, 87 insertions(+), 18 deletions(-) | ||
56 | |||
57 | diff --git a/binutils/testsuite/binutils-all/aarch64/illegal.d b/binutils/testsuite/binutils-all/aarch64/illegal.d | ||
58 | index 4b90a1d9f39..b69318aec85 100644 | ||
59 | --- a/binutils/testsuite/binutils-all/aarch64/illegal.d | ||
60 | +++ b/binutils/testsuite/binutils-all/aarch64/illegal.d | ||
61 | @@ -8,5 +8,6 @@ Disassembly of section \.text: | ||
62 | |||
63 | 0+000 <.*>: | ||
64 | [ ]+0:[ ]+68ea18cc[ ]+.inst[ ]+0x68ea18cc ; undefined | ||
65 | +[ ]+4:[ ]+9dc39839[ ]+.inst[ ]+0x9dc39839 ; undefined | ||
66 | #pass | ||
67 | |||
68 | diff --git a/binutils/testsuite/binutils-all/aarch64/illegal.s b/binutils/testsuite/binutils-all/aarch64/illegal.s | ||
69 | index 216cbe6f265..43668c6db55 100644 | ||
70 | --- a/binutils/testsuite/binutils-all/aarch64/illegal.s | ||
71 | +++ b/binutils/testsuite/binutils-all/aarch64/illegal.s | ||
72 | @@ -4,4 +4,7 @@ | ||
73 | # ldpsw x12, x6, [x6],#-8 ; illegal because one of the dest regs is also the address reg | ||
74 | .inst 0x68ea18cc | ||
75 | |||
76 | + # illegal, resembles the opcode `ldapur' with invalid qualifier bits | ||
77 | + .inst 0x9dc39839 | ||
78 | + | ||
79 | # FIXME: Add more illegal instructions here. | ||
80 | diff --git a/include/opcode/aarch64.h b/include/opcode/aarch64.h | ||
81 | index 2fca9528c20..e8fe93ef127 100644 | ||
82 | --- a/include/opcode/aarch64.h | ||
83 | +++ b/include/opcode/aarch64.h | ||
84 | @@ -894,6 +894,9 @@ enum aarch64_opnd_qualifier | ||
85 | /* Special qualifier helping retrieve qualifier information during the | ||
86 | decoding time (currently not in use). */ | ||
87 | AARCH64_OPND_QLF_RETRIEVE, | ||
88 | + | ||
89 | + /* Special qualifier used for indicating error in qualifier retrieval. */ | ||
90 | + AARCH64_OPND_QLF_ERR, | ||
91 | }; | ||
92 | |||
93 | /* Instruction class. */ | ||
94 | diff --git a/opcodes/aarch64-dis.c b/opcodes/aarch64-dis.c | ||
95 | index 96f42ae862a..b70e6da9eb7 100644 | ||
96 | --- a/opcodes/aarch64-dis.c | ||
97 | +++ b/opcodes/aarch64-dis.c | ||
98 | @@ -219,9 +219,10 @@ static inline enum aarch64_opnd_qualifier | ||
99 | get_greg_qualifier_from_value (aarch64_insn value) | ||
100 | { | ||
101 | enum aarch64_opnd_qualifier qualifier = AARCH64_OPND_QLF_W + value; | ||
102 | - assert (value <= 0x1 | ||
103 | - && aarch64_get_qualifier_standard_value (qualifier) == value); | ||
104 | - return qualifier; | ||
105 | + if (value <= 0x1 | ||
106 | + && aarch64_get_qualifier_standard_value (qualifier) == value) | ||
107 | + return qualifier; | ||
108 | + return AARCH64_OPND_QLF_ERR; | ||
109 | } | ||
110 | |||
111 | /* Given VALUE, return qualifier for a vector register. This does not support | ||
112 | @@ -237,9 +238,10 @@ get_vreg_qualifier_from_value (aarch64_insn value) | ||
113 | if (qualifier >= AARCH64_OPND_QLF_V_2H) | ||
114 | qualifier += 1; | ||
115 | |||
116 | - assert (value <= 0x8 | ||
117 | - && aarch64_get_qualifier_standard_value (qualifier) == value); | ||
118 | - return qualifier; | ||
119 | + if (value <= 0x8 | ||
120 | + && aarch64_get_qualifier_standard_value (qualifier) == value) | ||
121 | + return qualifier; | ||
122 | + return AARCH64_OPND_QLF_ERR; | ||
123 | } | ||
124 | |||
125 | /* Given VALUE, return qualifier for an FP or AdvSIMD scalar register. */ | ||
126 | @@ -248,9 +250,10 @@ get_sreg_qualifier_from_value (aarch64_insn value) | ||
127 | { | ||
128 | enum aarch64_opnd_qualifier qualifier = AARCH64_OPND_QLF_S_B + value; | ||
129 | |||
130 | - assert (value <= 0x4 | ||
131 | - && aarch64_get_qualifier_standard_value (qualifier) == value); | ||
132 | - return qualifier; | ||
133 | + if (value <= 0x4 | ||
134 | + && aarch64_get_qualifier_standard_value (qualifier) == value) | ||
135 | + return qualifier; | ||
136 | + return AARCH64_OPND_QLF_ERR; | ||
137 | } | ||
138 | |||
139 | /* Given the instruction in *INST which is probably half way through the | ||
140 | @@ -263,13 +266,17 @@ get_expected_qualifier (const aarch64_inst *inst, int i) | ||
141 | { | ||
142 | aarch64_opnd_qualifier_seq_t qualifiers; | ||
143 | /* Should not be called if the qualifier is known. */ | ||
144 | - assert (inst->operands[i].qualifier == AARCH64_OPND_QLF_NIL); | ||
145 | - int invalid_count; | ||
146 | - if (aarch64_find_best_match (inst, inst->opcode->qualifiers_list, | ||
147 | - i, qualifiers, &invalid_count)) | ||
148 | - return qualifiers[i]; | ||
149 | + if (inst->operands[i].qualifier == AARCH64_OPND_QLF_NIL) | ||
150 | + { | ||
151 | + int invalid_count; | ||
152 | + if (aarch64_find_best_match (inst, inst->opcode->qualifiers_list, | ||
153 | + i, qualifiers, &invalid_count)) | ||
154 | + return qualifiers[i]; | ||
155 | + else | ||
156 | + return AARCH64_OPND_QLF_NIL; | ||
157 | + } | ||
158 | else | ||
159 | - return AARCH64_OPND_QLF_NIL; | ||
160 | + return AARCH64_OPND_QLF_ERR; | ||
161 | } | ||
162 | |||
163 | /* Operand extractors. */ | ||
164 | @@ -355,6 +362,8 @@ aarch64_ext_reglane (const aarch64_operand *self, aarch64_opnd_info *info, | ||
165 | aarch64_insn value = extract_field (FLD_imm4_11, code, 0); | ||
166 | /* Depend on AARCH64_OPND_Ed to determine the qualifier. */ | ||
167 | info->qualifier = get_expected_qualifier (inst, info->idx); | ||
168 | + if (info->qualifier == AARCH64_OPND_QLF_ERR) | ||
169 | + return 0; | ||
170 | shift = get_logsz (aarch64_get_qualifier_esize (info->qualifier)); | ||
171 | info->reglane.index = value >> shift; | ||
172 | } | ||
173 | @@ -374,6 +383,8 @@ aarch64_ext_reglane (const aarch64_operand *self, aarch64_opnd_info *info, | ||
174 | if (pos > 3) | ||
175 | return false; | ||
176 | info->qualifier = get_sreg_qualifier_from_value (pos); | ||
177 | + if (info->qualifier == AARCH64_OPND_QLF_ERR) | ||
178 | + return 0; | ||
179 | info->reglane.index = (unsigned) (value >> 1); | ||
180 | } | ||
181 | } | ||
182 | @@ -381,6 +392,8 @@ aarch64_ext_reglane (const aarch64_operand *self, aarch64_opnd_info *info, | ||
183 | { | ||
184 | /* Need information in other operand(s) to help decoding. */ | ||
185 | info->qualifier = get_expected_qualifier (inst, info->idx); | ||
186 | + if (info->qualifier == AARCH64_OPND_QLF_ERR) | ||
187 | + return 0; | ||
188 | switch (info->qualifier) | ||
189 | { | ||
190 | case AARCH64_OPND_QLF_S_4B: | ||
191 | @@ -405,6 +418,8 @@ aarch64_ext_reglane (const aarch64_operand *self, aarch64_opnd_info *info, | ||
192 | |||
193 | /* Need information in other operand(s) to help decoding. */ | ||
194 | info->qualifier = get_expected_qualifier (inst, info->idx); | ||
195 | + if (info->qualifier == AARCH64_OPND_QLF_ERR) | ||
196 | + return 0; | ||
197 | switch (info->qualifier) | ||
198 | { | ||
199 | case AARCH64_OPND_QLF_S_H: | ||
200 | @@ -644,9 +659,15 @@ aarch64_ext_advsimd_imm_shift (const aarch64_operand *self ATTRIBUTE_UNUSED, | ||
201 | 1xxx 1 2D */ | ||
202 | info->qualifier = | ||
203 | get_vreg_qualifier_from_value ((pos << 1) | (int) Q); | ||
204 | + if (info->qualifier == AARCH64_OPND_QLF_ERR) | ||
205 | + return false; | ||
206 | } | ||
207 | else | ||
208 | - info->qualifier = get_sreg_qualifier_from_value (pos); | ||
209 | + { | ||
210 | + info->qualifier = get_sreg_qualifier_from_value (pos); | ||
211 | + if (info->qualifier == AARCH64_OPND_QLF_ERR) | ||
212 | + return 0; | ||
213 | + } | ||
214 | |||
215 | if (info->type == AARCH64_OPND_IMM_VLSR) | ||
216 | /* immh <shift> | ||
217 | @@ -773,6 +794,8 @@ aarch64_ext_advsimd_imm_modified (const aarch64_operand *self ATTRIBUTE_UNUSED, | ||
218 | |||
219 | /* cmode */ | ||
220 | info->qualifier = get_expected_qualifier (inst, info->idx); | ||
221 | + if (info->qualifier == AARCH64_OPND_QLF_ERR) | ||
222 | + return 0; | ||
223 | switch (info->qualifier) | ||
224 | { | ||
225 | case AARCH64_OPND_QLF_NIL: | ||
226 | @@ -1014,6 +1037,8 @@ aarch64_ext_ft (const aarch64_operand *self ATTRIBUTE_UNUSED, | ||
227 | if (value > 0x4) | ||
228 | return false; | ||
229 | info->qualifier = get_sreg_qualifier_from_value (value); | ||
230 | + if (info->qualifier == AARCH64_OPND_QLF_ERR) | ||
231 | + return 0; | ||
232 | } | ||
233 | |||
234 | return true; | ||
235 | @@ -1086,6 +1111,8 @@ aarch64_ext_rcpc3_addr_offset (const aarch64_operand *self ATTRIBUTE_UNUSED, | ||
236 | aarch64_operand_error *errors ATTRIBUTE_UNUSED) | ||
237 | { | ||
238 | info->qualifier = get_expected_qualifier (inst, info->idx); | ||
239 | + if (info->qualifier == AARCH64_OPND_QLF_ERR) | ||
240 | + return 0; | ||
241 | |||
242 | /* Rn */ | ||
243 | info->addr.base_regno = extract_field (self->fields[0], code, 0); | ||
244 | @@ -1105,6 +1132,8 @@ aarch64_ext_addr_offset (const aarch64_operand *self ATTRIBUTE_UNUSED, | ||
245 | aarch64_operand_error *errors ATTRIBUTE_UNUSED) | ||
246 | { | ||
247 | info->qualifier = get_expected_qualifier (inst, info->idx); | ||
248 | + if (info->qualifier == AARCH64_OPND_QLF_ERR) | ||
249 | + return 0; | ||
250 | |||
251 | /* Rn */ | ||
252 | info->addr.base_regno = extract_field (self->fields[0], code, 0); | ||
253 | @@ -1154,6 +1183,8 @@ aarch64_ext_addr_regoff (const aarch64_operand *self ATTRIBUTE_UNUSED, | ||
254 | /* Need information in other operand(s) to help achieve the decoding | ||
255 | from 'S' field. */ | ||
256 | info->qualifier = get_expected_qualifier (inst, info->idx); | ||
257 | + if (info->qualifier == AARCH64_OPND_QLF_ERR) | ||
258 | + return 0; | ||
259 | /* Get the size of the data element that is accessed, which may be | ||
260 | different from that of the source register size, e.g. in strb/ldrb. */ | ||
261 | size = aarch64_get_qualifier_esize (info->qualifier); | ||
262 | @@ -1172,6 +1203,8 @@ aarch64_ext_addr_simm (const aarch64_operand *self, aarch64_opnd_info *info, | ||
263 | { | ||
264 | aarch64_insn imm; | ||
265 | info->qualifier = get_expected_qualifier (inst, info->idx); | ||
266 | + if (info->qualifier == AARCH64_OPND_QLF_ERR) | ||
267 | + return 0; | ||
268 | |||
269 | /* Rn */ | ||
270 | info->addr.base_regno = extract_field (FLD_Rn, code, 0); | ||
271 | @@ -1210,6 +1243,8 @@ aarch64_ext_addr_uimm12 (const aarch64_operand *self, aarch64_opnd_info *info, | ||
272 | { | ||
273 | int shift; | ||
274 | info->qualifier = get_expected_qualifier (inst, info->idx); | ||
275 | + if (info->qualifier == AARCH64_OPND_QLF_ERR) | ||
276 | + return 0; | ||
277 | shift = get_logsz (aarch64_get_qualifier_esize (info->qualifier)); | ||
278 | /* Rn */ | ||
279 | info->addr.base_regno = extract_field (self->fields[0], code, 0); | ||
280 | @@ -1228,6 +1263,8 @@ aarch64_ext_addr_simm10 (const aarch64_operand *self, aarch64_opnd_info *info, | ||
281 | aarch64_insn imm; | ||
282 | |||
283 | info->qualifier = get_expected_qualifier (inst, info->idx); | ||
284 | + if (info->qualifier == AARCH64_OPND_QLF_ERR) | ||
285 | + return 0; | ||
286 | /* Rn */ | ||
287 | info->addr.base_regno = extract_field (self->fields[0], code, 0); | ||
288 | /* simm10 */ | ||
289 | @@ -2467,6 +2504,8 @@ decode_sizeq (aarch64_inst *inst) | ||
290 | if (mask == 0x7) | ||
291 | { | ||
292 | inst->operands[idx].qualifier = get_vreg_qualifier_from_value (value); | ||
293 | + if (inst->operands[idx].qualifier == AARCH64_OPND_QLF_ERR) | ||
294 | + return 0; | ||
295 | return 1; | ||
296 | } | ||
297 | |||
298 | @@ -2649,6 +2688,8 @@ do_special_decoding (aarch64_inst *inst) | ||
299 | idx = select_operand_for_sf_field_coding (inst->opcode); | ||
300 | value = extract_field (FLD_sf, inst->value, 0); | ||
301 | inst->operands[idx].qualifier = get_greg_qualifier_from_value (value); | ||
302 | + if (inst->operands[idx].qualifier == AARCH64_OPND_QLF_ERR) | ||
303 | + return 0; | ||
304 | if ((inst->opcode->flags & F_N) | ||
305 | && extract_field (FLD_N, inst->value, 0) != value) | ||
306 | return 0; | ||
307 | @@ -2659,6 +2700,8 @@ do_special_decoding (aarch64_inst *inst) | ||
308 | idx = select_operand_for_sf_field_coding (inst->opcode); | ||
309 | value = extract_field (FLD_lse_sz, inst->value, 0); | ||
310 | inst->operands[idx].qualifier = get_greg_qualifier_from_value (value); | ||
311 | + if (inst->operands[idx].qualifier == AARCH64_OPND_QLF_ERR) | ||
312 | + return 0; | ||
313 | } | ||
314 | /* rcpc3 'size' field. */ | ||
315 | if (inst->opcode->flags & F_RCPC3_SIZE) | ||
316 | @@ -2670,12 +2713,18 @@ do_special_decoding (aarch64_inst *inst) | ||
317 | { | ||
318 | if (aarch64_operands[inst->operands[i].type].op_class | ||
319 | == AARCH64_OPND_CLASS_INT_REG) | ||
320 | - inst->operands[i].qualifier = get_greg_qualifier_from_value (value & 1); | ||
321 | + { | ||
322 | + inst->operands[i].qualifier = get_greg_qualifier_from_value (value & 1); | ||
323 | + if (inst->operands[i].qualifier == AARCH64_OPND_QLF_ERR) | ||
324 | + return 0; | ||
325 | + } | ||
326 | else if (aarch64_operands[inst->operands[i].type].op_class | ||
327 | == AARCH64_OPND_CLASS_FP_REG) | ||
328 | { | ||
329 | value += (extract_field (FLD_opc1, inst->value, 0) << 2); | ||
330 | inst->operands[i].qualifier = get_sreg_qualifier_from_value (value); | ||
331 | + if (inst->operands[i].qualifier == AARCH64_OPND_QLF_ERR) | ||
332 | + return 0; | ||
333 | } | ||
334 | } | ||
335 | } | ||
336 | @@ -2709,7 +2758,11 @@ do_special_decoding (aarch64_inst *inst) | ||
337 | /* For most related instruciton, the 'size' field is fully available for | ||
338 | operand encoding. */ | ||
339 | if (mask == 0x3) | ||
340 | - inst->operands[idx].qualifier = get_sreg_qualifier_from_value (value); | ||
341 | + { | ||
342 | + inst->operands[idx].qualifier = get_sreg_qualifier_from_value (value); | ||
343 | + if (inst->operands[idx].qualifier == AARCH64_OPND_QLF_ERR) | ||
344 | + return 0; | ||
345 | + } | ||
346 | else | ||
347 | { | ||
348 | get_operand_possible_qualifiers (idx, inst->opcode->qualifiers_list, | ||
349 | @@ -2744,6 +2797,9 @@ do_special_decoding (aarch64_inst *inst) | ||
350 | Q = (unsigned) extract_field (FLD_Q, inst->value, inst->opcode->mask); | ||
351 | inst->operands[0].qualifier = | ||
352 | get_vreg_qualifier_from_value ((num << 1) | Q); | ||
353 | + if (inst->operands[0].qualifier == AARCH64_OPND_QLF_ERR) | ||
354 | + return 0; | ||
355 | + | ||
356 | } | ||
357 | |||
358 | if ((inst->opcode->flags & F_OPD_SIZE) && inst->opcode->iclass == sve2_urqvs) | ||
359 | @@ -2753,7 +2809,11 @@ do_special_decoding (aarch64_inst *inst) | ||
360 | inst->opcode->mask); | ||
361 | inst->operands[0].qualifier | ||
362 | = get_vreg_qualifier_from_value (1 + (size << 1)); | ||
363 | + if (inst->operands[0].qualifier == AARCH64_OPND_QLF_ERR) | ||
364 | + return 0; | ||
365 | inst->operands[2].qualifier = get_sreg_qualifier_from_value (size); | ||
366 | + if (inst->operands[2].qualifier == AARCH64_OPND_QLF_ERR) | ||
367 | + return 0; | ||
368 | } | ||
369 | |||
370 | if (inst->opcode->flags & F_GPRSIZE_IN_Q) | ||
371 | @@ -2772,6 +2832,8 @@ do_special_decoding (aarch64_inst *inst) | ||
372 | assert (idx == 0 || idx == 1); | ||
373 | value = extract_field (FLD_Q, inst->value, 0); | ||
374 | inst->operands[idx].qualifier = get_greg_qualifier_from_value (value); | ||
375 | + if (inst->operands[idx].qualifier == AARCH64_OPND_QLF_ERR) | ||
376 | + return 0; | ||
377 | } | ||
378 | |||
379 | if (inst->opcode->flags & F_LDS_SIZE) | ||
380 | -- | ||
381 | 2.34.1 | ||
382 | |||
diff --git a/meta/recipes-devtools/btrfs-tools/btrfs-tools_6.8.bb b/meta/recipes-devtools/btrfs-tools/btrfs-tools_6.8.1.bb index 15cc7ac244..fac9f8f7ef 100644 --- a/meta/recipes-devtools/btrfs-tools/btrfs-tools_6.8.bb +++ b/meta/recipes-devtools/btrfs-tools/btrfs-tools_6.8.1.bb | |||
@@ -18,7 +18,7 @@ DEPENDS = "util-linux zlib" | |||
18 | SRC_URI = "git://git.kernel.org/pub/scm/linux/kernel/git/kdave/btrfs-progs.git;branch=master;protocol=https \ | 18 | SRC_URI = "git://git.kernel.org/pub/scm/linux/kernel/git/kdave/btrfs-progs.git;branch=master;protocol=https \ |
19 | file://0001-Add-a-possibility-to-specify-where-python-modules-ar.patch \ | 19 | file://0001-Add-a-possibility-to-specify-where-python-modules-ar.patch \ |
20 | " | 20 | " |
21 | SRCREV = "3793e987d2b4e878410da16f33d963043d137d48" | 21 | SRCREV = "5d97c32d6f94cf6f473a5f82964e3edaeb1b146e" |
22 | S = "${WORKDIR}/git" | 22 | S = "${WORKDIR}/git" |
23 | 23 | ||
24 | PACKAGECONFIG ??= " \ | 24 | PACKAGECONFIG ??= " \ |
diff --git a/meta/recipes-devtools/cdrtools/cdrtools-native_3.01.bb b/meta/recipes-devtools/cdrtools/cdrtools-native_3.01.bb index bf8be1ad0c..9dc5caf87c 100644 --- a/meta/recipes-devtools/cdrtools/cdrtools-native_3.01.bb +++ b/meta/recipes-devtools/cdrtools/cdrtools-native_3.01.bb | |||
@@ -13,23 +13,31 @@ DEPENDS += "gnu-config-native" | |||
13 | SRC_URI = " \ | 13 | SRC_URI = " \ |
14 | ${SOURCEFORGE_MIRROR}/project/cdrtools/cdrtools-${PV}.tar.bz2 \ | 14 | ${SOURCEFORGE_MIRROR}/project/cdrtools/cdrtools-${PV}.tar.bz2 \ |
15 | file://0001-Don-t-set-uid-gid-during-install.patch \ | 15 | file://0001-Don-t-set-uid-gid-during-install.patch \ |
16 | file://riscv64-linux-gcc.rul \ | 16 | file://riscv64-linux-gcc.rul \ |
17 | file://gcc14-fix.patch \ | ||
17 | " | 18 | " |
18 | 19 | ||
19 | SRC_URI[md5sum] = "7d45c5b7e1f78d85d1583b361aee6e8b" | 20 | SRC_URI[md5sum] = "7d45c5b7e1f78d85d1583b361aee6e8b" |
20 | SRC_URI[sha256sum] = "ed282eb6276c4154ce6a0b5dee0bdb81940d0cbbfc7d03f769c4735ef5f5860f" | 21 | SRC_URI[sha256sum] = "ed282eb6276c4154ce6a0b5dee0bdb81940d0cbbfc7d03f769c4735ef5f5860f" |
21 | 22 | ||
22 | EXTRA_OEMAKE = "-e MAKEFLAGS=" | 23 | EXTRA_OEMAKE = "-e MAKEFLAGS= CPPOPTX='${CPPFLAGS}' COPTX='${CFLAGS}' C++OPTX='${CXXFLAGS}' LDOPTX='${LDFLAGS}' GMAKE_NOWARN='true'" |
23 | 24 | ||
24 | # Stop failures when 'cc' can't be found | 25 | # Stop failures when 'cc' can't be found |
25 | export ac_cv_prog_CC = "${CC}" | 26 | export ac_cv_prog_CC = "${CC}" |
26 | 27 | ||
27 | inherit native | 28 | inherit native |
28 | 29 | ||
30 | # Use -std=gnu89 to build with gcc-14 (https://bugs.gentoo.org/903876) | ||
31 | # this needs to be after native inherit (which sets CFLAGS to BUILD_CFLAGS) | ||
32 | CFLAGS += "-std=gnu89" | ||
33 | |||
29 | do_configure() { | 34 | do_configure() { |
35 | # cdda2wav does not build with GCC 14 | ||
36 | rm -f ${S}/TARGETS/55cdda2wav | ||
37 | |||
30 | install -m 0755 ${STAGING_DATADIR_NATIVE}/gnu-config/config.sub ${S}/autoconf | 38 | install -m 0755 ${STAGING_DATADIR_NATIVE}/gnu-config/config.sub ${S}/autoconf |
31 | install -m 0755 ${STAGING_DATADIR_NATIVE}/gnu-config/config.guess ${S}/autoconf | 39 | install -m 0755 ${STAGING_DATADIR_NATIVE}/gnu-config/config.guess ${S}/autoconf |
32 | install -m 0644 ${WORKDIR}/riscv64-linux-gcc.rul ${S}/RULES/ | 40 | install -m 0644 ${UNPACKDIR}/riscv64-linux-gcc.rul ${S}/RULES/ |
33 | } | 41 | } |
34 | 42 | ||
35 | do_install() { | 43 | do_install() { |
diff --git a/meta/recipes-devtools/cdrtools/cdrtools/gcc14-fix.patch b/meta/recipes-devtools/cdrtools/cdrtools/gcc14-fix.patch new file mode 100644 index 0000000000..ce02bb8bcf --- /dev/null +++ b/meta/recipes-devtools/cdrtools/cdrtools/gcc14-fix.patch | |||
@@ -0,0 +1,13 @@ | |||
1 | Signed-off-by: Zoltán Böszörményi <zboszor@gmail.com> | ||
2 | Upstream-Status: Inappropriate [native] | ||
3 | --- cdrtools-3.01/autoconf/configure~ 2015-07-06 23:41:27.000000000 +0200 | ||
4 | +++ cdrtools-3.01/autoconf/configure 2024-05-01 09:37:40.897253690 +0200 | ||
5 | @@ -1205,7 +1205,7 @@ | ||
6 | #line 1206 "configure" | ||
7 | #include "confdefs.h" | ||
8 | |||
9 | -main(){return(0);} | ||
10 | +int main(){return(0);} | ||
11 | EOF | ||
12 | if { (eval echo configure:1211: \"$ac_link\") 1>&5; (eval $ac_link) 2>&5; } && test -s conftest${ac_exeext}; then | ||
13 | ac_cv_prog_cc_works=yes | ||
diff --git a/meta/recipes-devtools/cmake/cmake-native_3.28.3.bb b/meta/recipes-devtools/cmake/cmake-native_3.29.3.bb index 7f89441fb4..ddc41f289e 100644 --- a/meta/recipes-devtools/cmake/cmake-native_3.28.3.bb +++ b/meta/recipes-devtools/cmake/cmake-native_3.29.3.bb | |||
@@ -16,7 +16,7 @@ LIC_FILES_CHKSUM:append = " \ | |||
16 | file://Utilities/cmexpat/COPYING;md5=9e2ce3b3c4c0f2670883a23bbd7c37a9 \ | 16 | file://Utilities/cmexpat/COPYING;md5=9e2ce3b3c4c0f2670883a23bbd7c37a9 \ |
17 | file://Utilities/cmlibrhash/COPYING;md5=a8c2a557a5c53b1c12cddbee98c099af \ | 17 | file://Utilities/cmlibrhash/COPYING;md5=a8c2a557a5c53b1c12cddbee98c099af \ |
18 | file://Utilities/cmlibuv/LICENSE;md5=ad93ca1fffe931537fcf64f6fcce084d \ | 18 | file://Utilities/cmlibuv/LICENSE;md5=ad93ca1fffe931537fcf64f6fcce084d \ |
19 | file://Utilities/cmcurl/COPYING;md5=db8448a1e43eb2125f7740fc397db1f6 \ | 19 | file://Utilities/cmcurl/COPYING;md5=eed2e5088e1ac619c9a1c747da291d75 \ |
20 | " | 20 | " |
21 | 21 | ||
22 | B = "${WORKDIR}/build" | 22 | B = "${WORKDIR}/build" |
diff --git a/meta/recipes-devtools/cmake/cmake.inc b/meta/recipes-devtools/cmake/cmake.inc index ab9f459c05..1155c1bbe0 100644 --- a/meta/recipes-devtools/cmake/cmake.inc +++ b/meta/recipes-devtools/cmake/cmake.inc | |||
@@ -10,7 +10,7 @@ HOMEPAGE = "http://www.cmake.org/" | |||
10 | BUGTRACKER = "http://public.kitware.com/Bug/my_view_page.php" | 10 | BUGTRACKER = "http://public.kitware.com/Bug/my_view_page.php" |
11 | SECTION = "console/utils" | 11 | SECTION = "console/utils" |
12 | LICENSE = "BSD-3-Clause" | 12 | LICENSE = "BSD-3-Clause" |
13 | LIC_FILES_CHKSUM = "file://Copyright.txt;md5=9d3d12c5f3b4c1f83650adcc65b59c06 \ | 13 | LIC_FILES_CHKSUM = "file://Copyright.txt;md5=718f05155941b33862726348d3cd46ce \ |
14 | file://Source/cmake.h;beginline=1;endline=2;md5=a5f70e1fef8614734eae0d62b4f5891b \ | 14 | file://Source/cmake.h;beginline=1;endline=2;md5=a5f70e1fef8614734eae0d62b4f5891b \ |
15 | " | 15 | " |
16 | 16 | ||
@@ -19,7 +19,7 @@ CMAKE_MAJOR_VERSION = "${@'.'.join(d.getVar('PV').split('.')[0:2])}" | |||
19 | SRC_URI = "https://cmake.org/files/v${CMAKE_MAJOR_VERSION}/cmake-${PV}.tar.gz \ | 19 | SRC_URI = "https://cmake.org/files/v${CMAKE_MAJOR_VERSION}/cmake-${PV}.tar.gz \ |
20 | " | 20 | " |
21 | 21 | ||
22 | SRC_URI[sha256sum] = "72b7570e5c8593de6ac4ab433b73eab18c5fb328880460c86ce32608141ad5c1" | 22 | SRC_URI[sha256sum] = "252aee1448d49caa04954fd5e27d189dd51570557313e7b281636716a238bccb" |
23 | 23 | ||
24 | UPSTREAM_CHECK_REGEX = "cmake-(?P<pver>\d+(\.\d+)+)\.tar" | 24 | UPSTREAM_CHECK_REGEX = "cmake-(?P<pver>\d+(\.\d+)+)\.tar" |
25 | 25 | ||
diff --git a/meta/recipes-devtools/cmake/cmake/0001-CMakeLists.txt-disable-USE_NGHTTP2.patch b/meta/recipes-devtools/cmake/cmake/0001-CMakeLists.txt-disable-USE_NGHTTP2.patch index b2933d88be..b7ec3e7bac 100644 --- a/meta/recipes-devtools/cmake/cmake/0001-CMakeLists.txt-disable-USE_NGHTTP2.patch +++ b/meta/recipes-devtools/cmake/cmake/0001-CMakeLists.txt-disable-USE_NGHTTP2.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From d33d8a5e9f3b25a80d47b72b1a8a6624a85563c1 Mon Sep 17 00:00:00 2001 | 1 | From 946011cbfd686fe6bd2cec94494f6b0c4394bb6c Mon Sep 17 00:00:00 2001 |
2 | From: Changqing Li <changqing.li@windriver.com> | 2 | From: Changqing Li <changqing.li@windriver.com> |
3 | Date: Wed, 28 Dec 2022 17:51:27 +0800 | 3 | Date: Wed, 28 Dec 2022 17:51:27 +0800 |
4 | Subject: [PATCH] CMakeLists.txt: disable USE_NGHTTP2 | 4 | Subject: [PATCH] CMakeLists.txt: disable USE_NGHTTP2 |
@@ -18,10 +18,10 @@ Signed-off-by: Trevor Gamblin <tgamblin@baylibre.com> | |||
18 | 1 file changed, 1 insertion(+), 1 deletion(-) | 18 | 1 file changed, 1 insertion(+), 1 deletion(-) |
19 | 19 | ||
20 | diff --git a/Utilities/cmcurl/CMakeLists.txt b/Utilities/cmcurl/CMakeLists.txt | 20 | diff --git a/Utilities/cmcurl/CMakeLists.txt b/Utilities/cmcurl/CMakeLists.txt |
21 | index 9387247366..d3de01f4e8 100644 | 21 | index 3dd24f1e..4a0f6773 100644 |
22 | --- a/Utilities/cmcurl/CMakeLists.txt | 22 | --- a/Utilities/cmcurl/CMakeLists.txt |
23 | +++ b/Utilities/cmcurl/CMakeLists.txt | 23 | +++ b/Utilities/cmcurl/CMakeLists.txt |
24 | @@ -88,7 +88,7 @@ set(HTTP_ONLY OFF CACHE INTERNAL "Curl is not http-only") | 24 | @@ -91,7 +91,7 @@ set(HTTP_ONLY OFF CACHE INTERNAL "Curl is not http-only") |
25 | set(PICKY_COMPILER OFF CACHE INTERNAL "Enable picky compiler options") | 25 | set(PICKY_COMPILER OFF CACHE INTERNAL "Enable picky compiler options") |
26 | set(SHARE_LIB_OBJECT OFF) | 26 | set(SHARE_LIB_OBJECT OFF) |
27 | set(USE_LIBIDN2 ON) | 27 | set(USE_LIBIDN2 ON) |
@@ -30,6 +30,3 @@ index 9387247366..d3de01f4e8 100644 | |||
30 | set(USE_NGTCP2 OFF) | 30 | set(USE_NGTCP2 OFF) |
31 | set(USE_QUICHE OFF) | 31 | set(USE_QUICHE OFF) |
32 | set(USE_WIN32_IDN OFF) | 32 | set(USE_WIN32_IDN OFF) |
33 | -- | ||
34 | 2.43.0 | ||
35 | |||
diff --git a/meta/recipes-devtools/cmake/cmake/0005-Disable-use-of-ext2fs-ext2_fs.h-by-cmake-s-internal-.patch b/meta/recipes-devtools/cmake/cmake/0005-Disable-use-of-ext2fs-ext2_fs.h-by-cmake-s-internal-.patch index d6f7308fe0..af164084d0 100644 --- a/meta/recipes-devtools/cmake/cmake/0005-Disable-use-of-ext2fs-ext2_fs.h-by-cmake-s-internal-.patch +++ b/meta/recipes-devtools/cmake/cmake/0005-Disable-use-of-ext2fs-ext2_fs.h-by-cmake-s-internal-.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From fd9a04c1434e12f21c043385e306e0b52d38d749 Mon Sep 17 00:00:00 2001 | 1 | From 2d3f6933f78d393514a48f8222ac8c3c39cb5fc7 Mon Sep 17 00:00:00 2001 |
2 | From: Otavio Salvador <otavio@ossystems.com.br> | 2 | From: Otavio Salvador <otavio@ossystems.com.br> |
3 | Date: Thu, 5 Jul 2018 10:28:04 -0300 | 3 | Date: Thu, 5 Jul 2018 10:28:04 -0300 |
4 | Subject: [PATCH] Disable use of ext2fs/ext2_fs.h by cmake's internal | 4 | Subject: [PATCH] Disable use of ext2fs/ext2_fs.h by cmake's internal |
@@ -13,16 +13,15 @@ Upstream-Status: Inappropriate [config] | |||
13 | 13 | ||
14 | Signed-off-by: Paul Eggleton <paul.eggleton@linux.intel.com> | 14 | Signed-off-by: Paul Eggleton <paul.eggleton@linux.intel.com> |
15 | Signed-off-by: Otavio Salvador <otavio@ossystems.com.br> | 15 | Signed-off-by: Otavio Salvador <otavio@ossystems.com.br> |
16 | |||
17 | --- | 16 | --- |
18 | Utilities/cmlibarchive/CMakeLists.txt | 8 ++------ | 17 | Utilities/cmlibarchive/CMakeLists.txt | 8 ++------ |
19 | 1 file changed, 2 insertions(+), 6 deletions(-) | 18 | 1 file changed, 2 insertions(+), 6 deletions(-) |
20 | 19 | ||
21 | diff --git a/Utilities/cmlibarchive/CMakeLists.txt b/Utilities/cmlibarchive/CMakeLists.txt | 20 | diff --git a/Utilities/cmlibarchive/CMakeLists.txt b/Utilities/cmlibarchive/CMakeLists.txt |
22 | index bfcaf30..2960683 100644 | 21 | index e47184b7..e4964c32 100644 |
23 | --- a/Utilities/cmlibarchive/CMakeLists.txt | 22 | --- a/Utilities/cmlibarchive/CMakeLists.txt |
24 | +++ b/Utilities/cmlibarchive/CMakeLists.txt | 23 | +++ b/Utilities/cmlibarchive/CMakeLists.txt |
25 | @@ -682,12 +682,8 @@ LA_CHECK_INCLUDE_FILE("copyfile.h" HAVE_COPYFILE_H) | 24 | @@ -721,12 +721,8 @@ LA_CHECK_INCLUDE_FILE("copyfile.h" HAVE_COPYFILE_H) |
26 | LA_CHECK_INCLUDE_FILE("direct.h" HAVE_DIRECT_H) | 25 | LA_CHECK_INCLUDE_FILE("direct.h" HAVE_DIRECT_H) |
27 | LA_CHECK_INCLUDE_FILE("dlfcn.h" HAVE_DLFCN_H) | 26 | LA_CHECK_INCLUDE_FILE("dlfcn.h" HAVE_DLFCN_H) |
28 | LA_CHECK_INCLUDE_FILE("errno.h" HAVE_ERRNO_H) | 27 | LA_CHECK_INCLUDE_FILE("errno.h" HAVE_ERRNO_H) |
@@ -35,5 +34,5 @@ index bfcaf30..2960683 100644 | |||
35 | +SET(HAVE_EXT2FS_EXT2_FS_H 0) | 34 | +SET(HAVE_EXT2FS_EXT2_FS_H 0) |
36 | +SET(HAVE_WORKING_EXT2_IOC_GETFLAGS 0) | 35 | +SET(HAVE_WORKING_EXT2_IOC_GETFLAGS 0) |
37 | LA_CHECK_INCLUDE_FILE("fcntl.h" HAVE_FCNTL_H) | 36 | LA_CHECK_INCLUDE_FILE("fcntl.h" HAVE_FCNTL_H) |
37 | LA_CHECK_INCLUDE_FILE("fnmatch.h" HAVE_FNMATCH_H) | ||
38 | LA_CHECK_INCLUDE_FILE("grp.h" HAVE_GRP_H) | 38 | LA_CHECK_INCLUDE_FILE("grp.h" HAVE_GRP_H) |
39 | LA_CHECK_INCLUDE_FILE("io.h" HAVE_IO_H) | ||
diff --git a/meta/recipes-devtools/cmake/cmake_3.28.3.bb b/meta/recipes-devtools/cmake/cmake_3.29.3.bb index 9146fa8c0f..9146fa8c0f 100644 --- a/meta/recipes-devtools/cmake/cmake_3.28.3.bb +++ b/meta/recipes-devtools/cmake/cmake_3.29.3.bb | |||
diff --git a/meta/recipes-devtools/createrepo-c/createrepo-c_1.1.0.bb b/meta/recipes-devtools/createrepo-c/createrepo-c_1.1.1.bb index 1f97c99bde..e6714629af 100644 --- a/meta/recipes-devtools/createrepo-c/createrepo-c_1.1.0.bb +++ b/meta/recipes-devtools/createrepo-c/createrepo-c_1.1.1.bb | |||
@@ -9,7 +9,7 @@ SRC_URI = "git://github.com/rpm-software-management/createrepo_c;branch=master;p | |||
9 | file://0001-include-rpm-rpmstring.h.patch \ | 9 | file://0001-include-rpm-rpmstring.h.patch \ |
10 | " | 10 | " |
11 | 11 | ||
12 | SRCREV = "10a8a7af4f1de3f98a21a7d08fe3a46ef306d197" | 12 | SRCREV = "78b0c3ff48bd58eb9c0a72879cf1025e9f225684" |
13 | 13 | ||
14 | S = "${WORKDIR}/git" | 14 | S = "${WORKDIR}/git" |
15 | 15 | ||
diff --git a/meta/recipes-devtools/devel-config/distcc-config.bb b/meta/recipes-devtools/devel-config/distcc-config.bb index 698fd3585a..2c30488b82 100644 --- a/meta/recipes-devtools/devel-config/distcc-config.bb +++ b/meta/recipes-devtools/devel-config/distcc-config.bb | |||
@@ -6,7 +6,8 @@ LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384 | |||
6 | 6 | ||
7 | SRC_URI = "file://distcc.sh" | 7 | SRC_URI = "file://distcc.sh" |
8 | 8 | ||
9 | S = "${WORKDIR}" | 9 | S = "${WORKDIR}/sources" |
10 | UNPACKDIR = "${S}" | ||
10 | 11 | ||
11 | # Default to the host machine for a running qemu session | 12 | # Default to the host machine for a running qemu session |
12 | DISTCC_HOSTS ?= "192.168.7.1" | 13 | DISTCC_HOSTS ?= "192.168.7.1" |
diff --git a/meta/recipes-devtools/devel-config/nfs-export-root.bb b/meta/recipes-devtools/devel-config/nfs-export-root.bb index 5e69962d7c..ad5486aa87 100644 --- a/meta/recipes-devtools/devel-config/nfs-export-root.bb +++ b/meta/recipes-devtools/devel-config/nfs-export-root.bb | |||
@@ -3,10 +3,10 @@ DESCRIPTION = "Enables NFS access from any host to the entire filesystem (for de | |||
3 | LICENSE = "MIT" | 3 | LICENSE = "MIT" |
4 | LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" | 4 | LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" |
5 | 5 | ||
6 | |||
7 | SRC_URI = "file://exports" | 6 | SRC_URI = "file://exports" |
8 | 7 | ||
9 | S = "${WORKDIR}" | 8 | S = "${WORKDIR}/sources" |
9 | UNPACKDIR = "${S}" | ||
10 | 10 | ||
11 | do_install() { | 11 | do_install() { |
12 | install -d ${D}${sysconfdir} | 12 | install -d ${D}${sysconfdir} |
diff --git a/meta/recipes-devtools/dmidecode/dmidecode_3.5.bb b/meta/recipes-devtools/dmidecode/dmidecode_3.6.bb index 3e2bb6e30b..e5b4e27687 100644 --- a/meta/recipes-devtools/dmidecode/dmidecode_3.5.bb +++ b/meta/recipes-devtools/dmidecode/dmidecode_3.6.bb | |||
@@ -18,4 +18,4 @@ do_install() { | |||
18 | install | 18 | install |
19 | } | 19 | } |
20 | 20 | ||
21 | SRC_URI[sha256sum] = "79d76735ee8e25196e2a722964cf9683f5a09581503537884b256b01389cc073" | 21 | SRC_URI[sha256sum] = "e40c65f3ec3dafe31ad8349a4ef1a97122d38f65004ed66575e1a8d575dd8bae" |
diff --git a/meta/recipes-devtools/dnf/dnf_4.19.2.bb b/meta/recipes-devtools/dnf/dnf_4.20.0.bb index cc91dbe400..4757346cbf 100644 --- a/meta/recipes-devtools/dnf/dnf_4.19.2.bb +++ b/meta/recipes-devtools/dnf/dnf_4.20.0.bb | |||
@@ -20,7 +20,7 @@ SRC_URI = "git://github.com/rpm-software-management/dnf.git;branch=master;protoc | |||
20 | 20 | ||
21 | SRC_URI:append:class-native = "file://0001-dnf-write-the-log-lock-to-root.patch" | 21 | SRC_URI:append:class-native = "file://0001-dnf-write-the-log-lock-to-root.patch" |
22 | 22 | ||
23 | SRCREV = "9b2b2e8ddab99caba4bc8059cab4263163172e81" | 23 | SRCREV = "e3cb438c0fd08c79676c0f3276aa7d75cd8557c6" |
24 | UPSTREAM_CHECK_GITTAGREGEX = "(?P<pver>\d+(\.\d+)+)" | 24 | UPSTREAM_CHECK_GITTAGREGEX = "(?P<pver>\d+(\.\d+)+)" |
25 | 25 | ||
26 | S = "${WORKDIR}/git" | 26 | S = "${WORKDIR}/git" |
diff --git a/meta/recipes-devtools/docbook-xml/docbook-xml-dtd4_4.5.bb b/meta/recipes-devtools/docbook-xml/docbook-xml-dtd4_4.5.bb index bbd574e287..5229a0366c 100644 --- a/meta/recipes-devtools/docbook-xml/docbook-xml-dtd4_4.5.bb +++ b/meta/recipes-devtools/docbook-xml/docbook-xml-dtd4_4.5.bb | |||
@@ -41,7 +41,8 @@ SRC_URI[payloadPV.sha256sum] = "4e4e037a2b83c98c6c94818390d4bdd3f6e10f6ec62dd791 | |||
41 | 41 | ||
42 | UPSTREAM_CHECK_REGEX = "docbook-xml-(?P<pver>4(\.\d+)).zip" | 42 | UPSTREAM_CHECK_REGEX = "docbook-xml-(?P<pver>4(\.\d+)).zip" |
43 | 43 | ||
44 | S = "${WORKDIR}" | 44 | S = "${WORKDIR}/sources" |
45 | UNPACKDIR = "${S}" | ||
45 | 46 | ||
46 | do_configure (){ | 47 | do_configure (){ |
47 | : | 48 | : |
diff --git a/meta/recipes-devtools/dwarfsrcfiles/dwarfsrcfiles.bb b/meta/recipes-devtools/dwarfsrcfiles/dwarfsrcfiles.bb index c596497b8e..8e42b12b77 100644 --- a/meta/recipes-devtools/dwarfsrcfiles/dwarfsrcfiles.bb +++ b/meta/recipes-devtools/dwarfsrcfiles/dwarfsrcfiles.bb | |||
@@ -1,19 +1,22 @@ | |||
1 | SUMMARY = "A small utility for printing debug source file locations embedded in binaries" | 1 | SUMMARY = "A small utility for printing debug source file locations embedded in binaries" |
2 | DESCRIPTION = "${SUMMARY}" | 2 | DESCRIPTION = "${SUMMARY}" |
3 | LICENSE = "GPL-2.0-or-later" | 3 | LICENSE = "GPL-2.0-or-later" |
4 | LIC_FILES_CHKSUM = "file://../dwarfsrcfiles.c;md5=31483894e453a77acbb67847565f1b5c;beginline=1;endline=8" | 4 | LIC_FILES_CHKSUM = "file://dwarfsrcfiles.c;md5=31483894e453a77acbb67847565f1b5c;beginline=1;endline=8" |
5 | 5 | ||
6 | SRC_URI = "file://dwarfsrcfiles.c" | 6 | SRC_URI = "file://dwarfsrcfiles.c" |
7 | BBCLASSEXTEND = "native" | 7 | BBCLASSEXTEND = "native" |
8 | DEPENDS = "elfutils" | 8 | DEPENDS = "elfutils" |
9 | DEPENDS:append:libc-musl = " argp-standalone" | 9 | DEPENDS:append:libc-musl = " argp-standalone" |
10 | 10 | ||
11 | S = "${WORKDIR}/sources" | ||
12 | UNPACKDIR = "${S}" | ||
13 | |||
11 | do_compile () { | 14 | do_compile () { |
12 | ${CC} ${CFLAGS} ${LDFLAGS} -o dwarfsrcfiles ../dwarfsrcfiles.c -lelf -ldw | 15 | ${CC} ${CFLAGS} ${LDFLAGS} -o dwarfsrcfiles ${S}/dwarfsrcfiles.c -lelf -ldw |
13 | } | 16 | } |
14 | 17 | ||
15 | do_compile:libc-musl () { | 18 | do_compile:libc-musl () { |
16 | ${CC} ${CFLAGS} ${LDFLAGS} -o dwarfsrcfiles ../dwarfsrcfiles.c -lelf -ldw -largp | 19 | ${CC} ${CFLAGS} ${LDFLAGS} -o dwarfsrcfiles ${S}/dwarfsrcfiles.c -lelf -ldw -largp |
17 | } | 20 | } |
18 | 21 | ||
19 | do_install () { | 22 | do_install () { |
diff --git a/meta/recipes-devtools/elfutils/elfutils_0.191.bb b/meta/recipes-devtools/elfutils/elfutils_0.191.bb index c4d872430b..76bd2b3a99 100644 --- a/meta/recipes-devtools/elfutils/elfutils_0.191.bb +++ b/meta/recipes-devtools/elfutils/elfutils_0.191.bb | |||
@@ -15,7 +15,6 @@ SRC_URI = "https://sourceware.org/elfutils/ftp/${PV}/${BP}.tar.bz2 \ | |||
15 | file://0001-dso-link-change.patch \ | 15 | file://0001-dso-link-change.patch \ |
16 | file://0002-Fix-elf_cvt_gunhash-if-dest-and-src-are-same.patch \ | 16 | file://0002-Fix-elf_cvt_gunhash-if-dest-and-src-are-same.patch \ |
17 | file://0003-fixheadercheck.patch \ | 17 | file://0003-fixheadercheck.patch \ |
18 | file://0006-Fix-build-on-aarch64-musl.patch \ | ||
19 | file://0001-libasm-may-link-with-libbz2-if-found.patch \ | 18 | file://0001-libasm-may-link-with-libbz2-if-found.patch \ |
20 | file://0001-libelf-elf_end.c-check-data_list.data.d.d_buf-before.patch \ | 19 | file://0001-libelf-elf_end.c-check-data_list.data.d.d_buf-before.patch \ |
21 | file://0001-skip-the-test-when-gcc-not-deployed.patch \ | 20 | file://0001-skip-the-test-when-gcc-not-deployed.patch \ |
diff --git a/meta/recipes-devtools/elfutils/files/0006-Fix-build-on-aarch64-musl.patch b/meta/recipes-devtools/elfutils/files/0006-Fix-build-on-aarch64-musl.patch deleted file mode 100644 index 149e0e6a7b..0000000000 --- a/meta/recipes-devtools/elfutils/files/0006-Fix-build-on-aarch64-musl.patch +++ /dev/null | |||
@@ -1,58 +0,0 @@ | |||
1 | From 4409f128c81a9d76b9360b002a1d76043c77b53e Mon Sep 17 00:00:00 2001 | ||
2 | From: Hongxu Jia <hongxu.jia@windriver.com> | ||
3 | Date: Tue, 15 Aug 2017 17:27:30 +0800 | ||
4 | Subject: [PATCH] Fix build on aarch64/musl | ||
5 | |||
6 | Errors | ||
7 | |||
8 | invalid operands to binary & (have 'long double' and 'unsigned int') | ||
9 | |||
10 | error: redefinition | ||
11 | of 'struct iovec' | ||
12 | struct iovec { void *iov_base; size_t iov_len; }; | ||
13 | ^ | ||
14 | Upstream-Status: Pending | ||
15 | Signed-off-by: Khem Raj <raj.khem@gmail.com> | ||
16 | |||
17 | Rebase to 0.170 | ||
18 | Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com> | ||
19 | --- | ||
20 | backends/aarch64_initreg.c | 4 ++-- | ||
21 | backends/arm_initreg.c | 2 +- | ||
22 | 2 files changed, 3 insertions(+), 3 deletions(-) | ||
23 | |||
24 | diff --git a/backends/aarch64_initreg.c b/backends/aarch64_initreg.c | ||
25 | index daf6f37..6445276 100644 | ||
26 | --- a/backends/aarch64_initreg.c | ||
27 | +++ b/backends/aarch64_initreg.c | ||
28 | @@ -33,7 +33,7 @@ | ||
29 | #include "system.h" | ||
30 | #include <assert.h> | ||
31 | #if defined(__aarch64__) && defined(__linux__) | ||
32 | -# include <linux/uio.h> | ||
33 | +# include <sys/uio.h> | ||
34 | # include <sys/user.h> | ||
35 | # include <sys/ptrace.h> | ||
36 | /* Deal with old glibc defining user_pt_regs instead of user_regs_struct. */ | ||
37 | @@ -82,7 +82,7 @@ aarch64_set_initial_registers_tid (pid_t tid __attribute__ ((unused)), | ||
38 | |||
39 | Dwarf_Word dwarf_fregs[32]; | ||
40 | for (int r = 0; r < 32; r++) | ||
41 | - dwarf_fregs[r] = fregs.vregs[r] & 0xFFFFFFFF; | ||
42 | + dwarf_fregs[r] = (unsigned int)fregs.vregs[r] & 0xFFFFFFFF; | ||
43 | |||
44 | if (! setfunc (64, 32, dwarf_fregs, arg)) | ||
45 | return false; | ||
46 | diff --git a/backends/arm_initreg.c b/backends/arm_initreg.c | ||
47 | index efcabaf..062bb9e 100644 | ||
48 | --- a/backends/arm_initreg.c | ||
49 | +++ b/backends/arm_initreg.c | ||
50 | @@ -38,7 +38,7 @@ | ||
51 | #endif | ||
52 | |||
53 | #ifdef __aarch64__ | ||
54 | -# include <linux/uio.h> | ||
55 | +# include <sys/uio.h> | ||
56 | # include <sys/user.h> | ||
57 | # include <sys/ptrace.h> | ||
58 | /* Deal with old glibc defining user_pt_regs instead of user_regs_struct. */ | ||
diff --git a/meta/recipes-devtools/gcc/gcc-13.2.inc b/meta/recipes-devtools/gcc/gcc-14.1.inc index 603377a49a..c639cb51f4 100644 --- a/meta/recipes-devtools/gcc/gcc-13.2.inc +++ b/meta/recipes-devtools/gcc/gcc-14.1.inc | |||
@@ -2,11 +2,11 @@ require gcc-common.inc | |||
2 | 2 | ||
3 | # Third digit in PV should be incremented after a minor release | 3 | # Third digit in PV should be incremented after a minor release |
4 | 4 | ||
5 | PV = "13.2.0" | 5 | PV = "14.1.0" |
6 | 6 | ||
7 | # BINV should be incremented to a revision after a minor gcc release | 7 | # BINV should be incremented to a revision after a minor gcc release |
8 | 8 | ||
9 | BINV = "13.2.0" | 9 | BINV = "14.1.0" |
10 | 10 | ||
11 | FILESEXTRAPATHS =. "${FILE_DIRNAME}/gcc:${FILE_DIRNAME}/gcc/backport:" | 11 | FILESEXTRAPATHS =. "${FILE_DIRNAME}/gcc:${FILE_DIRNAME}/gcc/backport:" |
12 | 12 | ||
@@ -28,16 +28,19 @@ LIC_FILES_CHKSUM = "\ | |||
28 | #SOURCEDIR = "gcc-${RELEASE}" | 28 | #SOURCEDIR = "gcc-${RELEASE}" |
29 | #BASEURI ?= "https://repo.or.cz/official-gcc.git/snapshot/${RELEASE}.tar.gz;downloadfilename=gcc-${PV}-${RELEASE}.tar.gz" | 29 | #BASEURI ?= "https://repo.or.cz/official-gcc.git/snapshot/${RELEASE}.tar.gz;downloadfilename=gcc-${PV}-${RELEASE}.tar.gz" |
30 | #SOURCEDIR ?= "official-gcc-${@'${RELEASE}'[0:7]}" | 30 | #SOURCEDIR ?= "official-gcc-${@'${RELEASE}'[0:7]}" |
31 | #SRC_URI[sha256sum] = "41bf7be5dc029112a9df625266e7de030ffc36ff3638f270b180ae8c91fe6449" | ||
31 | 32 | ||
32 | # from snapshot | 33 | # from snapshot |
33 | #RELEASE ?= "13-20230520" | 34 | #RELEASE ?= "14-20240504" |
34 | #SOURCEDIR ?= "gcc-${RELEASE}" | ||
35 | #BASEURI ?= "https://gcc.gnu.org/pub/gcc/snapshots/${RELEASE}/gcc-${RELEASE}.tar.xz" | 35 | #BASEURI ?= "https://gcc.gnu.org/pub/gcc/snapshots/${RELEASE}/gcc-${RELEASE}.tar.xz" |
36 | #SOURCEDIR ?= "gcc-${RELEASE}" | ||
37 | #SRC_URI[sha256sum] = "34ac232bbf31e0cd5752fd344c2cf55719aaaad2ca0096593e01c97a3d5e274e" | ||
36 | 38 | ||
37 | # official release | 39 | # official release |
38 | RELEASE ?= "${PV}" | 40 | RELEASE ?= "${PV}" |
39 | BASEURI ?= "${GNU_MIRROR}/gcc/gcc-${PV}/gcc-${PV}.tar.xz" | 41 | BASEURI ?= "${GNU_MIRROR}/gcc/gcc-${PV}/gcc-${PV}.tar.xz" |
40 | SOURCEDIR ?= "gcc-${PV}" | 42 | SOURCEDIR ?= "gcc-${PV}" |
43 | SRC_URI[sha256sum] = "e283c654987afe3de9d8080bc0bd79534b5ca0d681a73a11ff2b5d3767426840" | ||
41 | 44 | ||
42 | SRC_URI = "${BASEURI} \ | 45 | SRC_URI = "${BASEURI} \ |
43 | file://0001-gcc-4.3.1-ARCH_FLAGS_FOR_TARGET.patch \ | 46 | file://0001-gcc-4.3.1-ARCH_FLAGS_FOR_TARGET.patch \ |
@@ -65,11 +68,7 @@ SRC_URI = "${BASEURI} \ | |||
65 | file://0023-Fix-install-path-of-linux64.h.patch \ | 68 | file://0023-Fix-install-path-of-linux64.h.patch \ |
66 | file://0024-Avoid-hardcoded-build-paths-into-ppc-libgcc.patch \ | 69 | file://0024-Avoid-hardcoded-build-paths-into-ppc-libgcc.patch \ |
67 | file://0025-gcc-testsuite-tweaks-for-mips-OE.patch \ | 70 | file://0025-gcc-testsuite-tweaks-for-mips-OE.patch \ |
68 | file://CVE-2023-4039.patch \ | ||
69 | file://0026-aarch64-Fix-loose-ldpstp-check-PR111411.patch \ | ||
70 | file://0027-Fix-gcc-vect-module-testcases.patch \ | ||
71 | " | 71 | " |
72 | SRC_URI[sha256sum] = "e275e76442a6067341a27f04c5c6b83d8613144004c0413528863dc6b5c743da" | ||
73 | 72 | ||
74 | S = "${TMPDIR}/work-shared/gcc-${PV}-${PR}/${SOURCEDIR}" | 73 | S = "${TMPDIR}/work-shared/gcc-${PV}-${PR}/${SOURCEDIR}" |
75 | B = "${WORKDIR}/gcc-${PV}/build.${HOST_SYS}.${TARGET_SYS}" | 74 | B = "${WORKDIR}/gcc-${PV}/build.${HOST_SYS}.${TARGET_SYS}" |
@@ -116,4 +115,3 @@ EXTRA_OECONF_PATHS = "\ | |||
116 | " | 115 | " |
117 | 116 | ||
118 | CVE_STATUS[CVE-2021-37322] = "cpe-incorrect: Is a binutils 2.26 issue, not gcc" | 117 | CVE_STATUS[CVE-2021-37322] = "cpe-incorrect: Is a binutils 2.26 issue, not gcc" |
119 | CVE_STATUS[CVE-2023-4039] = "fixed-version: Fixed via CVE-2023-4039.patch included here. Set the status explictly to deal with all recipes that share the gcc-source" | ||
diff --git a/meta/recipes-devtools/gcc/gcc-cross-canadian_13.2.bb b/meta/recipes-devtools/gcc/gcc-cross-canadian_14.1.bb index bf53c5cd78..bf53c5cd78 100644 --- a/meta/recipes-devtools/gcc/gcc-cross-canadian_13.2.bb +++ b/meta/recipes-devtools/gcc/gcc-cross-canadian_14.1.bb | |||
diff --git a/meta/recipes-devtools/gcc/gcc-cross_13.2.bb b/meta/recipes-devtools/gcc/gcc-cross_14.1.bb index b43cca0c52..b43cca0c52 100644 --- a/meta/recipes-devtools/gcc/gcc-cross_13.2.bb +++ b/meta/recipes-devtools/gcc/gcc-cross_14.1.bb | |||
diff --git a/meta/recipes-devtools/gcc/gcc-crosssdk_13.2.bb b/meta/recipes-devtools/gcc/gcc-crosssdk_14.1.bb index 40a6c4feff..40a6c4feff 100644 --- a/meta/recipes-devtools/gcc/gcc-crosssdk_13.2.bb +++ b/meta/recipes-devtools/gcc/gcc-crosssdk_14.1.bb | |||
diff --git a/meta/recipes-devtools/gcc/gcc-runtime.inc b/meta/recipes-devtools/gcc/gcc-runtime.inc index dbc9141000..89b0bebcfb 100644 --- a/meta/recipes-devtools/gcc/gcc-runtime.inc +++ b/meta/recipes-devtools/gcc/gcc-runtime.inc | |||
@@ -92,7 +92,7 @@ do_install () { | |||
92 | mv ${D}${libdir}/gcc/${TARGET_SYS}/${BINV}/include/* ${D}${libdir}/${TARGET_SYS}/${BINV}/include | 92 | mv ${D}${libdir}/gcc/${TARGET_SYS}/${BINV}/include/* ${D}${libdir}/${TARGET_SYS}/${BINV}/include |
93 | rmdir --ignore-fail-on-non-empty -p ${D}${libdir}/gcc/${TARGET_SYS}/${BINV}/include | 93 | rmdir --ignore-fail-on-non-empty -p ${D}${libdir}/gcc/${TARGET_SYS}/${BINV}/include |
94 | fi | 94 | fi |
95 | rm -rf ${D}${infodir}/libgomp.info ${D}${infodir}/dir | 95 | rm -rf ${D}${infodir}/libgomp.info* ${D}${infodir}/dir |
96 | rm -rf ${D}${infodir}/libitm.info ${D}${infodir}/dir | 96 | rm -rf ${D}${infodir}/libitm.info ${D}${infodir}/dir |
97 | rm -rf ${D}${infodir}/libquadmath.info ${D}${infodir}/dir | 97 | rm -rf ${D}${infodir}/libquadmath.info ${D}${infodir}/dir |
98 | if [ -d ${D}${libdir}/gcc/${TARGET_SYS}/${BINV}/finclude ]; then | 98 | if [ -d ${D}${libdir}/gcc/${TARGET_SYS}/${BINV}/finclude ]; then |
diff --git a/meta/recipes-devtools/gcc/gcc-runtime_13.2.bb b/meta/recipes-devtools/gcc/gcc-runtime_14.1.bb index dd430b57eb..dd430b57eb 100644 --- a/meta/recipes-devtools/gcc/gcc-runtime_13.2.bb +++ b/meta/recipes-devtools/gcc/gcc-runtime_14.1.bb | |||
diff --git a/meta/recipes-devtools/gcc/gcc-sanitizers_13.2.bb b/meta/recipes-devtools/gcc/gcc-sanitizers_14.1.bb index 8bda2ccad6..8bda2ccad6 100644 --- a/meta/recipes-devtools/gcc/gcc-sanitizers_13.2.bb +++ b/meta/recipes-devtools/gcc/gcc-sanitizers_14.1.bb | |||
diff --git a/meta/recipes-devtools/gcc/gcc-source_13.2.bb b/meta/recipes-devtools/gcc/gcc-source_14.1.bb index b890fa33ea..b890fa33ea 100644 --- a/meta/recipes-devtools/gcc/gcc-source_13.2.bb +++ b/meta/recipes-devtools/gcc/gcc-source_14.1.bb | |||
diff --git a/meta/recipes-devtools/gcc/gcc/0001-gcc-4.3.1-ARCH_FLAGS_FOR_TARGET.patch b/meta/recipes-devtools/gcc/gcc/0001-gcc-4.3.1-ARCH_FLAGS_FOR_TARGET.patch index 5c75698eda..0b7c0af86f 100644 --- a/meta/recipes-devtools/gcc/gcc/0001-gcc-4.3.1-ARCH_FLAGS_FOR_TARGET.patch +++ b/meta/recipes-devtools/gcc/gcc/0001-gcc-4.3.1-ARCH_FLAGS_FOR_TARGET.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 553564bdcabdcc5d4cc4de73c7eb94c505ef51f5 Mon Sep 17 00:00:00 2001 | 1 | From 76ef337260aadeb475fd380ff9aca31ea1731345 Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Fri, 29 Mar 2013 08:37:11 +0400 | 3 | Date: Fri, 29 Mar 2013 08:37:11 +0400 |
4 | Subject: [PATCH] gcc-4.3.1: ARCH_FLAGS_FOR_TARGET | 4 | Subject: [PATCH] gcc-4.3.1: ARCH_FLAGS_FOR_TARGET |
@@ -12,10 +12,10 @@ Upstream-Status: Inappropriate [embedded specific] | |||
12 | 2 files changed, 2 insertions(+), 2 deletions(-) | 12 | 2 files changed, 2 insertions(+), 2 deletions(-) |
13 | 13 | ||
14 | diff --git a/configure b/configure | 14 | diff --git a/configure b/configure |
15 | index 117a7ef23f2..535265253fd 100755 | 15 | index 02b435c1163..85a82fad302 100755 |
16 | --- a/configure | 16 | --- a/configure |
17 | +++ b/configure | 17 | +++ b/configure |
18 | @@ -10195,7 +10195,7 @@ fi | 18 | @@ -10387,7 +10387,7 @@ fi |
19 | # for target_alias and gcc doesn't manage it consistently. | 19 | # for target_alias and gcc doesn't manage it consistently. |
20 | target_configargs="--cache-file=./config.cache ${target_configargs}" | 20 | target_configargs="--cache-file=./config.cache ${target_configargs}" |
21 | 21 | ||
@@ -25,10 +25,10 @@ index 117a7ef23f2..535265253fd 100755 | |||
25 | *" newlib "*) | 25 | *" newlib "*) |
26 | case " $target_configargs " in | 26 | case " $target_configargs " in |
27 | diff --git a/configure.ac b/configure.ac | 27 | diff --git a/configure.ac b/configure.ac |
28 | index b3e9bbd2aa5..5ac8d6490f6 100644 | 28 | index 1a19c07a27b..bc8e1b8c4ef 100644 |
29 | --- a/configure.ac | 29 | --- a/configure.ac |
30 | +++ b/configure.ac | 30 | +++ b/configure.ac |
31 | @@ -3351,7 +3351,7 @@ fi | 31 | @@ -3509,7 +3509,7 @@ fi |
32 | # for target_alias and gcc doesn't manage it consistently. | 32 | # for target_alias and gcc doesn't manage it consistently. |
33 | target_configargs="--cache-file=./config.cache ${target_configargs}" | 33 | target_configargs="--cache-file=./config.cache ${target_configargs}" |
34 | 34 | ||
diff --git a/meta/recipes-devtools/gcc/gcc/0002-gcc-poison-system-directories.patch b/meta/recipes-devtools/gcc/gcc/0002-gcc-poison-system-directories.patch index 492300047d..76aa69c26e 100644 --- a/meta/recipes-devtools/gcc/gcc/0002-gcc-poison-system-directories.patch +++ b/meta/recipes-devtools/gcc/gcc/0002-gcc-poison-system-directories.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 52676b5934ba127c3af39fc484c8236c8fa60b96 Mon Sep 17 00:00:00 2001 | 1 | From 05be69910c99d739ce9246a3bb9426fa6d9f19d4 Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Mon, 8 Mar 2021 16:04:20 -0800 | 3 | Date: Mon, 8 Mar 2021 16:04:20 -0800 |
4 | Subject: [PATCH] gcc: poison-system-directories | 4 | Subject: [PATCH] gcc: poison-system-directories |
@@ -25,10 +25,10 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com> | |||
25 | 7 files changed, 86 insertions(+), 2 deletions(-) | 25 | 7 files changed, 86 insertions(+), 2 deletions(-) |
26 | 26 | ||
27 | diff --git a/gcc/common.opt b/gcc/common.opt | 27 | diff --git a/gcc/common.opt b/gcc/common.opt |
28 | index 862c474d3c8..64c4277c991 100644 | 28 | index ad348844775..df3992b420d 100644 |
29 | --- a/gcc/common.opt | 29 | --- a/gcc/common.opt |
30 | +++ b/gcc/common.opt | 30 | +++ b/gcc/common.opt |
31 | @@ -711,6 +711,10 @@ Wreturn-local-addr | 31 | @@ -715,6 +715,10 @@ Wreturn-local-addr |
32 | Common Var(warn_return_local_addr) Init(1) Warning | 32 | Common Var(warn_return_local_addr) Init(1) Warning |
33 | Warn about returning a pointer/reference to a local or temporary variable. | 33 | Warn about returning a pointer/reference to a local or temporary variable. |
34 | 34 | ||
@@ -40,10 +40,10 @@ index 862c474d3c8..64c4277c991 100644 | |||
40 | Common Var(warn_shadow) Warning | 40 | Common Var(warn_shadow) Warning |
41 | Warn when one variable shadows another. Same as -Wshadow=global. | 41 | Warn when one variable shadows another. Same as -Wshadow=global. |
42 | diff --git a/gcc/config.in b/gcc/config.in | 42 | diff --git a/gcc/config.in b/gcc/config.in |
43 | index 4cad077bfbe..80e832fdb84 100644 | 43 | index f3de4ba6776..3ce3113510b 100644 |
44 | --- a/gcc/config.in | 44 | --- a/gcc/config.in |
45 | +++ b/gcc/config.in | 45 | +++ b/gcc/config.in |
46 | @@ -236,6 +236,16 @@ | 46 | @@ -249,6 +249,16 @@ |
47 | #endif | 47 | #endif |
48 | 48 | ||
49 | 49 | ||
@@ -61,18 +61,18 @@ index 4cad077bfbe..80e832fdb84 100644 | |||
61 | optimizer and back end) to be checked for dynamic type safety at runtime. | 61 | optimizer and back end) to be checked for dynamic type safety at runtime. |
62 | This is quite expensive. */ | 62 | This is quite expensive. */ |
63 | diff --git a/gcc/configure b/gcc/configure | 63 | diff --git a/gcc/configure b/gcc/configure |
64 | index c7b26d1927d..3508be7b439 100755 | 64 | index 266ab8f84b2..3c346acbce0 100755 |
65 | --- a/gcc/configure | 65 | --- a/gcc/configure |
66 | +++ b/gcc/configure | 66 | +++ b/gcc/configure |
67 | @@ -1026,6 +1026,7 @@ enable_maintainer_mode | 67 | @@ -1050,6 +1050,7 @@ enable_maintainer_mode |
68 | enable_link_mutex | 68 | enable_link_mutex |
69 | enable_link_serialization | 69 | enable_link_serialization |
70 | enable_version_specific_runtime_libs | 70 | enable_version_specific_runtime_libs |
71 | +enable_poison_system_directories | 71 | +enable_poison_system_directories |
72 | enable_plugin | 72 | enable_plugin |
73 | enable_host_shared | 73 | enable_host_shared |
74 | enable_libquadmath_support | 74 | enable_host_pie |
75 | @@ -1788,6 +1789,8 @@ Optional Features: | 75 | @@ -1823,6 +1824,8 @@ Optional Features: |
76 | --enable-version-specific-runtime-libs | 76 | --enable-version-specific-runtime-libs |
77 | specify that runtime libraries should be installed | 77 | specify that runtime libraries should be installed |
78 | in a compiler-specific directory | 78 | in a compiler-specific directory |
@@ -80,8 +80,8 @@ index c7b26d1927d..3508be7b439 100755 | |||
80 | + warn for use of native system header directories | 80 | + warn for use of native system header directories |
81 | --enable-plugin enable plugin support | 81 | --enable-plugin enable plugin support |
82 | --enable-host-shared build host code as shared libraries | 82 | --enable-host-shared build host code as shared libraries |
83 | --disable-libquadmath-support | 83 | --enable-host-pie build host code as PIE |
84 | @@ -31753,6 +31756,22 @@ if test "${enable_version_specific_runtime_libs+set}" = set; then : | 84 | @@ -34020,6 +34023,22 @@ if test "${enable_version_specific_runtime_libs+set}" = set; then : |
85 | fi | 85 | fi |
86 | 86 | ||
87 | 87 | ||
@@ -105,10 +105,10 @@ index c7b26d1927d..3508be7b439 100755 | |||
105 | 105 | ||
106 | 106 | ||
107 | diff --git a/gcc/configure.ac b/gcc/configure.ac | 107 | diff --git a/gcc/configure.ac b/gcc/configure.ac |
108 | index 09082e8ccae..6cd01a8966b 100644 | 108 | index a5aec1bc967..4d8123085b8 100644 |
109 | --- a/gcc/configure.ac | 109 | --- a/gcc/configure.ac |
110 | +++ b/gcc/configure.ac | 110 | +++ b/gcc/configure.ac |
111 | @@ -7292,6 +7292,22 @@ AC_ARG_ENABLE(version-specific-runtime-libs, | 111 | @@ -7518,6 +7518,22 @@ AC_ARG_ENABLE(version-specific-runtime-libs, |
112 | [specify that runtime libraries should be | 112 | [specify that runtime libraries should be |
113 | installed in a compiler-specific directory])]) | 113 | installed in a compiler-specific directory])]) |
114 | 114 | ||
@@ -132,10 +132,10 @@ index 09082e8ccae..6cd01a8966b 100644 | |||
132 | AC_SUBST(subdirs) | 132 | AC_SUBST(subdirs) |
133 | AC_SUBST(srcdir) | 133 | AC_SUBST(srcdir) |
134 | diff --git a/gcc/doc/invoke.texi b/gcc/doc/invoke.texi | 134 | diff --git a/gcc/doc/invoke.texi b/gcc/doc/invoke.texi |
135 | index de40f62e219..d6f203c8b71 100644 | 135 | index 1006510fc6a..389155f8ed5 100644 |
136 | --- a/gcc/doc/invoke.texi | 136 | --- a/gcc/doc/invoke.texi |
137 | +++ b/gcc/doc/invoke.texi | 137 | +++ b/gcc/doc/invoke.texi |
138 | @@ -384,6 +384,7 @@ Objective-C and Objective-C++ Dialects}. | 138 | @@ -391,6 +391,7 @@ Objective-C and Objective-C++ Dialects}. |
139 | -Wpacked -Wno-packed-bitfield-compat -Wpacked-not-aligned -Wpadded | 139 | -Wpacked -Wno-packed-bitfield-compat -Wpacked-not-aligned -Wpadded |
140 | -Wparentheses -Wno-pedantic-ms-format | 140 | -Wparentheses -Wno-pedantic-ms-format |
141 | -Wpointer-arith -Wno-pointer-compare -Wno-pointer-to-int-cast | 141 | -Wpointer-arith -Wno-pointer-compare -Wno-pointer-to-int-cast |
@@ -143,7 +143,7 @@ index de40f62e219..d6f203c8b71 100644 | |||
143 | -Wno-pragmas -Wno-prio-ctor-dtor -Wredundant-decls | 143 | -Wno-pragmas -Wno-prio-ctor-dtor -Wredundant-decls |
144 | -Wrestrict -Wno-return-local-addr -Wreturn-type | 144 | -Wrestrict -Wno-return-local-addr -Wreturn-type |
145 | -Wno-scalar-storage-order -Wsequence-point | 145 | -Wno-scalar-storage-order -Wsequence-point |
146 | @@ -8426,6 +8427,14 @@ made up of data only and thus requires no special treatment. But, for | 146 | @@ -8860,6 +8861,14 @@ made up of data only and thus requires no special treatment. But, for |
147 | most targets, it is made up of code and thus requires the stack to be | 147 | most targets, it is made up of code and thus requires the stack to be |
148 | made executable in order for the program to work properly. | 148 | made executable in order for the program to work properly. |
149 | 149 | ||
@@ -159,19 +159,19 @@ index de40f62e219..d6f203c8b71 100644 | |||
159 | @opindex Wno-float-equal | 159 | @opindex Wno-float-equal |
160 | @item -Wfloat-equal | 160 | @item -Wfloat-equal |
161 | diff --git a/gcc/gcc.cc b/gcc/gcc.cc | 161 | diff --git a/gcc/gcc.cc b/gcc/gcc.cc |
162 | index 16bb07f2cdc..5feae021545 100644 | 162 | index 728332b8153..343e4915097 100644 |
163 | --- a/gcc/gcc.cc | 163 | --- a/gcc/gcc.cc |
164 | +++ b/gcc/gcc.cc | 164 | +++ b/gcc/gcc.cc |
165 | @@ -1146,6 +1146,8 @@ proper position among the other output files. */ | 165 | @@ -1159,6 +1159,8 @@ proper position among the other output files. */ |
166 | "%{fuse-ld=*:-fuse-ld=%*} " LINK_COMPRESS_DEBUG_SPEC \ | 166 | "%{fuse-ld=*:-fuse-ld=%*} " LINK_COMPRESS_DEBUG_SPEC \ |
167 | "%X %{o*} %{e*} %{N} %{n} %{r}\ | 167 | "%X %{o*} %{e*} %{N} %{n} %{r}\ |
168 | %{s} %{t} %{u*} %{z} %{Z} %{!nostdlib:%{!r:%{!nostartfiles:%S}}} \ | 168 | %{s} %{t} %{u*} %{z} %{Z} %{!nostdlib:%{!r:%{!nostartfiles:%S}}} \ |
169 | + %{Wno-poison-system-directories:--no-poison-system-directories} \ | 169 | + %{Wno-poison-system-directories:--no-poison-system-directories} \ |
170 | + %{Werror=poison-system-directories:--error-poison-system-directories} \ | 170 | + %{Werror=poison-system-directories:--error-poison-system-directories} \ |
171 | %{static|no-pie|static-pie:} %@{L*} %(mfwrap) %(link_libgcc) " \ | 171 | %{static|no-pie|static-pie:} %@{L*} %(link_libgcc) " \ |
172 | VTABLE_VERIFICATION_SPEC " " SANITIZER_EARLY_SPEC " %o "" \ | 172 | VTABLE_VERIFICATION_SPEC " " SANITIZER_EARLY_SPEC " %o "" \ |
173 | %{fopenacc|fopenmp|%:gt(%{ftree-parallelize-loops=*:%*} 1):\ | 173 | %{fopenacc|fopenmp|%:gt(%{ftree-parallelize-loops=*:%*} 1):\ |
174 | @@ -1241,8 +1243,11 @@ static const char *cpp_unique_options = | 174 | @@ -1260,8 +1262,11 @@ static const char *cpp_unique_options = |
175 | static const char *cpp_options = | 175 | static const char *cpp_options = |
176 | "%(cpp_unique_options) %1 %{m*} %{std*&ansi&trigraphs} %{W*&pedantic*} %{w}\ | 176 | "%(cpp_unique_options) %1 %{m*} %{std*&ansi&trigraphs} %{W*&pedantic*} %{w}\ |
177 | %{f*} %{g*:%{%:debug-level-gt(0):%{g*}\ | 177 | %{f*} %{g*:%{%:debug-level-gt(0):%{g*}\ |
@@ -186,7 +186,7 @@ index 16bb07f2cdc..5feae021545 100644 | |||
186 | /* Pass -d* flags, possibly modifying -dumpdir, -dumpbase et al. | 186 | /* Pass -d* flags, possibly modifying -dumpdir, -dumpbase et al. |
187 | 187 | ||
188 | diff --git a/gcc/incpath.cc b/gcc/incpath.cc | 188 | diff --git a/gcc/incpath.cc b/gcc/incpath.cc |
189 | index 4d44321183f..46c0d543205 100644 | 189 | index 64cdd2f4a1b..e572d98ab17 100644 |
190 | --- a/gcc/incpath.cc | 190 | --- a/gcc/incpath.cc |
191 | +++ b/gcc/incpath.cc | 191 | +++ b/gcc/incpath.cc |
192 | @@ -26,6 +26,7 @@ | 192 | @@ -26,6 +26,7 @@ |
diff --git a/meta/recipes-devtools/gcc/gcc/0003-64-bit-multilib-hack.patch b/meta/recipes-devtools/gcc/gcc/0003-64-bit-multilib-hack.patch index 69e7fa0ba9..dfd370aba9 100644 --- a/meta/recipes-devtools/gcc/gcc/0003-64-bit-multilib-hack.patch +++ b/meta/recipes-devtools/gcc/gcc/0003-64-bit-multilib-hack.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From febfac59d0e8a864370d0b4018b4e497ceec156d Mon Sep 17 00:00:00 2001 | 1 | From 25bc008191c9b518f30e3ed87e204e25bdafb854 Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Fri, 29 Mar 2013 09:10:06 +0400 | 3 | Date: Fri, 29 Mar 2013 09:10:06 +0400 |
4 | Subject: [PATCH] 64-bit multilib hack | 4 | Subject: [PATCH] 64-bit multilib hack |
@@ -28,19 +28,17 @@ Upstream-Status: Inappropriate [OE-Specific] | |||
28 | Signed-off-by: Khem Raj <raj.khem@gmail.com> | 28 | Signed-off-by: Khem Raj <raj.khem@gmail.com> |
29 | Signed-off-by: Elvis Dowson <elvis.dowson@gmail.com> | 29 | Signed-off-by: Elvis Dowson <elvis.dowson@gmail.com> |
30 | Signed-off-by: Mark Hatle <mark.hatle@windriver.com> | 30 | Signed-off-by: Mark Hatle <mark.hatle@windriver.com> |
31 | Signed-off-by: Zang Ruochen <zangruochen@loongson.cn> | ||
32 | --- | 31 | --- |
33 | gcc/config/aarch64/t-aarch64-linux | 8 ++++---- | 32 | gcc/config/aarch64/t-aarch64-linux | 8 ++++---- |
34 | gcc/config/arc/t-multilib-linux | 4 ++-- | 33 | gcc/config/arc/t-multilib-linux | 4 ++-- |
35 | gcc/config/i386/t-linux64 | 6 ++---- | 34 | gcc/config/i386/t-linux64 | 6 ++---- |
36 | gcc/config/mips/t-linux64 | 28 ++-------------------------- | 35 | gcc/config/mips/t-linux64 | 28 ++-------------------------- |
37 | gcc/config/riscv/t-linux | 4 ++-- | 36 | gcc/config/riscv/t-linux | 6 +++--- |
38 | gcc/config/rs6000/t-linux64 | 5 ++--- | 37 | gcc/config/rs6000/t-linux64 | 5 ++--- |
39 | gcc/config/loongarch/t-linux | 34 ++++++++++++++++++---------------- | 38 | 6 files changed, 15 insertions(+), 42 deletions(-) |
40 | 7 files changed, 32 insertions(+), 57 deletions(-) | ||
41 | 39 | ||
42 | diff --git a/gcc/config/aarch64/t-aarch64-linux b/gcc/config/aarch64/t-aarch64-linux | 40 | diff --git a/gcc/config/aarch64/t-aarch64-linux b/gcc/config/aarch64/t-aarch64-linux |
43 | index 57bf4100fcd..aaef5da8059 100644 | 41 | index 3f7b786ecbb..a6c1c92fc7f 100644 |
44 | --- a/gcc/config/aarch64/t-aarch64-linux | 42 | --- a/gcc/config/aarch64/t-aarch64-linux |
45 | +++ b/gcc/config/aarch64/t-aarch64-linux | 43 | +++ b/gcc/config/aarch64/t-aarch64-linux |
46 | @@ -21,8 +21,8 @@ | 44 | @@ -21,8 +21,8 @@ |
@@ -57,7 +55,7 @@ index 57bf4100fcd..aaef5da8059 100644 | |||
57 | -MULTILIB_OSDIRNAMES += mabi.ilp32=../libilp32$(call if_multiarch,:aarch64$(AARCH_BE)-linux-gnu_ilp32) | 55 | -MULTILIB_OSDIRNAMES += mabi.ilp32=../libilp32$(call if_multiarch,:aarch64$(AARCH_BE)-linux-gnu_ilp32) |
58 | +#MULTILIB_OSDIRNAMES += mabi.ilp32=../libilp32$(call if_multiarch,:aarch64$(AARCH_BE)-linux-gnu_ilp32) | 56 | +#MULTILIB_OSDIRNAMES += mabi.ilp32=../libilp32$(call if_multiarch,:aarch64$(AARCH_BE)-linux-gnu_ilp32) |
59 | diff --git a/gcc/config/arc/t-multilib-linux b/gcc/config/arc/t-multilib-linux | 57 | diff --git a/gcc/config/arc/t-multilib-linux b/gcc/config/arc/t-multilib-linux |
60 | index a839e4ea67c..f92664573a9 100644 | 58 | index 07bc77ba34a..f7e2e3919cf 100644 |
61 | --- a/gcc/config/arc/t-multilib-linux | 59 | --- a/gcc/config/arc/t-multilib-linux |
62 | +++ b/gcc/config/arc/t-multilib-linux | 60 | +++ b/gcc/config/arc/t-multilib-linux |
63 | @@ -16,9 +16,9 @@ | 61 | @@ -16,9 +16,9 @@ |
@@ -73,7 +71,7 @@ index a839e4ea67c..f92664573a9 100644 | |||
73 | # Aliases: | 71 | # Aliases: |
74 | MULTILIB_MATCHES += mcpu?arc700=mA7 | 72 | MULTILIB_MATCHES += mcpu?arc700=mA7 |
75 | diff --git a/gcc/config/i386/t-linux64 b/gcc/config/i386/t-linux64 | 73 | diff --git a/gcc/config/i386/t-linux64 b/gcc/config/i386/t-linux64 |
76 | index 138956b0962..d6e0cdc4342 100644 | 74 | index f9edc289e57..047d8e4aff9 100644 |
77 | --- a/gcc/config/i386/t-linux64 | 75 | --- a/gcc/config/i386/t-linux64 |
78 | +++ b/gcc/config/i386/t-linux64 | 76 | +++ b/gcc/config/i386/t-linux64 |
79 | @@ -32,7 +32,5 @@ | 77 | @@ -32,7 +32,5 @@ |
@@ -87,7 +85,7 @@ index 138956b0962..d6e0cdc4342 100644 | |||
87 | +MULTILIB_DIRNAMES = . . | 85 | +MULTILIB_DIRNAMES = . . |
88 | +MULTILIB_OSDIRNAMES = ../$(shell basename $(base_libdir)) ../$(shell basename $(base_libdir)) | 86 | +MULTILIB_OSDIRNAMES = ../$(shell basename $(base_libdir)) ../$(shell basename $(base_libdir)) |
89 | diff --git a/gcc/config/mips/t-linux64 b/gcc/config/mips/t-linux64 | 87 | diff --git a/gcc/config/mips/t-linux64 b/gcc/config/mips/t-linux64 |
90 | index 176091cabb6..8258ef40559 100644 | 88 | index e4e2f5eaedb..c503974de37 100644 |
91 | --- a/gcc/config/mips/t-linux64 | 89 | --- a/gcc/config/mips/t-linux64 |
92 | +++ b/gcc/config/mips/t-linux64 | 90 | +++ b/gcc/config/mips/t-linux64 |
93 | @@ -17,29 +17,5 @@ | 91 | @@ -17,29 +17,5 @@ |
@@ -123,17 +121,20 @@ index 176091cabb6..8258ef40559 100644 | |||
123 | +MULTILIB_DIRNAMES = . . . | 121 | +MULTILIB_DIRNAMES = . . . |
124 | +MULTILIB_OSDIRNAMES = ../$(shell basename $(base_libdir)) ../$(shell basename $(base_libdir)) ../$(shell basename $(base_libdir)) | 122 | +MULTILIB_OSDIRNAMES = ../$(shell basename $(base_libdir)) ../$(shell basename $(base_libdir)) ../$(shell basename $(base_libdir)) |
125 | diff --git a/gcc/config/riscv/t-linux b/gcc/config/riscv/t-linux | 123 | diff --git a/gcc/config/riscv/t-linux b/gcc/config/riscv/t-linux |
126 | index 216d2776a18..e3c520f4bf6 100644 | 124 | index a6f64f88d25..0e21db58f7d 100644 |
127 | --- a/gcc/config/riscv/t-linux | 125 | --- a/gcc/config/riscv/t-linux |
128 | +++ b/gcc/config/riscv/t-linux | 126 | +++ b/gcc/config/riscv/t-linux |
129 | @@ -1,3 +1,3 @@ | 127 | @@ -1,5 +1,5 @@ |
130 | # Only XLEN and ABI affect Linux multilib dir names, e.g. /lib32/ilp32d/ | 128 | # Only XLEN and ABI affect Linux multilib dir names, e.g. /lib32/ilp32d/ |
131 | -MULTILIB_DIRNAMES := $(patsubst rv32%,lib32,$(patsubst rv64%,lib64,$(MULTILIB_DIRNAMES))) | 129 | -MULTILIB_DIRNAMES := $(patsubst rv32%,lib32,$(patsubst rv64%,lib64,$(MULTILIB_DIRNAMES))) |
132 | -MULTILIB_OSDIRNAMES := $(patsubst lib%,../lib%,$(MULTILIB_DIRNAMES)) | 130 | -MULTILIB_OSDIRNAMES := $(patsubst lib%,../lib%,$(MULTILIB_DIRNAMES)) |
133 | +#MULTILIB_DIRNAMES := $(patsubst rv32%,lib32,$(patsubst rv64%,lib64,$(MULTILIB_DIRNAMES))) | 131 | +#MULTILIB_DIRNAMES := $(patsubst rv32%,lib32,$(patsubst rv64%,lib64,$(MULTILIB_DIRNAMES))) |
134 | +#MULTILIB_OSDIRNAMES := $(patsubst lib%,../lib%,$(MULTILIB_DIRNAMES)) | 132 | +#MULTILIB_OSDIRNAMES := $(patsubst lib%,../lib%,$(MULTILIB_DIRNAMES)) |
133 | |||
134 | -MULTIARCH_DIRNAME := $(call if_multiarch,$(firstword $(subst -, ,$(target)))-linux-gnu) | ||
135 | +#MULTIARCH_DIRNAME := $(call if_multiarch,$(firstword $(subst -, ,$(target)))-linux-gnu) | ||
135 | diff --git a/gcc/config/rs6000/t-linux64 b/gcc/config/rs6000/t-linux64 | 136 | diff --git a/gcc/config/rs6000/t-linux64 b/gcc/config/rs6000/t-linux64 |
136 | index 01a94242308..1429eceaebf 100644 | 137 | index 248b5891e36..379689561a7 100644 |
137 | --- a/gcc/config/rs6000/t-linux64 | 138 | --- a/gcc/config/rs6000/t-linux64 |
138 | +++ b/gcc/config/rs6000/t-linux64 | 139 | +++ b/gcc/config/rs6000/t-linux64 |
139 | @@ -26,10 +26,9 @@ | 140 | @@ -26,10 +26,9 @@ |
@@ -149,53 +150,3 @@ index 01a94242308..1429eceaebf 100644 | |||
149 | 150 | ||
150 | rs6000-linux.o: $(srcdir)/config/rs6000/rs6000-linux.cc | 151 | rs6000-linux.o: $(srcdir)/config/rs6000/rs6000-linux.cc |
151 | $(COMPILE) $< | 152 | $(COMPILE) $< |
152 | diff --git a/gcc/config/loongarch/t-linux b/gcc/config/loongarch/t-linux | ||
153 | index e40da1792..0c7ec9f8a 100644 | ||
154 | --- a/gcc/config/loongarch/t-linux | ||
155 | +++ b/gcc/config/loongarch/t-linux | ||
156 | @@ -18,7 +18,9 @@ | ||
157 | |||
158 | # Multilib | ||
159 | MULTILIB_OPTIONS = mabi=lp64d/mabi=lp64f/mabi=lp64s | ||
160 | -MULTILIB_DIRNAMES = base/lp64d base/lp64f base/lp64s | ||
161 | +#MULTILIB_DIRNAMES = base/lp64d base/lp64f base/lp64s | ||
162 | +MULTILIB_DIRNAMES = . . . | ||
163 | +MULTILIB_OSDIRNAMES = ../$(shell basename $(base_libdir)) ../$(shell basename $(base_libdir)) ../$(shell basename $(base_libdir)) | ||
164 | |||
165 | # The GCC driver always gets all abi-related options on the command line. | ||
166 | # (see loongarch-driver.c:driver_get_normalized_m_opts) | ||
167 | @@ -36,18 +38,18 @@ else | ||
168 | endif | ||
169 | |||
170 | # Don't define MULTILIB_OSDIRNAMES if multilib is disabled. | ||
171 | -ifeq ($(filter LA_DISABLE_MULTILIB,$(tm_defines)),) | ||
172 | - | ||
173 | - MULTILIB_OSDIRNAMES = \ | ||
174 | - mabi.lp64d=../lib64$\ | ||
175 | - $(call if_multiarch,:loongarch64-linux-gnu) | ||
176 | - | ||
177 | - MULTILIB_OSDIRNAMES += \ | ||
178 | - mabi.lp64f=../lib64/f32$\ | ||
179 | - $(call if_multiarch,:loongarch64-linux-gnuf32) | ||
180 | - | ||
181 | - MULTILIB_OSDIRNAMES += \ | ||
182 | - mabi.lp64s=../lib64/sf$\ | ||
183 | - $(call if_multiarch,:loongarch64-linux-gnusf) | ||
184 | - | ||
185 | -endif | ||
186 | +#ifeq ($(filter LA_DISABLE_MULTILIB,$(tm_defines)),) | ||
187 | +# | ||
188 | +# MULTILIB_OSDIRNAMES = \ | ||
189 | +# mabi.lp64d=../lib64$\ | ||
190 | +# $(call if_multiarch,:loongarch64-linux-gnu) | ||
191 | +# | ||
192 | +# MULTILIB_OSDIRNAMES += \ | ||
193 | +# mabi.lp64f=../lib64/f32$\ | ||
194 | +# $(call if_multiarch,:loongarch64-linux-gnuf32) | ||
195 | +# | ||
196 | +# MULTILIB_OSDIRNAMES += \ | ||
197 | +# mabi.lp64s=../lib64/sf$\ | ||
198 | +# $(call if_multiarch,:loongarch64-linux-gnusf) | ||
199 | +# | ||
200 | +#endif | ||
201 | |||
diff --git a/meta/recipes-devtools/gcc/gcc/0004-Pass-CXXFLAGS_FOR_BUILD-in-a-couple-of-places-to-avo.patch b/meta/recipes-devtools/gcc/gcc/0004-Pass-CXXFLAGS_FOR_BUILD-in-a-couple-of-places-to-avo.patch index 7e33bf17b0..35e92a6a9e 100644 --- a/meta/recipes-devtools/gcc/gcc/0004-Pass-CXXFLAGS_FOR_BUILD-in-a-couple-of-places-to-avo.patch +++ b/meta/recipes-devtools/gcc/gcc/0004-Pass-CXXFLAGS_FOR_BUILD-in-a-couple-of-places-to-avo.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 6fbf920ccde6efc2d0caafde996d9e5738a1ba37 Mon Sep 17 00:00:00 2001 | 1 | From 5bef09b829ab8042acc045474cf10f4c4fb15dd5 Mon Sep 17 00:00:00 2001 |
2 | From: Richard Purdie <richard.purdie@linuxfoundation.org> | 2 | From: Richard Purdie <richard.purdie@linuxfoundation.org> |
3 | Date: Thu, 28 Oct 2021 11:33:40 +0100 | 3 | Date: Thu, 28 Oct 2021 11:33:40 +0100 |
4 | Subject: [PATCH] Pass CXXFLAGS_FOR_BUILD in a couple of places to avoid these | 4 | Subject: [PATCH] Pass CXXFLAGS_FOR_BUILD in a couple of places to avoid these |
@@ -23,10 +23,10 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com> | |||
23 | 2 files changed, 4 insertions(+) | 23 | 2 files changed, 4 insertions(+) |
24 | 24 | ||
25 | diff --git a/Makefile.in b/Makefile.in | 25 | diff --git a/Makefile.in b/Makefile.in |
26 | index 06a9398e172..4b0069b257c 100644 | 26 | index db4fa6c6260..dee862be8a5 100644 |
27 | --- a/Makefile.in | 27 | --- a/Makefile.in |
28 | +++ b/Makefile.in | 28 | +++ b/Makefile.in |
29 | @@ -178,6 +178,7 @@ BUILD_EXPORTS = \ | 29 | @@ -179,6 +179,7 @@ BUILD_EXPORTS = \ |
30 | # built for the build system to override those in BASE_FLAGS_TO_PASS. | 30 | # built for the build system to override those in BASE_FLAGS_TO_PASS. |
31 | EXTRA_BUILD_FLAGS = \ | 31 | EXTRA_BUILD_FLAGS = \ |
32 | CFLAGS="$(CFLAGS_FOR_BUILD)" \ | 32 | CFLAGS="$(CFLAGS_FOR_BUILD)" \ |
@@ -34,7 +34,7 @@ index 06a9398e172..4b0069b257c 100644 | |||
34 | LDFLAGS="$(LDFLAGS_FOR_BUILD)" | 34 | LDFLAGS="$(LDFLAGS_FOR_BUILD)" |
35 | 35 | ||
36 | # This is the list of directories to built for the host system. | 36 | # This is the list of directories to built for the host system. |
37 | @@ -210,6 +211,7 @@ HOST_EXPORTS = \ | 37 | @@ -211,6 +212,7 @@ HOST_EXPORTS = \ |
38 | CPP_FOR_BUILD="$(CPP_FOR_BUILD)"; export CPP_FOR_BUILD; \ | 38 | CPP_FOR_BUILD="$(CPP_FOR_BUILD)"; export CPP_FOR_BUILD; \ |
39 | CPPFLAGS_FOR_BUILD="$(CPPFLAGS_FOR_BUILD)"; export CPPFLAGS_FOR_BUILD; \ | 39 | CPPFLAGS_FOR_BUILD="$(CPPFLAGS_FOR_BUILD)"; export CPPFLAGS_FOR_BUILD; \ |
40 | CXX_FOR_BUILD="$(CXX_FOR_BUILD)"; export CXX_FOR_BUILD; \ | 40 | CXX_FOR_BUILD="$(CXX_FOR_BUILD)"; export CXX_FOR_BUILD; \ |
@@ -43,10 +43,10 @@ index 06a9398e172..4b0069b257c 100644 | |||
43 | DSYMUTIL="$(DSYMUTIL)"; export DSYMUTIL; \ | 43 | DSYMUTIL="$(DSYMUTIL)"; export DSYMUTIL; \ |
44 | LD="$(LD)"; export LD; \ | 44 | LD="$(LD)"; export LD; \ |
45 | diff --git a/Makefile.tpl b/Makefile.tpl | 45 | diff --git a/Makefile.tpl b/Makefile.tpl |
46 | index dfbd74b68f8..419b332953b 100644 | 46 | index 1d5813cd569..ff0fae53b0c 100644 |
47 | --- a/Makefile.tpl | 47 | --- a/Makefile.tpl |
48 | +++ b/Makefile.tpl | 48 | +++ b/Makefile.tpl |
49 | @@ -181,6 +181,7 @@ BUILD_EXPORTS = \ | 49 | @@ -182,6 +182,7 @@ BUILD_EXPORTS = \ |
50 | # built for the build system to override those in BASE_FLAGS_TO_PASS. | 50 | # built for the build system to override those in BASE_FLAGS_TO_PASS. |
51 | EXTRA_BUILD_FLAGS = \ | 51 | EXTRA_BUILD_FLAGS = \ |
52 | CFLAGS="$(CFLAGS_FOR_BUILD)" \ | 52 | CFLAGS="$(CFLAGS_FOR_BUILD)" \ |
@@ -54,7 +54,7 @@ index dfbd74b68f8..419b332953b 100644 | |||
54 | LDFLAGS="$(LDFLAGS_FOR_BUILD)" | 54 | LDFLAGS="$(LDFLAGS_FOR_BUILD)" |
55 | 55 | ||
56 | # This is the list of directories to built for the host system. | 56 | # This is the list of directories to built for the host system. |
57 | @@ -213,6 +214,7 @@ HOST_EXPORTS = \ | 57 | @@ -214,6 +215,7 @@ HOST_EXPORTS = \ |
58 | CPP_FOR_BUILD="$(CPP_FOR_BUILD)"; export CPP_FOR_BUILD; \ | 58 | CPP_FOR_BUILD="$(CPP_FOR_BUILD)"; export CPP_FOR_BUILD; \ |
59 | CPPFLAGS_FOR_BUILD="$(CPPFLAGS_FOR_BUILD)"; export CPPFLAGS_FOR_BUILD; \ | 59 | CPPFLAGS_FOR_BUILD="$(CPPFLAGS_FOR_BUILD)"; export CPPFLAGS_FOR_BUILD; \ |
60 | CXX_FOR_BUILD="$(CXX_FOR_BUILD)"; export CXX_FOR_BUILD; \ | 60 | CXX_FOR_BUILD="$(CXX_FOR_BUILD)"; export CXX_FOR_BUILD; \ |
diff --git a/meta/recipes-devtools/gcc/gcc/0005-Use-the-defaults.h-in-B-instead-of-S-and-t-oe-in-B.patch b/meta/recipes-devtools/gcc/gcc/0005-Use-the-defaults.h-in-B-instead-of-S-and-t-oe-in-B.patch index db2fea3d16..8c6ae38e95 100644 --- a/meta/recipes-devtools/gcc/gcc/0005-Use-the-defaults.h-in-B-instead-of-S-and-t-oe-in-B.patch +++ b/meta/recipes-devtools/gcc/gcc/0005-Use-the-defaults.h-in-B-instead-of-S-and-t-oe-in-B.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 2cedf13819c0cc929660072d8a972f5e422f9701 Mon Sep 17 00:00:00 2001 | 1 | From fd3f513fa7e4f63a2b28b31ed5a4ddf8f5c543c0 Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Fri, 29 Mar 2013 09:17:25 +0400 | 3 | Date: Fri, 29 Mar 2013 09:17:25 +0400 |
4 | Subject: [PATCH] Use the defaults.h in ${B} instead of ${S}, and t-oe in ${B} | 4 | Subject: [PATCH] Use the defaults.h in ${B} instead of ${S}, and t-oe in ${B} |
@@ -26,10 +26,10 @@ Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com> | |||
26 | 4 files changed, 7 insertions(+), 7 deletions(-) | 26 | 4 files changed, 7 insertions(+), 7 deletions(-) |
27 | 27 | ||
28 | diff --git a/gcc/Makefile.in b/gcc/Makefile.in | 28 | diff --git a/gcc/Makefile.in b/gcc/Makefile.in |
29 | index 775aaa1b3c4..04f28984b34 100644 | 29 | index a74761b7ab3..956437df95b 100644 |
30 | --- a/gcc/Makefile.in | 30 | --- a/gcc/Makefile.in |
31 | +++ b/gcc/Makefile.in | 31 | +++ b/gcc/Makefile.in |
32 | @@ -561,7 +561,7 @@ TARGET_SYSTEM_ROOT = @TARGET_SYSTEM_ROOT@ | 32 | @@ -590,7 +590,7 @@ TARGET_SYSTEM_ROOT = @TARGET_SYSTEM_ROOT@ |
33 | TARGET_SYSTEM_ROOT_DEFINE = @TARGET_SYSTEM_ROOT_DEFINE@ | 33 | TARGET_SYSTEM_ROOT_DEFINE = @TARGET_SYSTEM_ROOT_DEFINE@ |
34 | 34 | ||
35 | xmake_file=@xmake_file@ | 35 | xmake_file=@xmake_file@ |
@@ -39,10 +39,10 @@ index 775aaa1b3c4..04f28984b34 100644 | |||
39 | TM_MULTILIB_CONFIG=@TM_MULTILIB_CONFIG@ | 39 | TM_MULTILIB_CONFIG=@TM_MULTILIB_CONFIG@ |
40 | TM_MULTILIB_EXCEPTIONS_CONFIG=@TM_MULTILIB_EXCEPTIONS_CONFIG@ | 40 | TM_MULTILIB_EXCEPTIONS_CONFIG=@TM_MULTILIB_EXCEPTIONS_CONFIG@ |
41 | diff --git a/gcc/configure b/gcc/configure | 41 | diff --git a/gcc/configure b/gcc/configure |
42 | index 3508be7b439..cf773a8b854 100755 | 42 | index 3c346acbce0..3870d6e9b6d 100755 |
43 | --- a/gcc/configure | 43 | --- a/gcc/configure |
44 | +++ b/gcc/configure | 44 | +++ b/gcc/configure |
45 | @@ -13507,8 +13507,8 @@ for f in $tm_file; do | 45 | @@ -15155,8 +15155,8 @@ for f in $tm_file; do |
46 | tm_include_list="${tm_include_list} $f" | 46 | tm_include_list="${tm_include_list} $f" |
47 | ;; | 47 | ;; |
48 | defaults.h ) | 48 | defaults.h ) |
@@ -54,10 +54,10 @@ index 3508be7b439..cf773a8b854 100755 | |||
54 | * ) | 54 | * ) |
55 | tm_file_list="${tm_file_list} \$(srcdir)/config/$f" | 55 | tm_file_list="${tm_file_list} \$(srcdir)/config/$f" |
56 | diff --git a/gcc/configure.ac b/gcc/configure.ac | 56 | diff --git a/gcc/configure.ac b/gcc/configure.ac |
57 | index 6cd01a8966b..22591478b72 100644 | 57 | index 4d8123085b8..15670de1706 100644 |
58 | --- a/gcc/configure.ac | 58 | --- a/gcc/configure.ac |
59 | +++ b/gcc/configure.ac | 59 | +++ b/gcc/configure.ac |
60 | @@ -2357,8 +2357,8 @@ for f in $tm_file; do | 60 | @@ -2383,8 +2383,8 @@ for f in $tm_file; do |
61 | tm_include_list="${tm_include_list} $f" | 61 | tm_include_list="${tm_include_list} $f" |
62 | ;; | 62 | ;; |
63 | defaults.h ) | 63 | defaults.h ) |
@@ -69,7 +69,7 @@ index 6cd01a8966b..22591478b72 100644 | |||
69 | * ) | 69 | * ) |
70 | tm_file_list="${tm_file_list} \$(srcdir)/config/$f" | 70 | tm_file_list="${tm_file_list} \$(srcdir)/config/$f" |
71 | diff --git a/gcc/mkconfig.sh b/gcc/mkconfig.sh | 71 | diff --git a/gcc/mkconfig.sh b/gcc/mkconfig.sh |
72 | index 054ede89647..3b2c2b9df37 100644 | 72 | index 1b015b70b38..145af56745e 100644 |
73 | --- a/gcc/mkconfig.sh | 73 | --- a/gcc/mkconfig.sh |
74 | +++ b/gcc/mkconfig.sh | 74 | +++ b/gcc/mkconfig.sh |
75 | @@ -77,7 +77,7 @@ if [ -n "$HEADERS" ]; then | 75 | @@ -77,7 +77,7 @@ if [ -n "$HEADERS" ]; then |
diff --git a/meta/recipes-devtools/gcc/gcc/0006-cpp-honor-sysroot.patch b/meta/recipes-devtools/gcc/gcc/0006-cpp-honor-sysroot.patch index 704c44cb72..e0d6aea46b 100644 --- a/meta/recipes-devtools/gcc/gcc/0006-cpp-honor-sysroot.patch +++ b/meta/recipes-devtools/gcc/gcc/0006-cpp-honor-sysroot.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From f0b4d02a3a3dca1d67fd7add15ed63c2cd572bb9 Mon Sep 17 00:00:00 2001 | 1 | From 970960876045c99011658836f385c6f4d6d78fa6 Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Fri, 29 Mar 2013 09:22:00 +0400 | 3 | Date: Fri, 29 Mar 2013 09:22:00 +0400 |
4 | Subject: [PATCH] cpp: honor sysroot. | 4 | Subject: [PATCH] cpp: honor sysroot. |
@@ -26,7 +26,7 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com> | |||
26 | 2 files changed, 2 insertions(+), 2 deletions(-) | 26 | 2 files changed, 2 insertions(+), 2 deletions(-) |
27 | 27 | ||
28 | diff --git a/gcc/cp/lang-specs.h b/gcc/cp/lang-specs.h | 28 | diff --git a/gcc/cp/lang-specs.h b/gcc/cp/lang-specs.h |
29 | index c591d155cc1..61927869fe1 100644 | 29 | index 7a7f5ff0ab5..fb3d88cdb11 100644 |
30 | --- a/gcc/cp/lang-specs.h | 30 | --- a/gcc/cp/lang-specs.h |
31 | +++ b/gcc/cp/lang-specs.h | 31 | +++ b/gcc/cp/lang-specs.h |
32 | @@ -116,7 +116,7 @@ along with GCC; see the file COPYING3. If not see | 32 | @@ -116,7 +116,7 @@ along with GCC; see the file COPYING3. If not see |
@@ -39,11 +39,11 @@ index c591d155cc1..61927869fe1 100644 | |||
39 | " %{fmodule-only:%{!S:-o %g.s%V}}" | 39 | " %{fmodule-only:%{!S:-o %g.s%V}}" |
40 | " %{!fmodule-only:%{!fmodule-header*:%(invoke_as)}}}" | 40 | " %{!fmodule-only:%{!fmodule-header*:%(invoke_as)}}}" |
41 | diff --git a/gcc/gcc.cc b/gcc/gcc.cc | 41 | diff --git a/gcc/gcc.cc b/gcc/gcc.cc |
42 | index 5feae021545..8af0c814c33 100644 | 42 | index 343e4915097..e3e9374d1cc 100644 |
43 | --- a/gcc/gcc.cc | 43 | --- a/gcc/gcc.cc |
44 | +++ b/gcc/gcc.cc | 44 | +++ b/gcc/gcc.cc |
45 | @@ -1468,7 +1468,7 @@ static const struct compiler default_compilers[] = | 45 | @@ -1487,7 +1487,7 @@ static const struct compiler default_compilers[] = |
46 | %W{o*:--output-pch %*}}%V}}}}}}}", 0, 0, 0}, | 46 | %W{o*:--output-pch %w%*}}%{!S:%V}}}}}}}}", 0, 0, 0}, |
47 | {".i", "@cpp-output", 0, 0, 0}, | 47 | {".i", "@cpp-output", 0, 0, 0}, |
48 | {"@cpp-output", | 48 | {"@cpp-output", |
49 | - "%{!M:%{!MM:%{!E:cc1 -fpreprocessed %i %(cc1_options) %{!fsyntax-only:%(invoke_as)}}}}", 0, 0, 0}, | 49 | - "%{!M:%{!MM:%{!E:cc1 -fpreprocessed %i %(cc1_options) %{!fsyntax-only:%(invoke_as)}}}}", 0, 0, 0}, |
diff --git a/meta/recipes-devtools/gcc/gcc/0007-Define-GLIBC_DYNAMIC_LINKER-and-UCLIBC_DYNAMIC_LINKE.patch b/meta/recipes-devtools/gcc/gcc/0007-Define-GLIBC_DYNAMIC_LINKER-and-UCLIBC_DYNAMIC_LINKE.patch index 079142c540..eacdbd1dbe 100644 --- a/meta/recipes-devtools/gcc/gcc/0007-Define-GLIBC_DYNAMIC_LINKER-and-UCLIBC_DYNAMIC_LINKE.patch +++ b/meta/recipes-devtools/gcc/gcc/0007-Define-GLIBC_DYNAMIC_LINKER-and-UCLIBC_DYNAMIC_LINKE.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From aacfd6e14dd583b1fdc65691def61c5e1bc89708 Mon Sep 17 00:00:00 2001 | 1 | From bce0b7e2e130a069a93c839b856aa19b64d5ca54 Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Fri, 29 Mar 2013 09:24:50 +0400 | 3 | Date: Fri, 29 Mar 2013 09:24:50 +0400 |
4 | Subject: [PATCH] Define GLIBC_DYNAMIC_LINKER and UCLIBC_DYNAMIC_LINKER | 4 | Subject: [PATCH] Define GLIBC_DYNAMIC_LINKER and UCLIBC_DYNAMIC_LINKER |
@@ -36,7 +36,7 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com> | |||
36 | 18 files changed, 53 insertions(+), 58 deletions(-) | 36 | 18 files changed, 53 insertions(+), 58 deletions(-) |
37 | 37 | ||
38 | diff --git a/gcc/config/aarch64/aarch64-linux.h b/gcc/config/aarch64/aarch64-linux.h | 38 | diff --git a/gcc/config/aarch64/aarch64-linux.h b/gcc/config/aarch64/aarch64-linux.h |
39 | index 4277f03da2a..e4c92c03291 100644 | 39 | index 8e51c8202cc..732e4f4a477 100644 |
40 | --- a/gcc/config/aarch64/aarch64-linux.h | 40 | --- a/gcc/config/aarch64/aarch64-linux.h |
41 | +++ b/gcc/config/aarch64/aarch64-linux.h | 41 | +++ b/gcc/config/aarch64/aarch64-linux.h |
42 | @@ -21,10 +21,10 @@ | 42 | @@ -21,10 +21,10 @@ |
@@ -53,7 +53,7 @@ index 4277f03da2a..e4c92c03291 100644 | |||
53 | #undef ASAN_CC1_SPEC | 53 | #undef ASAN_CC1_SPEC |
54 | #define ASAN_CC1_SPEC "%{%:sanitize(address):-funwind-tables}" | 54 | #define ASAN_CC1_SPEC "%{%:sanitize(address):-funwind-tables}" |
55 | diff --git a/gcc/config/alpha/linux-elf.h b/gcc/config/alpha/linux-elf.h | 55 | diff --git a/gcc/config/alpha/linux-elf.h b/gcc/config/alpha/linux-elf.h |
56 | index 03f783f2ad1..4fa02668aa7 100644 | 56 | index 40ccf0bd9b4..ab5c59655fc 100644 |
57 | --- a/gcc/config/alpha/linux-elf.h | 57 | --- a/gcc/config/alpha/linux-elf.h |
58 | +++ b/gcc/config/alpha/linux-elf.h | 58 | +++ b/gcc/config/alpha/linux-elf.h |
59 | @@ -23,8 +23,8 @@ along with GCC; see the file COPYING3. If not see | 59 | @@ -23,8 +23,8 @@ along with GCC; see the file COPYING3. If not see |
@@ -68,7 +68,7 @@ index 03f783f2ad1..4fa02668aa7 100644 | |||
68 | #define CHOOSE_DYNAMIC_LINKER(G, U) "%{mglibc:" G ";:" U "}" | 68 | #define CHOOSE_DYNAMIC_LINKER(G, U) "%{mglibc:" G ";:" U "}" |
69 | #elif DEFAULT_LIBC == LIBC_GLIBC | 69 | #elif DEFAULT_LIBC == LIBC_GLIBC |
70 | diff --git a/gcc/config/arm/linux-eabi.h b/gcc/config/arm/linux-eabi.h | 70 | diff --git a/gcc/config/arm/linux-eabi.h b/gcc/config/arm/linux-eabi.h |
71 | index a119875599d..dce7f59eeea 100644 | 71 | index eef791f6a02..335cb4c3607 100644 |
72 | --- a/gcc/config/arm/linux-eabi.h | 72 | --- a/gcc/config/arm/linux-eabi.h |
73 | +++ b/gcc/config/arm/linux-eabi.h | 73 | +++ b/gcc/config/arm/linux-eabi.h |
74 | @@ -62,8 +62,8 @@ | 74 | @@ -62,8 +62,8 @@ |
@@ -92,7 +92,7 @@ index a119875599d..dce7f59eeea 100644 | |||
92 | /* At this point, bpabi.h will have clobbered LINK_SPEC. We want to | 92 | /* At this point, bpabi.h will have clobbered LINK_SPEC. We want to |
93 | use the GNU/Linux version, not the generic BPABI version. */ | 93 | use the GNU/Linux version, not the generic BPABI version. */ |
94 | diff --git a/gcc/config/arm/linux-elf.h b/gcc/config/arm/linux-elf.h | 94 | diff --git a/gcc/config/arm/linux-elf.h b/gcc/config/arm/linux-elf.h |
95 | index 7b7b7cbbe14..98ef2267117 100644 | 95 | index ccae8abf6f6..ea421d03891 100644 |
96 | --- a/gcc/config/arm/linux-elf.h | 96 | --- a/gcc/config/arm/linux-elf.h |
97 | +++ b/gcc/config/arm/linux-elf.h | 97 | +++ b/gcc/config/arm/linux-elf.h |
98 | @@ -60,7 +60,7 @@ | 98 | @@ -60,7 +60,7 @@ |
@@ -105,7 +105,7 @@ index 7b7b7cbbe14..98ef2267117 100644 | |||
105 | #define LINUX_TARGET_LINK_SPEC "%{h*} \ | 105 | #define LINUX_TARGET_LINK_SPEC "%{h*} \ |
106 | %{static:-Bstatic} \ | 106 | %{static:-Bstatic} \ |
107 | diff --git a/gcc/config/i386/linux.h b/gcc/config/i386/linux.h | 107 | diff --git a/gcc/config/i386/linux.h b/gcc/config/i386/linux.h |
108 | index bbb7cc7115e..7d9272040ee 100644 | 108 | index 20a96d56e65..7a7a4f873e4 100644 |
109 | --- a/gcc/config/i386/linux.h | 109 | --- a/gcc/config/i386/linux.h |
110 | +++ b/gcc/config/i386/linux.h | 110 | +++ b/gcc/config/i386/linux.h |
111 | @@ -20,7 +20,7 @@ along with GCC; see the file COPYING3. If not see | 111 | @@ -20,7 +20,7 @@ along with GCC; see the file COPYING3. If not see |
@@ -119,7 +119,7 @@ index bbb7cc7115e..7d9272040ee 100644 | |||
119 | -#define MUSL_DYNAMIC_LINKER "/lib/ld-musl-i386.so.1" | 119 | -#define MUSL_DYNAMIC_LINKER "/lib/ld-musl-i386.so.1" |
120 | +#define MUSL_DYNAMIC_LINKER SYSTEMLIBS_DIR "ld-musl-i386.so.1" | 120 | +#define MUSL_DYNAMIC_LINKER SYSTEMLIBS_DIR "ld-musl-i386.so.1" |
121 | diff --git a/gcc/config/i386/linux64.h b/gcc/config/i386/linux64.h | 121 | diff --git a/gcc/config/i386/linux64.h b/gcc/config/i386/linux64.h |
122 | index 2bd9f48e271..dbbe7ca5440 100644 | 122 | index 3dd23eef5e5..960cd7c0450 100644 |
123 | --- a/gcc/config/i386/linux64.h | 123 | --- a/gcc/config/i386/linux64.h |
124 | +++ b/gcc/config/i386/linux64.h | 124 | +++ b/gcc/config/i386/linux64.h |
125 | @@ -27,13 +27,13 @@ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see | 125 | @@ -27,13 +27,13 @@ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see |
@@ -143,7 +143,7 @@ index 2bd9f48e271..dbbe7ca5440 100644 | |||
143 | -#define MUSL_DYNAMIC_LINKERX32 "/lib/ld-musl-x32.so.1" | 143 | -#define MUSL_DYNAMIC_LINKERX32 "/lib/ld-musl-x32.so.1" |
144 | +#define MUSL_DYNAMIC_LINKERX32 SYSTEMLIBS_DIR "ld-musl-x32.so.1" | 144 | +#define MUSL_DYNAMIC_LINKERX32 SYSTEMLIBS_DIR "ld-musl-x32.so.1" |
145 | diff --git a/gcc/config/linux.h b/gcc/config/linux.h | 145 | diff --git a/gcc/config/linux.h b/gcc/config/linux.h |
146 | index e3aca79cccc..6491c6b84f5 100644 | 146 | index a7248ac90d0..e36f32bf59e 100644 |
147 | --- a/gcc/config/linux.h | 147 | --- a/gcc/config/linux.h |
148 | +++ b/gcc/config/linux.h | 148 | +++ b/gcc/config/linux.h |
149 | @@ -86,10 +86,10 @@ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see | 149 | @@ -86,10 +86,10 @@ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see |
@@ -162,25 +162,29 @@ index e3aca79cccc..6491c6b84f5 100644 | |||
162 | #define BIONIC_DYNAMIC_LINKER32 "/system/bin/linker" | 162 | #define BIONIC_DYNAMIC_LINKER32 "/system/bin/linker" |
163 | #define BIONIC_DYNAMIC_LINKER64 "/system/bin/linker64" | 163 | #define BIONIC_DYNAMIC_LINKER64 "/system/bin/linker64" |
164 | diff --git a/gcc/config/loongarch/gnu-user.h b/gcc/config/loongarch/gnu-user.h | 164 | diff --git a/gcc/config/loongarch/gnu-user.h b/gcc/config/loongarch/gnu-user.h |
165 | index aecaa02a199..62f88f7f9a2 100644 | 165 | index e5d84e180e3..98cb49c189f 100644 |
166 | --- a/gcc/config/loongarch/gnu-user.h | 166 | --- a/gcc/config/loongarch/gnu-user.h |
167 | +++ b/gcc/config/loongarch/gnu-user.h | 167 | +++ b/gcc/config/loongarch/gnu-user.h |
168 | @@ -31,11 +31,11 @@ along with GCC; see the file COPYING3. If not see | 168 | @@ -31,7 +31,7 @@ along with GCC; see the file COPYING3. If not see |
169 | 169 | ||
170 | #undef GLIBC_DYNAMIC_LINKER | 170 | #undef GLIBC_DYNAMIC_LINKER |
171 | #define GLIBC_DYNAMIC_LINKER \ | 171 | #define GLIBC_DYNAMIC_LINKER \ |
172 | - "/lib" ABI_GRLEN_SPEC "/ld-linux-loongarch-" ABI_SPEC ".so.1" | 172 | - "/lib" ABI_GRLEN_SPEC "/ld-linux-loongarch-" ABI_SPEC ".so.1" |
173 | + SYSTEMLIBS_DIR "ld-linux-loongarch-" ABI_SPEC ".so.1" | 173 | + SYSTEMLIBS_DIR "ld-linux-loongarch-" ABI_SPEC ".so.1" |
174 | 174 | ||
175 | #define MUSL_ABI_SPEC \ | ||
176 | "%{mabi=lp64d:}" \ | ||
177 | @@ -40,7 +40,7 @@ along with GCC; see the file COPYING3. If not see | ||
178 | |||
175 | #undef MUSL_DYNAMIC_LINKER | 179 | #undef MUSL_DYNAMIC_LINKER |
176 | #define MUSL_DYNAMIC_LINKER \ | 180 | #define MUSL_DYNAMIC_LINKER \ |
177 | - "/lib" ABI_GRLEN_SPEC "/ld-musl-loongarch-" ABI_SPEC ".so.1" | 181 | - "/lib/ld-musl-loongarch" ABI_GRLEN_SPEC MUSL_ABI_SPEC ".so.1" |
178 | + SYSTEMLIBS_DIR "ld-musl-loongarch-" ABI_SPEC ".so.1" | 182 | + SYSTEMLIBS_DIR "ld-musl-loongarch" ABI_GRLEN_SPEC MUSL_ABI_SPEC ".so.1" |
179 | 183 | ||
180 | #undef GNU_USER_TARGET_LINK_SPEC | 184 | #undef GNU_USER_TARGET_LINK_SPEC |
181 | #define GNU_USER_TARGET_LINK_SPEC \ | 185 | #define GNU_USER_TARGET_LINK_SPEC \ |
182 | diff --git a/gcc/config/microblaze/linux.h b/gcc/config/microblaze/linux.h | 186 | diff --git a/gcc/config/microblaze/linux.h b/gcc/config/microblaze/linux.h |
183 | index e2e2c421c52..6f26480e3b5 100644 | 187 | index 5ed8ee518be..5553e7cac21 100644 |
184 | --- a/gcc/config/microblaze/linux.h | 188 | --- a/gcc/config/microblaze/linux.h |
185 | +++ b/gcc/config/microblaze/linux.h | 189 | +++ b/gcc/config/microblaze/linux.h |
186 | @@ -28,7 +28,7 @@ | 190 | @@ -28,7 +28,7 @@ |
@@ -202,7 +206,7 @@ index e2e2c421c52..6f26480e3b5 100644 | |||
202 | #undef SUBTARGET_EXTRA_SPECS | 206 | #undef SUBTARGET_EXTRA_SPECS |
203 | #define SUBTARGET_EXTRA_SPECS \ | 207 | #define SUBTARGET_EXTRA_SPECS \ |
204 | diff --git a/gcc/config/mips/linux.h b/gcc/config/mips/linux.h | 208 | diff --git a/gcc/config/mips/linux.h b/gcc/config/mips/linux.h |
205 | index 5add34ea134..34692b433b8 100644 | 209 | index 8d098dd2f44..1c5ebbd26d5 100644 |
206 | --- a/gcc/config/mips/linux.h | 210 | --- a/gcc/config/mips/linux.h |
207 | +++ b/gcc/config/mips/linux.h | 211 | +++ b/gcc/config/mips/linux.h |
208 | @@ -22,29 +22,29 @@ along with GCC; see the file COPYING3. If not see | 212 | @@ -22,29 +22,29 @@ along with GCC; see the file COPYING3. If not see |
@@ -245,7 +249,7 @@ index 5add34ea134..34692b433b8 100644 | |||
245 | #define BIONIC_DYNAMIC_LINKERN32 "/system/bin/linker32" | 249 | #define BIONIC_DYNAMIC_LINKERN32 "/system/bin/linker32" |
246 | #define GNU_USER_DYNAMIC_LINKERN32 \ | 250 | #define GNU_USER_DYNAMIC_LINKERN32 \ |
247 | diff --git a/gcc/config/nios2/linux.h b/gcc/config/nios2/linux.h | 251 | diff --git a/gcc/config/nios2/linux.h b/gcc/config/nios2/linux.h |
248 | index 2ce097ebbce..1d45d7b4962 100644 | 252 | index 06c442e85f4..b21a3c076c9 100644 |
249 | --- a/gcc/config/nios2/linux.h | 253 | --- a/gcc/config/nios2/linux.h |
250 | +++ b/gcc/config/nios2/linux.h | 254 | +++ b/gcc/config/nios2/linux.h |
251 | @@ -29,10 +29,10 @@ | 255 | @@ -29,10 +29,10 @@ |
@@ -262,7 +266,7 @@ index 2ce097ebbce..1d45d7b4962 100644 | |||
262 | #undef LINK_SPEC | 266 | #undef LINK_SPEC |
263 | #define LINK_SPEC LINK_SPEC_ENDIAN \ | 267 | #define LINK_SPEC LINK_SPEC_ENDIAN \ |
264 | diff --git a/gcc/config/riscv/linux.h b/gcc/config/riscv/linux.h | 268 | diff --git a/gcc/config/riscv/linux.h b/gcc/config/riscv/linux.h |
265 | index 3e625e0f867..dc3afc97e27 100644 | 269 | index 15851f653bc..d8aaab9631f 100644 |
266 | --- a/gcc/config/riscv/linux.h | 270 | --- a/gcc/config/riscv/linux.h |
267 | +++ b/gcc/config/riscv/linux.h | 271 | +++ b/gcc/config/riscv/linux.h |
268 | @@ -22,7 +22,7 @@ along with GCC; see the file COPYING3. If not see | 272 | @@ -22,7 +22,7 @@ along with GCC; see the file COPYING3. If not see |
@@ -284,10 +288,10 @@ index 3e625e0f867..dc3afc97e27 100644 | |||
284 | #define ICACHE_FLUSH_FUNC "__riscv_flush_icache" | 288 | #define ICACHE_FLUSH_FUNC "__riscv_flush_icache" |
285 | 289 | ||
286 | diff --git a/gcc/config/rs6000/linux64.h b/gcc/config/rs6000/linux64.h | 290 | diff --git a/gcc/config/rs6000/linux64.h b/gcc/config/rs6000/linux64.h |
287 | index 9e457033d11..2ddab7c99c1 100644 | 291 | index 655d1054d76..a11ae7643b7 100644 |
288 | --- a/gcc/config/rs6000/linux64.h | 292 | --- a/gcc/config/rs6000/linux64.h |
289 | +++ b/gcc/config/rs6000/linux64.h | 293 | +++ b/gcc/config/rs6000/linux64.h |
290 | @@ -339,24 +339,19 @@ extern int dot_symbols; | 294 | @@ -340,24 +340,19 @@ extern int dot_symbols; |
291 | #undef LINK_OS_DEFAULT_SPEC | 295 | #undef LINK_OS_DEFAULT_SPEC |
292 | #define LINK_OS_DEFAULT_SPEC "%(link_os_linux)" | 296 | #define LINK_OS_DEFAULT_SPEC "%(link_os_linux)" |
293 | 297 | ||
@@ -318,7 +322,7 @@ index 9e457033d11..2ddab7c99c1 100644 | |||
318 | #undef DEFAULT_ASM_ENDIAN | 322 | #undef DEFAULT_ASM_ENDIAN |
319 | #if (TARGET_DEFAULT & MASK_LITTLE_ENDIAN) | 323 | #if (TARGET_DEFAULT & MASK_LITTLE_ENDIAN) |
320 | diff --git a/gcc/config/rs6000/sysv4.h b/gcc/config/rs6000/sysv4.h | 324 | diff --git a/gcc/config/rs6000/sysv4.h b/gcc/config/rs6000/sysv4.h |
321 | index ae932fc22f0..26db003cb3a 100644 | 325 | index bcafa9d0cf4..8fda952e69c 100644 |
322 | --- a/gcc/config/rs6000/sysv4.h | 326 | --- a/gcc/config/rs6000/sysv4.h |
323 | +++ b/gcc/config/rs6000/sysv4.h | 327 | +++ b/gcc/config/rs6000/sysv4.h |
324 | @@ -771,10 +771,10 @@ GNU_USER_TARGET_CC1_SPEC | 328 | @@ -771,10 +771,10 @@ GNU_USER_TARGET_CC1_SPEC |
@@ -335,7 +339,7 @@ index ae932fc22f0..26db003cb3a 100644 | |||
335 | #ifndef GNU_USER_DYNAMIC_LINKER | 339 | #ifndef GNU_USER_DYNAMIC_LINKER |
336 | #define GNU_USER_DYNAMIC_LINKER GLIBC_DYNAMIC_LINKER | 340 | #define GNU_USER_DYNAMIC_LINKER GLIBC_DYNAMIC_LINKER |
337 | diff --git a/gcc/config/s390/linux.h b/gcc/config/s390/linux.h | 341 | diff --git a/gcc/config/s390/linux.h b/gcc/config/s390/linux.h |
338 | index 02aa1edaff8..fab268d61f4 100644 | 342 | index 83ea2f669a0..bc7cffbad3f 100644 |
339 | --- a/gcc/config/s390/linux.h | 343 | --- a/gcc/config/s390/linux.h |
340 | +++ b/gcc/config/s390/linux.h | 344 | +++ b/gcc/config/s390/linux.h |
341 | @@ -72,13 +72,13 @@ along with GCC; see the file COPYING3. If not see | 345 | @@ -72,13 +72,13 @@ along with GCC; see the file COPYING3. If not see |
@@ -357,7 +361,7 @@ index 02aa1edaff8..fab268d61f4 100644 | |||
357 | #undef LINK_SPEC | 361 | #undef LINK_SPEC |
358 | #define LINK_SPEC \ | 362 | #define LINK_SPEC \ |
359 | diff --git a/gcc/config/sh/linux.h b/gcc/config/sh/linux.h | 363 | diff --git a/gcc/config/sh/linux.h b/gcc/config/sh/linux.h |
360 | index 29f5902b98b..83d1e53e6e2 100644 | 364 | index 34cbbed633f..14000d3394c 100644 |
361 | --- a/gcc/config/sh/linux.h | 365 | --- a/gcc/config/sh/linux.h |
362 | +++ b/gcc/config/sh/linux.h | 366 | +++ b/gcc/config/sh/linux.h |
363 | @@ -61,10 +61,10 @@ along with GCC; see the file COPYING3. If not see | 367 | @@ -61,10 +61,10 @@ along with GCC; see the file COPYING3. If not see |
@@ -374,7 +378,7 @@ index 29f5902b98b..83d1e53e6e2 100644 | |||
374 | #undef SUBTARGET_LINK_EMUL_SUFFIX | 378 | #undef SUBTARGET_LINK_EMUL_SUFFIX |
375 | #define SUBTARGET_LINK_EMUL_SUFFIX "%{mfdpic:_fd;:_linux}" | 379 | #define SUBTARGET_LINK_EMUL_SUFFIX "%{mfdpic:_fd;:_linux}" |
376 | diff --git a/gcc/config/sparc/linux.h b/gcc/config/sparc/linux.h | 380 | diff --git a/gcc/config/sparc/linux.h b/gcc/config/sparc/linux.h |
377 | index 0e33b3cac2c..84f29adbb35 100644 | 381 | index 9646fa9c41e..396fb047fac 100644 |
378 | --- a/gcc/config/sparc/linux.h | 382 | --- a/gcc/config/sparc/linux.h |
379 | +++ b/gcc/config/sparc/linux.h | 383 | +++ b/gcc/config/sparc/linux.h |
380 | @@ -78,7 +78,7 @@ extern const char *host_detect_local_cpu (int argc, const char **argv); | 384 | @@ -78,7 +78,7 @@ extern const char *host_detect_local_cpu (int argc, const char **argv); |
@@ -387,7 +391,7 @@ index 0e33b3cac2c..84f29adbb35 100644 | |||
387 | #undef LINK_SPEC | 391 | #undef LINK_SPEC |
388 | #define LINK_SPEC "-m elf32_sparc %{shared:-shared} \ | 392 | #define LINK_SPEC "-m elf32_sparc %{shared:-shared} \ |
389 | diff --git a/gcc/config/sparc/linux64.h b/gcc/config/sparc/linux64.h | 393 | diff --git a/gcc/config/sparc/linux64.h b/gcc/config/sparc/linux64.h |
390 | index f1cc0a19e49..94bc2032803 100644 | 394 | index 1e2e4aef2ad..25fcc37eca2 100644 |
391 | --- a/gcc/config/sparc/linux64.h | 395 | --- a/gcc/config/sparc/linux64.h |
392 | +++ b/gcc/config/sparc/linux64.h | 396 | +++ b/gcc/config/sparc/linux64.h |
393 | @@ -78,8 +78,8 @@ along with GCC; see the file COPYING3. If not see | 397 | @@ -78,8 +78,8 @@ along with GCC; see the file COPYING3. If not see |
diff --git a/meta/recipes-devtools/gcc/gcc/0008-libtool.patch b/meta/recipes-devtools/gcc/gcc/0008-libtool.patch index 5b44dc809e..9b78377493 100644 --- a/meta/recipes-devtools/gcc/gcc/0008-libtool.patch +++ b/meta/recipes-devtools/gcc/gcc/0008-libtool.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 7608e93ab97e8c33e3b14323d0cabc651926e403 Mon Sep 17 00:00:00 2001 | 1 | From 869295f6f1095b6334c1ea34f8d11a676c67e256 Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Fri, 29 Mar 2013 09:29:11 +0400 | 3 | Date: Fri, 29 Mar 2013 09:29:11 +0400 |
4 | Subject: [PATCH] libtool | 4 | Subject: [PATCH] libtool |
diff --git a/meta/recipes-devtools/gcc/gcc/0009-gcc-armv4-pass-fix-v4bx-to-linker-to-support-EABI.patch b/meta/recipes-devtools/gcc/gcc/0009-gcc-armv4-pass-fix-v4bx-to-linker-to-support-EABI.patch index 86542bd146..3e24e53d7b 100644 --- a/meta/recipes-devtools/gcc/gcc/0009-gcc-armv4-pass-fix-v4bx-to-linker-to-support-EABI.patch +++ b/meta/recipes-devtools/gcc/gcc/0009-gcc-armv4-pass-fix-v4bx-to-linker-to-support-EABI.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 4b0efc18e0d91967a3db11d9ef0595a5a76ad67a Mon Sep 17 00:00:00 2001 | 1 | From 19279e08417124b4567fafdf9300b779ce1a1e1c Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Fri, 29 Mar 2013 09:30:32 +0400 | 3 | Date: Fri, 29 Mar 2013 09:30:32 +0400 |
4 | Subject: [PATCH] gcc: armv4: pass fix-v4bx to linker to support EABI. | 4 | Subject: [PATCH] gcc: armv4: pass fix-v4bx to linker to support EABI. |
@@ -18,7 +18,7 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com> | |||
18 | 1 file changed, 5 insertions(+), 1 deletion(-) | 18 | 1 file changed, 5 insertions(+), 1 deletion(-) |
19 | 19 | ||
20 | diff --git a/gcc/config/arm/linux-eabi.h b/gcc/config/arm/linux-eabi.h | 20 | diff --git a/gcc/config/arm/linux-eabi.h b/gcc/config/arm/linux-eabi.h |
21 | index dce7f59eeea..27402c629c6 100644 | 21 | index 335cb4c3607..1d5235075cb 100644 |
22 | --- a/gcc/config/arm/linux-eabi.h | 22 | --- a/gcc/config/arm/linux-eabi.h |
23 | +++ b/gcc/config/arm/linux-eabi.h | 23 | +++ b/gcc/config/arm/linux-eabi.h |
24 | @@ -88,10 +88,14 @@ | 24 | @@ -88,10 +88,14 @@ |
diff --git a/meta/recipes-devtools/gcc/gcc/0010-Use-the-multilib-config-files-from-B-instead-of-usin.patch b/meta/recipes-devtools/gcc/gcc/0010-Use-the-multilib-config-files-from-B-instead-of-usin.patch index bc3943087c..c3484360ee 100644 --- a/meta/recipes-devtools/gcc/gcc/0010-Use-the-multilib-config-files-from-B-instead-of-usin.patch +++ b/meta/recipes-devtools/gcc/gcc/0010-Use-the-multilib-config-files-from-B-instead-of-usin.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From b015460586e2ea8a35a11d1a607728707bdf6509 Mon Sep 17 00:00:00 2001 | 1 | From 7e5706e74643d8bf78b117620849e546a31295c5 Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Fri, 29 Mar 2013 09:33:04 +0400 | 3 | Date: Fri, 29 Mar 2013 09:33:04 +0400 |
4 | Subject: [PATCH] Use the multilib config files from ${B} instead of using the | 4 | Subject: [PATCH] Use the multilib config files from ${B} instead of using the |
@@ -18,10 +18,10 @@ Upstream-Status: Inappropriate [configuration] | |||
18 | 2 files changed, 36 insertions(+), 8 deletions(-) | 18 | 2 files changed, 36 insertions(+), 8 deletions(-) |
19 | 19 | ||
20 | diff --git a/gcc/configure b/gcc/configure | 20 | diff --git a/gcc/configure b/gcc/configure |
21 | index cf773a8b854..448a1ec093e 100755 | 21 | index 3870d6e9b6d..07a0b8571d2 100755 |
22 | --- a/gcc/configure | 22 | --- a/gcc/configure |
23 | +++ b/gcc/configure | 23 | +++ b/gcc/configure |
24 | @@ -13487,10 +13487,20 @@ done | 24 | @@ -15135,10 +15135,20 @@ done |
25 | tmake_file_= | 25 | tmake_file_= |
26 | for f in ${tmake_file} | 26 | for f in ${tmake_file} |
27 | do | 27 | do |
@@ -46,7 +46,7 @@ index cf773a8b854..448a1ec093e 100755 | |||
46 | done | 46 | done |
47 | tmake_file="${tmake_file_}${omp_device_property_tmake_file}" | 47 | tmake_file="${tmake_file_}${omp_device_property_tmake_file}" |
48 | 48 | ||
49 | @@ -13501,6 +13511,10 @@ tm_file_list="options.h" | 49 | @@ -15149,6 +15159,10 @@ tm_file_list="options.h" |
50 | tm_include_list="options.h insn-constants.h" | 50 | tm_include_list="options.h insn-constants.h" |
51 | for f in $tm_file; do | 51 | for f in $tm_file; do |
52 | case $f in | 52 | case $f in |
@@ -58,10 +58,10 @@ index cf773a8b854..448a1ec093e 100755 | |||
58 | f=`echo $f | sed 's/^..//'` | 58 | f=`echo $f | sed 's/^..//'` |
59 | tm_file_list="${tm_file_list} $f" | 59 | tm_file_list="${tm_file_list} $f" |
60 | diff --git a/gcc/configure.ac b/gcc/configure.ac | 60 | diff --git a/gcc/configure.ac b/gcc/configure.ac |
61 | index 22591478b72..b6e7f5149a7 100644 | 61 | index 15670de1706..fd96dd7ba49 100644 |
62 | --- a/gcc/configure.ac | 62 | --- a/gcc/configure.ac |
63 | +++ b/gcc/configure.ac | 63 | +++ b/gcc/configure.ac |
64 | @@ -2337,10 +2337,20 @@ done | 64 | @@ -2363,10 +2363,20 @@ done |
65 | tmake_file_= | 65 | tmake_file_= |
66 | for f in ${tmake_file} | 66 | for f in ${tmake_file} |
67 | do | 67 | do |
@@ -86,7 +86,7 @@ index 22591478b72..b6e7f5149a7 100644 | |||
86 | done | 86 | done |
87 | tmake_file="${tmake_file_}${omp_device_property_tmake_file}" | 87 | tmake_file="${tmake_file_}${omp_device_property_tmake_file}" |
88 | 88 | ||
89 | @@ -2351,6 +2361,10 @@ tm_file_list="options.h" | 89 | @@ -2377,6 +2387,10 @@ tm_file_list="options.h" |
90 | tm_include_list="options.h insn-constants.h" | 90 | tm_include_list="options.h insn-constants.h" |
91 | for f in $tm_file; do | 91 | for f in $tm_file; do |
92 | case $f in | 92 | case $f in |
diff --git a/meta/recipes-devtools/gcc/gcc/0011-aarch64-Fix-include-paths-when-S-B.patch b/meta/recipes-devtools/gcc/gcc/0011-aarch64-Fix-include-paths-when-S-B.patch index 974aca5ee4..8633f7c436 100644 --- a/meta/recipes-devtools/gcc/gcc/0011-aarch64-Fix-include-paths-when-S-B.patch +++ b/meta/recipes-devtools/gcc/gcc/0011-aarch64-Fix-include-paths-when-S-B.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From b7ce05b2d969b311c6061bda32c3117c76bf7e0c Mon Sep 17 00:00:00 2001 | 1 | From 0671715ae6d411b5d2d4c53fee42a6e9fe2562c2 Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Tue, 31 Jan 2023 22:03:38 -0800 | 3 | Date: Tue, 31 Jan 2023 22:03:38 -0800 |
4 | Subject: [PATCH] aarch64: Fix include paths when S != B | 4 | Subject: [PATCH] aarch64: Fix include paths when S != B |
@@ -24,32 +24,44 @@ Upstream-Status: Pending | |||
24 | 24 | ||
25 | Signed-off-by: Khem Raj <raj.khem@gmail.com> | 25 | Signed-off-by: Khem Raj <raj.khem@gmail.com> |
26 | --- | 26 | --- |
27 | gcc/config/aarch64/aarch64.h | 8 ++++---- | 27 | gcc/config/aarch64/aarch64.h | 14 +++++++------- |
28 | 1 file changed, 4 insertions(+), 4 deletions(-) | 28 | 1 file changed, 7 insertions(+), 7 deletions(-) |
29 | 29 | ||
30 | diff --git a/gcc/config/aarch64/aarch64.h b/gcc/config/aarch64/aarch64.h | 30 | diff --git a/gcc/config/aarch64/aarch64.h b/gcc/config/aarch64/aarch64.h |
31 | index 73b09e20508..10ea3672f20 100644 | 31 | index 45e901cda64..591062d4b39 100644 |
32 | --- a/gcc/config/aarch64/aarch64.h | 32 | --- a/gcc/config/aarch64/aarch64.h |
33 | +++ b/gcc/config/aarch64/aarch64.h | 33 | +++ b/gcc/config/aarch64/aarch64.h |
34 | @@ -161,8 +161,8 @@ | 34 | @@ -170,9 +170,9 @@ enum class aarch64_feature : unsigned char { |
35 | enum class aarch64_feature : unsigned char { | 35 | #define DEF_AARCH64_ISA_MODE(IDENT) IDENT, |
36 | #define AARCH64_OPT_EXTENSION(A, IDENT, C, D, E, F) IDENT, | 36 | #define AARCH64_OPT_EXTENSION(A, IDENT, C, D, E, F) IDENT, |
37 | #define AARCH64_ARCH(A, B, IDENT, D, E) IDENT, | 37 | #define AARCH64_ARCH(A, B, IDENT, D, E) IDENT, |
38 | -#include "aarch64-isa-modes.def" | ||
38 | -#include "aarch64-option-extensions.def" | 39 | -#include "aarch64-option-extensions.def" |
39 | -#include "aarch64-arches.def" | 40 | -#include "aarch64-arches.def" |
41 | +#include "config/aarch64/aarch64-isa-modes.def" | ||
40 | +#include "config/aarch64/aarch64-option-extensions.def" | 42 | +#include "config/aarch64/aarch64-option-extensions.def" |
41 | +#include "config/aarch64/aarch64-arches.def" | 43 | +#include "config/aarch64/aarch64-arches.def" |
42 | }; | 44 | }; |
43 | 45 | ||
44 | /* Define unique flags for each of the above. */ | 46 | /* Define unique flags for each of the above. */ |
45 | @@ -171,8 +171,8 @@ enum class aarch64_feature : unsigned char { | 47 | @@ -182,16 +182,16 @@ enum class aarch64_feature : unsigned char { |
46 | = aarch64_feature_flags (1) << int (aarch64_feature::IDENT); | 48 | #define DEF_AARCH64_ISA_MODE(IDENT) HANDLE (IDENT) |
47 | #define AARCH64_OPT_EXTENSION(A, IDENT, C, D, E, F) HANDLE (IDENT) | 49 | #define AARCH64_OPT_EXTENSION(A, IDENT, C, D, E, F) HANDLE (IDENT) |
48 | #define AARCH64_ARCH(A, B, IDENT, D, E) HANDLE (IDENT) | 50 | #define AARCH64_ARCH(A, B, IDENT, D, E) HANDLE (IDENT) |
51 | -#include "aarch64-isa-modes.def" | ||
49 | -#include "aarch64-option-extensions.def" | 52 | -#include "aarch64-option-extensions.def" |
50 | -#include "aarch64-arches.def" | 53 | -#include "aarch64-arches.def" |
54 | +#include "config/aarch64/aarch64-isa-modes.def" | ||
51 | +#include "config/aarch64/aarch64-option-extensions.def" | 55 | +#include "config/aarch64/aarch64-option-extensions.def" |
52 | +#include "config/aarch64/aarch64-arches.def" | 56 | +#include "config/aarch64/aarch64-arches.def" |
53 | #undef HANDLE | 57 | #undef HANDLE |
54 | 58 | ||
55 | #endif | 59 | constexpr auto AARCH64_FL_SM_STATE = AARCH64_FL_SM_ON | AARCH64_FL_SM_OFF; |
60 | |||
61 | constexpr unsigned int AARCH64_NUM_ISA_MODES = (0 | ||
62 | #define DEF_AARCH64_ISA_MODE(IDENT) + 1 | ||
63 | -#include "aarch64-isa-modes.def" | ||
64 | +#include "config/aarch64/aarch64-isa-modes.def" | ||
65 | ); | ||
66 | |||
67 | /* The mask of all ISA modes. */ | ||
diff --git a/meta/recipes-devtools/gcc/gcc/0012-Avoid-using-libdir-from-.la-which-usually-points-to-.patch b/meta/recipes-devtools/gcc/gcc/0012-Avoid-using-libdir-from-.la-which-usually-points-to-.patch index cf5efcd32d..2f6ecd53bb 100644 --- a/meta/recipes-devtools/gcc/gcc/0012-Avoid-using-libdir-from-.la-which-usually-points-to-.patch +++ b/meta/recipes-devtools/gcc/gcc/0012-Avoid-using-libdir-from-.la-which-usually-points-to-.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 39ab6fe76f2788b2c989d29c9016f1fe53cb736e Mon Sep 17 00:00:00 2001 | 1 | From a40108180f552788bb321a853ed4d5f034866a48 Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Fri, 20 Feb 2015 09:39:38 +0000 | 3 | Date: Fri, 20 Feb 2015 09:39:38 +0000 |
4 | Subject: [PATCH] Avoid using libdir from .la which usually points to a host | 4 | Subject: [PATCH] Avoid using libdir from .la which usually points to a host |
diff --git a/meta/recipes-devtools/gcc/gcc/0013-Ensure-target-gcc-headers-can-be-included.patch b/meta/recipes-devtools/gcc/gcc/0013-Ensure-target-gcc-headers-can-be-included.patch index e2343a3c17..eb4f0b3045 100644 --- a/meta/recipes-devtools/gcc/gcc/0013-Ensure-target-gcc-headers-can-be-included.patch +++ b/meta/recipes-devtools/gcc/gcc/0013-Ensure-target-gcc-headers-can-be-included.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 531b9df680c4380797e8e7705a8e7f8ed17ebe68 Mon Sep 17 00:00:00 2001 | 1 | From 6ad8db533c7d53a85e6280da1ad88ed4077258fb Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Fri, 20 Feb 2015 10:25:11 +0000 | 3 | Date: Fri, 20 Feb 2015 10:25:11 +0000 |
4 | Subject: [PATCH] Ensure target gcc headers can be included | 4 | Subject: [PATCH] Ensure target gcc headers can be included |
@@ -25,10 +25,10 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com> | |||
25 | 4 files changed, 22 insertions(+) | 25 | 4 files changed, 22 insertions(+) |
26 | 26 | ||
27 | diff --git a/gcc/Makefile.in b/gcc/Makefile.in | 27 | diff --git a/gcc/Makefile.in b/gcc/Makefile.in |
28 | index 04f28984b34..8ef996c0f4d 100644 | 28 | index 956437df95b..6a009c696e9 100644 |
29 | --- a/gcc/Makefile.in | 29 | --- a/gcc/Makefile.in |
30 | +++ b/gcc/Makefile.in | 30 | +++ b/gcc/Makefile.in |
31 | @@ -640,6 +640,7 @@ libexecdir = @libexecdir@ | 31 | @@ -671,6 +671,7 @@ libexecdir = @libexecdir@ |
32 | 32 | ||
33 | # Directory in which the compiler finds libraries etc. | 33 | # Directory in which the compiler finds libraries etc. |
34 | libsubdir = $(libdir)/gcc/$(real_target_noncanonical)/$(version)$(accel_dir_suffix) | 34 | libsubdir = $(libdir)/gcc/$(real_target_noncanonical)/$(version)$(accel_dir_suffix) |
@@ -36,7 +36,7 @@ index 04f28984b34..8ef996c0f4d 100644 | |||
36 | # Directory in which the compiler finds executables | 36 | # Directory in which the compiler finds executables |
37 | libexecsubdir = $(libexecdir)/gcc/$(real_target_noncanonical)/$(version)$(accel_dir_suffix) | 37 | libexecsubdir = $(libexecdir)/gcc/$(real_target_noncanonical)/$(version)$(accel_dir_suffix) |
38 | # Directory in which all plugin resources are installed | 38 | # Directory in which all plugin resources are installed |
39 | @@ -3059,6 +3060,7 @@ CFLAGS-intl.o += -DLOCALEDIR=\"$(localedir)\" | 39 | @@ -3199,6 +3200,7 @@ CFLAGS-intl.o += -DLOCALEDIR=\"$(localedir)\" |
40 | 40 | ||
41 | PREPROCESSOR_DEFINES = \ | 41 | PREPROCESSOR_DEFINES = \ |
42 | -DGCC_INCLUDE_DIR=\"$(libsubdir)/include\" \ | 42 | -DGCC_INCLUDE_DIR=\"$(libsubdir)/include\" \ |
@@ -45,7 +45,7 @@ index 04f28984b34..8ef996c0f4d 100644 | |||
45 | -DGPLUSPLUS_INCLUDE_DIR=\"$(gcc_gxx_include_dir)\" \ | 45 | -DGPLUSPLUS_INCLUDE_DIR=\"$(gcc_gxx_include_dir)\" \ |
46 | -DGPLUSPLUS_INCLUDE_DIR_ADD_SYSROOT=$(gcc_gxx_include_dir_add_sysroot) \ | 46 | -DGPLUSPLUS_INCLUDE_DIR_ADD_SYSROOT=$(gcc_gxx_include_dir_add_sysroot) \ |
47 | diff --git a/gcc/config/linux.h b/gcc/config/linux.h | 47 | diff --git a/gcc/config/linux.h b/gcc/config/linux.h |
48 | index 6491c6b84f5..57496ff1f2f 100644 | 48 | index e36f32bf59e..c6aada42eb7 100644 |
49 | --- a/gcc/config/linux.h | 49 | --- a/gcc/config/linux.h |
50 | +++ b/gcc/config/linux.h | 50 | +++ b/gcc/config/linux.h |
51 | @@ -157,6 +157,13 @@ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see | 51 | @@ -157,6 +157,13 @@ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see |
@@ -71,7 +71,7 @@ index 6491c6b84f5..57496ff1f2f 100644 | |||
71 | { GCC_INCLUDE_DIR, "GCC", 0, 1, 0, 0 }, \ | 71 | { GCC_INCLUDE_DIR, "GCC", 0, 1, 0, 0 }, \ |
72 | { 0, 0, 0, 0, 0, 0 } \ | 72 | { 0, 0, 0, 0, 0, 0 } \ |
73 | diff --git a/gcc/config/rs6000/sysv4.h b/gcc/config/rs6000/sysv4.h | 73 | diff --git a/gcc/config/rs6000/sysv4.h b/gcc/config/rs6000/sysv4.h |
74 | index 26db003cb3a..3a443abcf6b 100644 | 74 | index 8fda952e69c..247acec9d8e 100644 |
75 | --- a/gcc/config/rs6000/sysv4.h | 75 | --- a/gcc/config/rs6000/sysv4.h |
76 | +++ b/gcc/config/rs6000/sysv4.h | 76 | +++ b/gcc/config/rs6000/sysv4.h |
77 | @@ -986,6 +986,13 @@ ncrtn.o%s" | 77 | @@ -986,6 +986,13 @@ ncrtn.o%s" |
@@ -97,7 +97,7 @@ index 26db003cb3a..3a443abcf6b 100644 | |||
97 | { GCC_INCLUDE_DIR, "GCC", 0, 1, 0, 0 }, \ | 97 | { GCC_INCLUDE_DIR, "GCC", 0, 1, 0, 0 }, \ |
98 | { 0, 0, 0, 0, 0, 0 } \ | 98 | { 0, 0, 0, 0, 0, 0 } \ |
99 | diff --git a/gcc/cppdefault.cc b/gcc/cppdefault.cc | 99 | diff --git a/gcc/cppdefault.cc b/gcc/cppdefault.cc |
100 | index 141bb4d25f6..734590a7059 100644 | 100 | index f82b4badad7..98bd66f95c6 100644 |
101 | --- a/gcc/cppdefault.cc | 101 | --- a/gcc/cppdefault.cc |
102 | +++ b/gcc/cppdefault.cc | 102 | +++ b/gcc/cppdefault.cc |
103 | @@ -64,6 +64,10 @@ const struct default_include cpp_include_defaults[] | 103 | @@ -64,6 +64,10 @@ const struct default_include cpp_include_defaults[] |
diff --git a/meta/recipes-devtools/gcc/gcc/0014-Don-t-search-host-directory-during-relink-if-inst_pr.patch b/meta/recipes-devtools/gcc/gcc/0014-Don-t-search-host-directory-during-relink-if-inst_pr.patch index 30224d7485..a9cbc08f1a 100644 --- a/meta/recipes-devtools/gcc/gcc/0014-Don-t-search-host-directory-during-relink-if-inst_pr.patch +++ b/meta/recipes-devtools/gcc/gcc/0014-Don-t-search-host-directory-during-relink-if-inst_pr.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 793201cebfeb129f6f263e64310b30a0ffa48895 Mon Sep 17 00:00:00 2001 | 1 | From e0d53bf9d59438a1dce0774163fa8c41cf32f306 Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Tue, 3 Mar 2015 08:21:19 +0000 | 3 | Date: Tue, 3 Mar 2015 08:21:19 +0000 |
4 | Subject: [PATCH] Don't search host directory during "relink" if $inst_prefix | 4 | Subject: [PATCH] Don't search host directory during "relink" if $inst_prefix |
diff --git a/meta/recipes-devtools/gcc/gcc/0015-libcc1-fix-libcc1-s-install-path-and-rpath.patch b/meta/recipes-devtools/gcc/gcc/0015-libcc1-fix-libcc1-s-install-path-and-rpath.patch index 33c601ac97..8eb00287fc 100644 --- a/meta/recipes-devtools/gcc/gcc/0015-libcc1-fix-libcc1-s-install-path-and-rpath.patch +++ b/meta/recipes-devtools/gcc/gcc/0015-libcc1-fix-libcc1-s-install-path-and-rpath.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 5de63874335c1c673dd132f6aca00dc13f1eac51 Mon Sep 17 00:00:00 2001 | 1 | From c2ea8aaa7b00c519c9c269c23ff840138ae491ac Mon Sep 17 00:00:00 2001 |
2 | From: Robert Yang <liezhi.yang@windriver.com> | 2 | From: Robert Yang <liezhi.yang@windriver.com> |
3 | Date: Sun, 5 Jul 2015 20:25:18 -0700 | 3 | Date: Sun, 5 Jul 2015 20:25:18 -0700 |
4 | Subject: [PATCH] libcc1: fix libcc1's install path and rpath | 4 | Subject: [PATCH] libcc1: fix libcc1's install path and rpath |
@@ -20,7 +20,7 @@ Signed-off-by: Robert Yang <liezhi.yang@windriver.com> | |||
20 | 2 files changed, 4 insertions(+), 4 deletions(-) | 20 | 2 files changed, 4 insertions(+), 4 deletions(-) |
21 | 21 | ||
22 | diff --git a/libcc1/Makefile.am b/libcc1/Makefile.am | 22 | diff --git a/libcc1/Makefile.am b/libcc1/Makefile.am |
23 | index 921a33fe236..938e6f964cd 100644 | 23 | index b592bc8645f..97e2b615fb7 100644 |
24 | --- a/libcc1/Makefile.am | 24 | --- a/libcc1/Makefile.am |
25 | +++ b/libcc1/Makefile.am | 25 | +++ b/libcc1/Makefile.am |
26 | @@ -40,8 +40,8 @@ libiberty = $(if $(wildcard $(libiberty_noasan)),$(Wc)$(libiberty_noasan), \ | 26 | @@ -40,8 +40,8 @@ libiberty = $(if $(wildcard $(libiberty_noasan)),$(Wc)$(libiberty_noasan), \ |
diff --git a/meta/recipes-devtools/gcc/gcc/0016-handle-sysroot-support-for-nativesdk-gcc.patch b/meta/recipes-devtools/gcc/gcc/0016-handle-sysroot-support-for-nativesdk-gcc.patch index bdffcae7dc..82342b0532 100644 --- a/meta/recipes-devtools/gcc/gcc/0016-handle-sysroot-support-for-nativesdk-gcc.patch +++ b/meta/recipes-devtools/gcc/gcc/0016-handle-sysroot-support-for-nativesdk-gcc.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From bbc75b93bff66891fa7ffb3af5c6ad53df1fff68 Mon Sep 17 00:00:00 2001 | 1 | From 82da570396a6835e7208c8bb508220524b3a07c7 Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Mon, 7 Dec 2015 23:39:54 +0000 | 3 | Date: Mon, 7 Dec 2015 23:39:54 +0000 |
4 | Subject: [PATCH] handle sysroot support for nativesdk-gcc | 4 | Subject: [PATCH] handle sysroot support for nativesdk-gcc |
@@ -46,10 +46,10 @@ Signed-off-by: Mark Hatle <mark.hatle@kernel.crashing.org> | |||
46 | 8 files changed, 95 insertions(+), 73 deletions(-) | 46 | 8 files changed, 95 insertions(+), 73 deletions(-) |
47 | 47 | ||
48 | diff --git a/gcc/c-family/c-opts.cc b/gcc/c-family/c-opts.cc | 48 | diff --git a/gcc/c-family/c-opts.cc b/gcc/c-family/c-opts.cc |
49 | index c68a2a27469..77e9b5eceaa 100644 | 49 | index be3058dca63..0be85374760 100644 |
50 | --- a/gcc/c-family/c-opts.cc | 50 | --- a/gcc/c-family/c-opts.cc |
51 | +++ b/gcc/c-family/c-opts.cc | 51 | +++ b/gcc/c-family/c-opts.cc |
52 | @@ -1458,8 +1458,8 @@ add_prefixed_path (const char *suffix, incpath_kind chain) | 52 | @@ -1542,8 +1542,8 @@ add_prefixed_path (const char *suffix, incpath_kind chain) |
53 | size_t prefix_len, suffix_len; | 53 | size_t prefix_len, suffix_len; |
54 | 54 | ||
55 | suffix_len = strlen (suffix); | 55 | suffix_len = strlen (suffix); |
@@ -61,7 +61,7 @@ index c68a2a27469..77e9b5eceaa 100644 | |||
61 | path = (char *) xmalloc (prefix_len + suffix_len + 1); | 61 | path = (char *) xmalloc (prefix_len + suffix_len + 1); |
62 | memcpy (path, prefix, prefix_len); | 62 | memcpy (path, prefix, prefix_len); |
63 | diff --git a/gcc/config/linux.h b/gcc/config/linux.h | 63 | diff --git a/gcc/config/linux.h b/gcc/config/linux.h |
64 | index 57496ff1f2f..c921cf6ef63 100644 | 64 | index c6aada42eb7..79ff13184d3 100644 |
65 | --- a/gcc/config/linux.h | 65 | --- a/gcc/config/linux.h |
66 | +++ b/gcc/config/linux.h | 66 | +++ b/gcc/config/linux.h |
67 | @@ -121,53 +121,53 @@ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see | 67 | @@ -121,53 +121,53 @@ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see |
@@ -139,7 +139,7 @@ index 57496ff1f2f..c921cf6ef63 100644 | |||
139 | } | 139 | } |
140 | #endif | 140 | #endif |
141 | diff --git a/gcc/config/rs6000/sysv4.h b/gcc/config/rs6000/sysv4.h | 141 | diff --git a/gcc/config/rs6000/sysv4.h b/gcc/config/rs6000/sysv4.h |
142 | index 3a443abcf6b..ef83a5a432e 100644 | 142 | index 247acec9d8e..5526bf050b8 100644 |
143 | --- a/gcc/config/rs6000/sysv4.h | 143 | --- a/gcc/config/rs6000/sysv4.h |
144 | +++ b/gcc/config/rs6000/sysv4.h | 144 | +++ b/gcc/config/rs6000/sysv4.h |
145 | @@ -950,53 +950,53 @@ ncrtn.o%s" | 145 | @@ -950,53 +950,53 @@ ncrtn.o%s" |
@@ -217,7 +217,7 @@ index 3a443abcf6b..ef83a5a432e 100644 | |||
217 | } | 217 | } |
218 | #endif | 218 | #endif |
219 | diff --git a/gcc/cppdefault.cc b/gcc/cppdefault.cc | 219 | diff --git a/gcc/cppdefault.cc b/gcc/cppdefault.cc |
220 | index 734590a7059..b4a8fc29e4a 100644 | 220 | index 98bd66f95c6..742a4e56d5b 100644 |
221 | --- a/gcc/cppdefault.cc | 221 | --- a/gcc/cppdefault.cc |
222 | +++ b/gcc/cppdefault.cc | 222 | +++ b/gcc/cppdefault.cc |
223 | @@ -35,6 +35,30 @@ | 223 | @@ -35,6 +35,30 @@ |
@@ -347,7 +347,7 @@ index 734590a7059..b4a8fc29e4a 100644 | |||
347 | /* This value is set by cpp_relocated at runtime */ | 347 | /* This value is set by cpp_relocated at runtime */ |
348 | const char *gcc_exec_prefix; | 348 | const char *gcc_exec_prefix; |
349 | diff --git a/gcc/cppdefault.h b/gcc/cppdefault.h | 349 | diff --git a/gcc/cppdefault.h b/gcc/cppdefault.h |
350 | index e26b424e99c..c9abb090dcd 100644 | 350 | index e9b9264caff..168144b84b8 100644 |
351 | --- a/gcc/cppdefault.h | 351 | --- a/gcc/cppdefault.h |
352 | +++ b/gcc/cppdefault.h | 352 | +++ b/gcc/cppdefault.h |
353 | @@ -33,7 +33,8 @@ | 353 | @@ -33,7 +33,8 @@ |
@@ -382,10 +382,10 @@ index e26b424e99c..c9abb090dcd 100644 | |||
382 | subdirectory of the actual installation. */ | 382 | subdirectory of the actual installation. */ |
383 | extern const char *gcc_exec_prefix; | 383 | extern const char *gcc_exec_prefix; |
384 | diff --git a/gcc/gcc.cc b/gcc/gcc.cc | 384 | diff --git a/gcc/gcc.cc b/gcc/gcc.cc |
385 | index 8af0c814c33..605fe3b8c0d 100644 | 385 | index e3e9374d1cc..b080609341b 100644 |
386 | --- a/gcc/gcc.cc | 386 | --- a/gcc/gcc.cc |
387 | +++ b/gcc/gcc.cc | 387 | +++ b/gcc/gcc.cc |
388 | @@ -255,6 +255,8 @@ FILE *report_times_to_file = NULL; | 388 | @@ -259,6 +259,8 @@ FILE *report_times_to_file = NULL; |
389 | #endif | 389 | #endif |
390 | static const char *target_system_root = DEFAULT_TARGET_SYSTEM_ROOT; | 390 | static const char *target_system_root = DEFAULT_TARGET_SYSTEM_ROOT; |
391 | 391 | ||
@@ -394,7 +394,7 @@ index 8af0c814c33..605fe3b8c0d 100644 | |||
394 | /* Nonzero means pass the updated target_system_root to the compiler. */ | 394 | /* Nonzero means pass the updated target_system_root to the compiler. */ |
395 | 395 | ||
396 | static int target_system_root_changed; | 396 | static int target_system_root_changed; |
397 | @@ -578,6 +580,7 @@ or with constant text in a single argument. | 397 | @@ -591,6 +593,7 @@ or with constant text in a single argument. |
398 | %G process LIBGCC_SPEC as a spec. | 398 | %G process LIBGCC_SPEC as a spec. |
399 | %R Output the concatenation of target_system_root and | 399 | %R Output the concatenation of target_system_root and |
400 | target_sysroot_suffix. | 400 | target_sysroot_suffix. |
@@ -402,7 +402,7 @@ index 8af0c814c33..605fe3b8c0d 100644 | |||
402 | %S process STARTFILE_SPEC as a spec. A capital S is actually used here. | 402 | %S process STARTFILE_SPEC as a spec. A capital S is actually used here. |
403 | %E process ENDFILE_SPEC as a spec. A capital E is actually used here. | 403 | %E process ENDFILE_SPEC as a spec. A capital E is actually used here. |
404 | %C process CPP_SPEC as a spec. | 404 | %C process CPP_SPEC as a spec. |
405 | @@ -1619,10 +1622,10 @@ static const char *gcc_libexec_prefix; | 405 | @@ -1638,10 +1641,10 @@ static const char *gcc_libexec_prefix; |
406 | gcc_exec_prefix is set because, in that case, we know where the | 406 | gcc_exec_prefix is set because, in that case, we know where the |
407 | compiler has been installed, and use paths relative to that | 407 | compiler has been installed, and use paths relative to that |
408 | location instead. */ | 408 | location instead. */ |
@@ -417,7 +417,7 @@ index 8af0c814c33..605fe3b8c0d 100644 | |||
417 | 417 | ||
418 | /* For native compilers, these are well-known paths containing | 418 | /* For native compilers, these are well-known paths containing |
419 | components that may be provided by the system. For cross | 419 | components that may be provided by the system. For cross |
420 | @@ -1630,9 +1633,9 @@ static const char *const standard_startfile_prefix = STANDARD_STARTFILE_PREFIX; | 420 | @@ -1649,9 +1652,9 @@ static const char *const standard_startfile_prefix = STANDARD_STARTFILE_PREFIX; |
421 | static const char *md_exec_prefix = MD_EXEC_PREFIX; | 421 | static const char *md_exec_prefix = MD_EXEC_PREFIX; |
422 | static const char *md_startfile_prefix = MD_STARTFILE_PREFIX; | 422 | static const char *md_startfile_prefix = MD_STARTFILE_PREFIX; |
423 | static const char *md_startfile_prefix_1 = MD_STARTFILE_PREFIX_1; | 423 | static const char *md_startfile_prefix_1 = MD_STARTFILE_PREFIX_1; |
@@ -429,7 +429,7 @@ index 8af0c814c33..605fe3b8c0d 100644 | |||
429 | = STANDARD_STARTFILE_PREFIX_2; | 429 | = STANDARD_STARTFILE_PREFIX_2; |
430 | 430 | ||
431 | /* A relative path to be used in finding the location of tools | 431 | /* A relative path to be used in finding the location of tools |
432 | @@ -6652,6 +6655,11 @@ do_spec_1 (const char *spec, int inswitch, const char *soft_matched_part) | 432 | @@ -6796,6 +6799,11 @@ do_spec_1 (const char *spec, int inswitch, const char *soft_matched_part) |
433 | } | 433 | } |
434 | break; | 434 | break; |
435 | 435 | ||
@@ -442,7 +442,7 @@ index 8af0c814c33..605fe3b8c0d 100644 | |||
442 | value = do_spec_1 (startfile_spec, 0, NULL); | 442 | value = do_spec_1 (startfile_spec, 0, NULL); |
443 | if (value != 0) | 443 | if (value != 0) |
444 | diff --git a/gcc/incpath.cc b/gcc/incpath.cc | 444 | diff --git a/gcc/incpath.cc b/gcc/incpath.cc |
445 | index 46c0d543205..d088dae7b04 100644 | 445 | index e572d98ab17..2cb2b586e4a 100644 |
446 | --- a/gcc/incpath.cc | 446 | --- a/gcc/incpath.cc |
447 | +++ b/gcc/incpath.cc | 447 | +++ b/gcc/incpath.cc |
448 | @@ -135,7 +135,7 @@ add_standard_paths (const char *sysroot, const char *iprefix, | 448 | @@ -135,7 +135,7 @@ add_standard_paths (const char *sysroot, const char *iprefix, |
@@ -489,7 +489,7 @@ index 46c0d543205..d088dae7b04 100644 | |||
489 | str = update_path (ostr, p->component); | 489 | str = update_path (ostr, p->component); |
490 | free (ostr); | 490 | free (ostr); |
491 | diff --git a/gcc/prefix.cc b/gcc/prefix.cc | 491 | diff --git a/gcc/prefix.cc b/gcc/prefix.cc |
492 | index c2a37bde5ea..33944701ced 100644 | 492 | index 6a98e856855..c403b2da7a3 100644 |
493 | --- a/gcc/prefix.cc | 493 | --- a/gcc/prefix.cc |
494 | +++ b/gcc/prefix.cc | 494 | +++ b/gcc/prefix.cc |
495 | @@ -73,7 +73,9 @@ License along with GCC; see the file COPYING3. If not see | 495 | @@ -73,7 +73,9 @@ License along with GCC; see the file COPYING3. If not see |
diff --git a/meta/recipes-devtools/gcc/gcc/0017-Search-target-sysroot-gcc-version-specific-dirs-with.patch b/meta/recipes-devtools/gcc/gcc/0017-Search-target-sysroot-gcc-version-specific-dirs-with.patch index 8a11049ca3..f87461bcaf 100644 --- a/meta/recipes-devtools/gcc/gcc/0017-Search-target-sysroot-gcc-version-specific-dirs-with.patch +++ b/meta/recipes-devtools/gcc/gcc/0017-Search-target-sysroot-gcc-version-specific-dirs-with.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 7e095089452b6e895ec40981752e9f902f0ad889 Mon Sep 17 00:00:00 2001 | 1 | From cafcdfea0526dda5377a999dd9b1b8c805eafe0a Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Mon, 7 Dec 2015 23:41:45 +0000 | 3 | Date: Mon, 7 Dec 2015 23:41:45 +0000 |
4 | Subject: [PATCH] Search target sysroot gcc version specific dirs with | 4 | Subject: [PATCH] Search target sysroot gcc version specific dirs with |
@@ -52,10 +52,10 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com> | |||
52 | 1 file changed, 28 insertions(+), 1 deletion(-) | 52 | 1 file changed, 28 insertions(+), 1 deletion(-) |
53 | 53 | ||
54 | diff --git a/gcc/gcc.cc b/gcc/gcc.cc | 54 | diff --git a/gcc/gcc.cc b/gcc/gcc.cc |
55 | index 605fe3b8c0d..c3a1dab38c4 100644 | 55 | index b080609341b..1a1ca05064e 100644 |
56 | --- a/gcc/gcc.cc | 56 | --- a/gcc/gcc.cc |
57 | +++ b/gcc/gcc.cc | 57 | +++ b/gcc/gcc.cc |
58 | @@ -2809,7 +2809,7 @@ for_each_path (const struct path_prefix *paths, | 58 | @@ -2823,7 +2823,7 @@ for_each_path (const struct path_prefix *paths, |
59 | if (path == NULL) | 59 | if (path == NULL) |
60 | { | 60 | { |
61 | len = paths->max_len + extra_space + 1; | 61 | len = paths->max_len + extra_space + 1; |
@@ -64,7 +64,7 @@ index 605fe3b8c0d..c3a1dab38c4 100644 | |||
64 | path = XNEWVEC (char, len); | 64 | path = XNEWVEC (char, len); |
65 | } | 65 | } |
66 | 66 | ||
67 | @@ -2821,6 +2821,33 @@ for_each_path (const struct path_prefix *paths, | 67 | @@ -2835,6 +2835,33 @@ for_each_path (const struct path_prefix *paths, |
68 | /* Look first in MACHINE/VERSION subdirectory. */ | 68 | /* Look first in MACHINE/VERSION subdirectory. */ |
69 | if (!skip_multi_dir) | 69 | if (!skip_multi_dir) |
70 | { | 70 | { |
diff --git a/meta/recipes-devtools/gcc/gcc/0018-Add-ssp_nonshared-to-link-commandline-for-musl-targe.patch b/meta/recipes-devtools/gcc/gcc/0018-Add-ssp_nonshared-to-link-commandline-for-musl-targe.patch index 9bc77b485f..71e43e556d 100644 --- a/meta/recipes-devtools/gcc/gcc/0018-Add-ssp_nonshared-to-link-commandline-for-musl-targe.patch +++ b/meta/recipes-devtools/gcc/gcc/0018-Add-ssp_nonshared-to-link-commandline-for-musl-targe.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From bf92b290556b7050df0a001cc7ae43cf79990456 Mon Sep 17 00:00:00 2001 | 1 | From f2ad66276fd197cc088d7316662b1d792357ef6c Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Tue, 27 Jun 2017 18:10:54 -0700 | 3 | Date: Tue, 27 Jun 2017 18:10:54 -0700 |
4 | Subject: [PATCH] Add ssp_nonshared to link commandline for musl targets | 4 | Subject: [PATCH] Add ssp_nonshared to link commandline for musl targets |
@@ -23,7 +23,7 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com> | |||
23 | 3 files changed, 27 insertions(+) | 23 | 3 files changed, 27 insertions(+) |
24 | 24 | ||
25 | diff --git a/gcc/config/linux.h b/gcc/config/linux.h | 25 | diff --git a/gcc/config/linux.h b/gcc/config/linux.h |
26 | index c921cf6ef63..32e1bc1ae2d 100644 | 26 | index 79ff13184d3..19f552e8c7d 100644 |
27 | --- a/gcc/config/linux.h | 27 | --- a/gcc/config/linux.h |
28 | +++ b/gcc/config/linux.h | 28 | +++ b/gcc/config/linux.h |
29 | @@ -195,6 +195,13 @@ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see | 29 | @@ -195,6 +195,13 @@ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see |
@@ -41,10 +41,10 @@ index c921cf6ef63..32e1bc1ae2d 100644 | |||
41 | 41 | ||
42 | #if (DEFAULT_LIBC == LIBC_UCLIBC) && defined (SINGLE_LIBC) /* uClinux */ | 42 | #if (DEFAULT_LIBC == LIBC_UCLIBC) && defined (SINGLE_LIBC) /* uClinux */ |
43 | diff --git a/gcc/config/rs6000/linux.h b/gcc/config/rs6000/linux.h | 43 | diff --git a/gcc/config/rs6000/linux.h b/gcc/config/rs6000/linux.h |
44 | index 5d21befe8e4..1248a68e4ca 100644 | 44 | index 5f6cede558a..900993b6da4 100644 |
45 | --- a/gcc/config/rs6000/linux.h | 45 | --- a/gcc/config/rs6000/linux.h |
46 | +++ b/gcc/config/rs6000/linux.h | 46 | +++ b/gcc/config/rs6000/linux.h |
47 | @@ -99,6 +99,16 @@ | 47 | @@ -102,6 +102,16 @@ |
48 | " -m elf32ppclinux") | 48 | " -m elf32ppclinux") |
49 | #endif | 49 | #endif |
50 | 50 | ||
@@ -62,10 +62,10 @@ index 5d21befe8e4..1248a68e4ca 100644 | |||
62 | #define LINK_OS_LINUX_SPEC LINK_OS_LINUX_EMUL " %{!shared: %{!static: \ | 62 | #define LINK_OS_LINUX_SPEC LINK_OS_LINUX_EMUL " %{!shared: %{!static: \ |
63 | %{!static-pie: \ | 63 | %{!static-pie: \ |
64 | diff --git a/gcc/config/rs6000/linux64.h b/gcc/config/rs6000/linux64.h | 64 | diff --git a/gcc/config/rs6000/linux64.h b/gcc/config/rs6000/linux64.h |
65 | index 2ddab7c99c1..9641580fc83 100644 | 65 | index a11ae7643b7..2abde270813 100644 |
66 | --- a/gcc/config/rs6000/linux64.h | 66 | --- a/gcc/config/rs6000/linux64.h |
67 | +++ b/gcc/config/rs6000/linux64.h | 67 | +++ b/gcc/config/rs6000/linux64.h |
68 | @@ -372,6 +372,16 @@ extern int dot_symbols; | 68 | @@ -373,6 +373,16 @@ extern int dot_symbols; |
69 | " -m elf64ppc") | 69 | " -m elf64ppc") |
70 | #endif | 70 | #endif |
71 | 71 | ||
diff --git a/meta/recipes-devtools/gcc/gcc/0019-Re-introduce-spe-commandline-options.patch b/meta/recipes-devtools/gcc/gcc/0019-Re-introduce-spe-commandline-options.patch index f785688661..7f92dea0c6 100644 --- a/meta/recipes-devtools/gcc/gcc/0019-Re-introduce-spe-commandline-options.patch +++ b/meta/recipes-devtools/gcc/gcc/0019-Re-introduce-spe-commandline-options.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 587ac4a59ea56da18a9989c31a75124e974cb37c Mon Sep 17 00:00:00 2001 | 1 | From 65baa3804819fbcd540be48f4c4611be5cba43c4 Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Wed, 6 Jun 2018 12:10:22 -0700 | 3 | Date: Wed, 6 Jun 2018 12:10:22 -0700 |
4 | Subject: [PATCH] Re-introduce spe commandline options | 4 | Subject: [PATCH] Re-introduce spe commandline options |
@@ -14,7 +14,7 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com> | |||
14 | 1 file changed, 13 insertions(+) | 14 | 1 file changed, 13 insertions(+) |
15 | 15 | ||
16 | diff --git a/gcc/config/rs6000/rs6000.opt b/gcc/config/rs6000/rs6000.opt | 16 | diff --git a/gcc/config/rs6000/rs6000.opt b/gcc/config/rs6000/rs6000.opt |
17 | index bde6d3ff664..5af9640825c 100644 | 17 | index 83197681b66..04bea50b894 100644 |
18 | --- a/gcc/config/rs6000/rs6000.opt | 18 | --- a/gcc/config/rs6000/rs6000.opt |
19 | +++ b/gcc/config/rs6000/rs6000.opt | 19 | +++ b/gcc/config/rs6000/rs6000.opt |
20 | @@ -344,6 +344,19 @@ mdebug= | 20 | @@ -344,6 +344,19 @@ mdebug= |
diff --git a/meta/recipes-devtools/gcc/gcc/0020-libgcc_s-Use-alias-for-__cpu_indicator_init-instead-.patch b/meta/recipes-devtools/gcc/gcc/0020-libgcc_s-Use-alias-for-__cpu_indicator_init-instead-.patch index b86edab5d5..c5f43f5ba4 100644 --- a/meta/recipes-devtools/gcc/gcc/0020-libgcc_s-Use-alias-for-__cpu_indicator_init-instead-.patch +++ b/meta/recipes-devtools/gcc/gcc/0020-libgcc_s-Use-alias-for-__cpu_indicator_init-instead-.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 8c4c59521720f8c1b1e38e38896b47fcb1bf00ac Mon Sep 17 00:00:00 2001 | 1 | From 18b0057e4e2c5cf6fc6541670418f43f0381db3e Mon Sep 17 00:00:00 2001 |
2 | From: Szabolcs Nagy <nsz@port70.net> | 2 | From: Szabolcs Nagy <nsz@port70.net> |
3 | Date: Sat, 24 Oct 2015 20:09:53 +0000 | 3 | Date: Sat, 24 Oct 2015 20:09:53 +0000 |
4 | Subject: [PATCH] libgcc_s: Use alias for __cpu_indicator_init instead of | 4 | Subject: [PATCH] libgcc_s: Use alias for __cpu_indicator_init instead of |
@@ -39,10 +39,10 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com> | |||
39 | 3 files changed, 6 insertions(+), 6 deletions(-) | 39 | 3 files changed, 6 insertions(+), 6 deletions(-) |
40 | 40 | ||
41 | diff --git a/gcc/config/i386/i386-expand.cc b/gcc/config/i386/i386-expand.cc | 41 | diff --git a/gcc/config/i386/i386-expand.cc b/gcc/config/i386/i386-expand.cc |
42 | index 0d817fc3f3b..8d34d19d3f9 100644 | 42 | index 8bb8f21e686..bcf976aa1b9 100644 |
43 | --- a/gcc/config/i386/i386-expand.cc | 43 | --- a/gcc/config/i386/i386-expand.cc |
44 | +++ b/gcc/config/i386/i386-expand.cc | 44 | +++ b/gcc/config/i386/i386-expand.cc |
45 | @@ -12691,10 +12691,10 @@ ix86_expand_builtin (tree exp, rtx target, rtx subtarget, | 45 | @@ -13087,10 +13087,10 @@ ix86_expand_builtin (tree exp, rtx target, rtx subtarget, |
46 | { | 46 | { |
47 | case IX86_BUILTIN_CPU_INIT: | 47 | case IX86_BUILTIN_CPU_INIT: |
48 | { | 48 | { |
@@ -56,7 +56,7 @@ index 0d817fc3f3b..8d34d19d3f9 100644 | |||
56 | return expand_expr (call_expr, target, mode, EXPAND_NORMAL); | 56 | return expand_expr (call_expr, target, mode, EXPAND_NORMAL); |
57 | } | 57 | } |
58 | diff --git a/libgcc/config/i386/cpuinfo.c b/libgcc/config/i386/cpuinfo.c | 58 | diff --git a/libgcc/config/i386/cpuinfo.c b/libgcc/config/i386/cpuinfo.c |
59 | index 50b6d8248a2..724ced402a1 100644 | 59 | index 0bd6dffe17e..ef4acb175a4 100644 |
60 | --- a/libgcc/config/i386/cpuinfo.c | 60 | --- a/libgcc/config/i386/cpuinfo.c |
61 | +++ b/libgcc/config/i386/cpuinfo.c | 61 | +++ b/libgcc/config/i386/cpuinfo.c |
62 | @@ -63,7 +63,7 @@ __cpu_indicator_init (void) | 62 | @@ -63,7 +63,7 @@ __cpu_indicator_init (void) |
diff --git a/meta/recipes-devtools/gcc/gcc/0021-gentypes-genmodes-Do-not-use-__LINE__-for-maintainin.patch b/meta/recipes-devtools/gcc/gcc/0021-gentypes-genmodes-Do-not-use-__LINE__-for-maintainin.patch index b6707592dc..dce2c047d0 100644 --- a/meta/recipes-devtools/gcc/gcc/0021-gentypes-genmodes-Do-not-use-__LINE__-for-maintainin.patch +++ b/meta/recipes-devtools/gcc/gcc/0021-gentypes-genmodes-Do-not-use-__LINE__-for-maintainin.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From f15b19d8e058c983c49c4566c1879fdaf5b1ab54 Mon Sep 17 00:00:00 2001 | 1 | From a9bb04b7cf02b2f2d16f68733488f9e48fc7e7a9 Mon Sep 17 00:00:00 2001 |
2 | From: Richard Purdie <richard.purdie@linuxfoundation.org> | 2 | From: Richard Purdie <richard.purdie@linuxfoundation.org> |
3 | Date: Tue, 10 Mar 2020 08:26:53 -0700 | 3 | Date: Tue, 10 Mar 2020 08:26:53 -0700 |
4 | Subject: [PATCH] gentypes/genmodes: Do not use __LINE__ for maintaining | 4 | Subject: [PATCH] gentypes/genmodes: Do not use __LINE__ for maintaining |
@@ -17,7 +17,7 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com> | |||
17 | 2 files changed, 19 insertions(+), 19 deletions(-) | 17 | 2 files changed, 19 insertions(+), 19 deletions(-) |
18 | 18 | ||
19 | diff --git a/gcc/gengtype.cc b/gcc/gengtype.cc | 19 | diff --git a/gcc/gengtype.cc b/gcc/gengtype.cc |
20 | index 7763f40e9ab..4f0c1eb1508 100644 | 20 | index c0ecbe8558b..9f8ab7146c0 100644 |
21 | --- a/gcc/gengtype.cc | 21 | --- a/gcc/gengtype.cc |
22 | +++ b/gcc/gengtype.cc | 22 | +++ b/gcc/gengtype.cc |
23 | @@ -1005,7 +1005,7 @@ create_field_at (pair_p next, type_p type, const char *name, options_p opt, | 23 | @@ -1005,7 +1005,7 @@ create_field_at (pair_p next, type_p type, const char *name, options_p opt, |
@@ -38,7 +38,7 @@ index 7763f40e9ab..4f0c1eb1508 100644 | |||
38 | 38 | ||
39 | /* Reverse a linked list of 'struct pair's in place. */ | 39 | /* Reverse a linked list of 'struct pair's in place. */ |
40 | pair_p | 40 | pair_p |
41 | @@ -5223,7 +5223,7 @@ main (int argc, char **argv) | 41 | @@ -5232,7 +5232,7 @@ main (int argc, char **argv) |
42 | /* These types are set up with #define or else outside of where | 42 | /* These types are set up with #define or else outside of where |
43 | we can see them. We should initialize them before calling | 43 | we can see them. We should initialize them before calling |
44 | read_input_list. */ | 44 | read_input_list. */ |
@@ -48,7 +48,7 @@ index 7763f40e9ab..4f0c1eb1508 100644 | |||
48 | POS_HERE (do_scalar_typedef ("CUMULATIVE_ARGS", &pos)); | 48 | POS_HERE (do_scalar_typedef ("CUMULATIVE_ARGS", &pos)); |
49 | POS_HERE (do_scalar_typedef ("REAL_VALUE_TYPE", &pos)); | 49 | POS_HERE (do_scalar_typedef ("REAL_VALUE_TYPE", &pos)); |
50 | diff --git a/gcc/genmodes.cc b/gcc/genmodes.cc | 50 | diff --git a/gcc/genmodes.cc b/gcc/genmodes.cc |
51 | index 715787b8f48..302adff28d5 100644 | 51 | index eb3f9e2f0c1..5425635cb22 100644 |
52 | --- a/gcc/genmodes.cc | 52 | --- a/gcc/genmodes.cc |
53 | +++ b/gcc/genmodes.cc | 53 | +++ b/gcc/genmodes.cc |
54 | @@ -441,7 +441,7 @@ complete_all_modes (void) | 54 | @@ -441,7 +441,7 @@ complete_all_modes (void) |
diff --git a/meta/recipes-devtools/gcc/gcc/0022-libatomic-Do-not-enforce-march-on-aarch64.patch b/meta/recipes-devtools/gcc/gcc/0022-libatomic-Do-not-enforce-march-on-aarch64.patch index 0ea8aac543..d710b50594 100644 --- a/meta/recipes-devtools/gcc/gcc/0022-libatomic-Do-not-enforce-march-on-aarch64.patch +++ b/meta/recipes-devtools/gcc/gcc/0022-libatomic-Do-not-enforce-march-on-aarch64.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 939a899b862f7a25e52b74d1587fc75fc65779c0 Mon Sep 17 00:00:00 2001 | 1 | From db4380eac3cd9baa2f31bcd2fb936ed51f27b415 Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Wed, 13 May 2020 15:10:38 -0700 | 3 | Date: Wed, 13 May 2020 15:10:38 -0700 |
4 | Subject: [PATCH] libatomic: Do not enforce march on aarch64 | 4 | Subject: [PATCH] libatomic: Do not enforce march on aarch64 |
@@ -17,25 +17,25 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com> | |||
17 | 2 files changed, 2 deletions(-) | 17 | 2 files changed, 2 deletions(-) |
18 | 18 | ||
19 | diff --git a/libatomic/Makefile.am b/libatomic/Makefile.am | 19 | diff --git a/libatomic/Makefile.am b/libatomic/Makefile.am |
20 | index c6c8d81c56a..d959a5d040e 100644 | 20 | index 980677f3533..26188917946 100644 |
21 | --- a/libatomic/Makefile.am | 21 | --- a/libatomic/Makefile.am |
22 | +++ b/libatomic/Makefile.am | 22 | +++ b/libatomic/Makefile.am |
23 | @@ -125,7 +125,6 @@ libatomic_la_LIBADD = $(foreach s,$(SIZES),$(addsuffix _$(s)_.lo,$(SIZEOBJS))) | 23 | @@ -130,7 +130,6 @@ libatomic_la_LIBADD = $(foreach s,$(SIZES),$(addsuffix _$(s)_.lo,$(SIZEOBJS))) |
24 | ## On a target-specific basis, include alternates to be selected by IFUNC. | 24 | ## On a target-specific basis, include alternates to be selected by IFUNC. |
25 | if HAVE_IFUNC | 25 | if HAVE_IFUNC |
26 | if ARCH_AARCH64_LINUX | 26 | if ARCH_AARCH64_LINUX |
27 | -IFUNC_OPTIONS = -march=armv8-a+lse | 27 | -IFUNC_OPTIONS = -march=armv8-a+lse |
28 | libatomic_la_LIBADD += $(foreach s,$(SIZES),$(addsuffix _$(s)_1_.lo,$(SIZEOBJS))) | 28 | libatomic_la_LIBADD += $(foreach s,$(SIZES),$(addsuffix _$(s)_1_.lo,$(SIZEOBJS))) |
29 | libatomic_la_SOURCES += atomic_16.S | ||
30 | 29 | ||
30 | endif | ||
31 | diff --git a/libatomic/Makefile.in b/libatomic/Makefile.in | 31 | diff --git a/libatomic/Makefile.in b/libatomic/Makefile.in |
32 | index a0fa3dfc8cc..e70d389874a 100644 | 32 | index d9d529bc502..47dacef9e01 100644 |
33 | --- a/libatomic/Makefile.in | 33 | --- a/libatomic/Makefile.in |
34 | +++ b/libatomic/Makefile.in | 34 | +++ b/libatomic/Makefile.in |
35 | @@ -447,7 +447,6 @@ M_SRC = $(firstword $(filter %/$(M_FILE), $(all_c_files))) | 35 | @@ -452,7 +452,6 @@ M_SRC = $(firstword $(filter %/$(M_FILE), $(all_c_files))) |
36 | libatomic_la_LIBADD = $(foreach s,$(SIZES),$(addsuffix \ | 36 | libatomic_la_LIBADD = $(foreach s,$(SIZES),$(addsuffix \ |
37 | _$(s)_.lo,$(SIZEOBJS))) $(am__append_1) $(am__append_3) \ | 37 | _$(s)_.lo,$(SIZEOBJS))) $(am__append_1) $(am__append_2) \ |
38 | $(am__append_4) $(am__append_5) | 38 | $(am__append_3) $(am__append_4) |
39 | -@ARCH_AARCH64_LINUX_TRUE@@HAVE_IFUNC_TRUE@IFUNC_OPTIONS = -march=armv8-a+lse | 39 | -@ARCH_AARCH64_LINUX_TRUE@@HAVE_IFUNC_TRUE@IFUNC_OPTIONS = -march=armv8-a+lse |
40 | @ARCH_ARM_LINUX_TRUE@@HAVE_IFUNC_TRUE@IFUNC_OPTIONS = -march=armv7-a+fp -DHAVE_KERNEL64 | 40 | @ARCH_ARM_LINUX_TRUE@@HAVE_IFUNC_TRUE@IFUNC_OPTIONS = -march=armv7-a+fp -DHAVE_KERNEL64 |
41 | @ARCH_I386_TRUE@@HAVE_IFUNC_TRUE@IFUNC_OPTIONS = -march=i586 | 41 | @ARCH_I386_TRUE@@HAVE_IFUNC_TRUE@IFUNC_OPTIONS = -march=i586 |
diff --git a/meta/recipes-devtools/gcc/gcc/0023-Fix-install-path-of-linux64.h.patch b/meta/recipes-devtools/gcc/gcc/0023-Fix-install-path-of-linux64.h.patch index cd962d82bd..7f1cd0b3d8 100644 --- a/meta/recipes-devtools/gcc/gcc/0023-Fix-install-path-of-linux64.h.patch +++ b/meta/recipes-devtools/gcc/gcc/0023-Fix-install-path-of-linux64.h.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 696d696381dd99ec2bddb1170f96f98da36eb418 Mon Sep 17 00:00:00 2001 | 1 | From 92a77026cc64f5eab4cb6b35e4a7c59e6ab8aa20 Mon Sep 17 00:00:00 2001 |
2 | From: Andrei Gherzan <andrei.gherzan@huawei.com> | 2 | From: Andrei Gherzan <andrei.gherzan@huawei.com> |
3 | Date: Wed, 22 Dec 2021 12:49:25 +0100 | 3 | Date: Wed, 22 Dec 2021 12:49:25 +0100 |
4 | Subject: [PATCH] Fix install path of linux64.h | 4 | Subject: [PATCH] Fix install path of linux64.h |
@@ -17,11 +17,11 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com> | |||
17 | 1 file changed, 2 insertions(+) | 17 | 1 file changed, 2 insertions(+) |
18 | 18 | ||
19 | diff --git a/gcc/Makefile.in b/gcc/Makefile.in | 19 | diff --git a/gcc/Makefile.in b/gcc/Makefile.in |
20 | index 8ef996c0f4d..21daf380e34 100644 | 20 | index 6a009c696e9..da54dd42adf 100644 |
21 | --- a/gcc/Makefile.in | 21 | --- a/gcc/Makefile.in |
22 | +++ b/gcc/Makefile.in | 22 | +++ b/gcc/Makefile.in |
23 | @@ -3731,6 +3731,8 @@ install-plugin: installdirs lang.install-plugin s-header-vars install-gengtype | 23 | @@ -3900,6 +3900,8 @@ install-plugin: installdirs lang.install-plugin s-header-vars install-gengtype |
24 | "$(srcdir)"/config/* | "$(srcdir)"/common/config/* \ | 24 | | "$(srcdir)"/config/* | "$(srcdir)"/common/config/* \ |
25 | | "$(srcdir)"/c-family/* | "$(srcdir)"/*.def ) \ | 25 | | "$(srcdir)"/c-family/* | "$(srcdir)"/*.def ) \ |
26 | base=`echo "$$path" | sed -e "s|$$srcdirstrip/||"`;; \ | 26 | base=`echo "$$path" | sed -e "s|$$srcdirstrip/||"`;; \ |
27 | + */linux64.h ) \ | 27 | + */linux64.h ) \ |
diff --git a/meta/recipes-devtools/gcc/gcc/0024-Avoid-hardcoded-build-paths-into-ppc-libgcc.patch b/meta/recipes-devtools/gcc/gcc/0024-Avoid-hardcoded-build-paths-into-ppc-libgcc.patch index 04d940ae0b..a34e6afdb4 100644 --- a/meta/recipes-devtools/gcc/gcc/0024-Avoid-hardcoded-build-paths-into-ppc-libgcc.patch +++ b/meta/recipes-devtools/gcc/gcc/0024-Avoid-hardcoded-build-paths-into-ppc-libgcc.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 9487b1d6136ea09cce4792d59d0170c712575550 Mon Sep 17 00:00:00 2001 | 1 | From c6fc577e9e89dc16d3e971aa165602746232de4c Mon Sep 17 00:00:00 2001 |
2 | From: Richard Purdie <richard.purdie@linuxfoundation.org> | 2 | From: Richard Purdie <richard.purdie@linuxfoundation.org> |
3 | Date: Sat, 20 Aug 2022 09:04:14 -0700 | 3 | Date: Sat, 20 Aug 2022 09:04:14 -0700 |
4 | Subject: [PATCH] Avoid hardcoded build paths into ppc libgcc | 4 | Subject: [PATCH] Avoid hardcoded build paths into ppc libgcc |
diff --git a/meta/recipes-devtools/gcc/gcc/0025-gcc-testsuite-tweaks-for-mips-OE.patch b/meta/recipes-devtools/gcc/gcc/0025-gcc-testsuite-tweaks-for-mips-OE.patch index e4d57c27ef..454d05d473 100644 --- a/meta/recipes-devtools/gcc/gcc/0025-gcc-testsuite-tweaks-for-mips-OE.patch +++ b/meta/recipes-devtools/gcc/gcc/0025-gcc-testsuite-tweaks-for-mips-OE.patch | |||
@@ -1,6 +1,6 @@ | |||
1 | From f12acc6a383546d48da3bdfb2f25ca2adb7976d7 Mon Sep 17 00:00:00 2001 | 1 | From 8845248c81c0695ccc65311017deef824fa538fa Mon Sep 17 00:00:00 2001 |
2 | From: Richard Purdie <richard.purdie@linuxfoundation.org> | 2 | From: Richard Purdie <richard.purdie@linuxfoundation.org> |
3 | Date: Sun, 13 Aug 2023 10:24:05 +0100 | 3 | Date: Thu, 17 Aug 2023 09:01:40 -0700 |
4 | Subject: [PATCH] gcc testsuite tweaks for mips/OE | 4 | Subject: [PATCH] gcc testsuite tweaks for mips/OE |
5 | 5 | ||
6 | Disable loongson-mmi runtine, qemu doesn't appear to fully support them even if some | 6 | Disable loongson-mmi runtine, qemu doesn't appear to fully support them even if some |
@@ -26,10 +26,12 @@ Also, for gcc.target tests, add checks on wheter loongson or msa code can | |||
26 | be run before trying that, allowing downgrading of tests there to work too. | 26 | be run before trying that, allowing downgrading of tests there to work too. |
27 | 27 | ||
28 | Upstream-Status: Pending | 28 | Upstream-Status: Pending |
29 | |||
29 | [Parts of the patch may be able to be split off and acceptable to upstream with | 30 | [Parts of the patch may be able to be split off and acceptable to upstream with |
30 | discussion. Need to investigate why qemu-user passes the 'bad' instructions'] | 31 | discussion. Need to investigate why qemu-user passes the 'bad' instructions'] |
31 | 32 | ||
32 | Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org> | 33 | Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org> |
34 | Signed-off-by: Khem Raj <raj.khem@gmail.com> | ||
33 | --- | 35 | --- |
34 | gcc/testsuite/gcc.target/mips/mips.exp | 16 +++++++++ | 36 | gcc/testsuite/gcc.target/mips/mips.exp | 16 +++++++++ |
35 | gcc/testsuite/lib/gcc-dg.exp | 11 +++++++ | 37 | gcc/testsuite/lib/gcc-dg.exp | 11 +++++++ |
@@ -37,10 +39,10 @@ Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org> | |||
37 | 3 files changed, 41 insertions(+), 31 deletions(-) | 39 | 3 files changed, 41 insertions(+), 31 deletions(-) |
38 | 40 | ||
39 | diff --git a/gcc/testsuite/gcc.target/mips/mips.exp b/gcc/testsuite/gcc.target/mips/mips.exp | 41 | diff --git a/gcc/testsuite/gcc.target/mips/mips.exp b/gcc/testsuite/gcc.target/mips/mips.exp |
40 | index 15d574202d3..2cef9709774 100644 | 42 | index e028bc93b40..b54a40d6509 100644 |
41 | --- a/gcc/testsuite/gcc.target/mips/mips.exp | 43 | --- a/gcc/testsuite/gcc.target/mips/mips.exp |
42 | +++ b/gcc/testsuite/gcc.target/mips/mips.exp | 44 | +++ b/gcc/testsuite/gcc.target/mips/mips.exp |
43 | @@ -709,7 +709,23 @@ proc mips_first_unsupported_option { upstatus } { | 45 | @@ -711,7 +711,23 @@ proc mips_first_unsupported_option { upstatus } { |
44 | global mips_option_tests | 46 | global mips_option_tests |
45 | upvar $upstatus status | 47 | upvar $upstatus status |
46 | 48 | ||
@@ -65,10 +67,10 @@ index 15d574202d3..2cef9709774 100644 | |||
65 | regsub -all "\n" $code "\\n\\\n" asm | 67 | regsub -all "\n" $code "\\n\\\n" asm |
66 | # Use check_runtime from target-supports.exp, which caches | 68 | # Use check_runtime from target-supports.exp, which caches |
67 | diff --git a/gcc/testsuite/lib/gcc-dg.exp b/gcc/testsuite/lib/gcc-dg.exp | 69 | diff --git a/gcc/testsuite/lib/gcc-dg.exp b/gcc/testsuite/lib/gcc-dg.exp |
68 | index 9d79b9402e9..e0e5cbb1af8 100644 | 70 | index 228c21d1207..d3b88e0419e 100644 |
69 | --- a/gcc/testsuite/lib/gcc-dg.exp | 71 | --- a/gcc/testsuite/lib/gcc-dg.exp |
70 | +++ b/gcc/testsuite/lib/gcc-dg.exp | 72 | +++ b/gcc/testsuite/lib/gcc-dg.exp |
71 | @@ -240,9 +240,20 @@ proc schedule-cleanups { opts } { | 73 | @@ -232,9 +232,20 @@ proc schedule-cleanups { opts } { |
72 | 74 | ||
73 | proc gcc-dg-test-1 { target_compile prog do_what extra_tool_flags } { | 75 | proc gcc-dg-test-1 { target_compile prog do_what extra_tool_flags } { |
74 | # Set up the compiler flags, based on what we're going to do. | 76 | # Set up the compiler flags, based on what we're going to do. |
@@ -77,23 +79,23 @@ index 9d79b9402e9..e0e5cbb1af8 100644 | |||
77 | set options [list] | 79 | set options [list] |
78 | 80 | ||
79 | + if [info exists do-what-limit] then { | 81 | + if [info exists do-what-limit] then { |
80 | + # Demote run tests to $do-what-limit if set | 82 | + # Demote run tests to $do-what-limit if set |
81 | + switch $do_what { | 83 | + switch $do_what { |
82 | + run { | 84 | + run { |
83 | + set do_what ${do-what-limit} | 85 | + set do_what ${do-what-limit} |
84 | + set dg-do-what ${do-what-limit} | 86 | + set dg-do-what ${do-what-limit} |
85 | + } | 87 | + } |
86 | + } | 88 | + } |
87 | + } | 89 | + } |
88 | + | 90 | + |
89 | switch $do_what { | 91 | switch $do_what { |
90 | "preprocess" { | 92 | "preprocess" { |
91 | set compile_type "preprocess" | 93 | set compile_type "preprocess" |
92 | diff --git a/gcc/testsuite/lib/target-supports.exp b/gcc/testsuite/lib/target-supports.exp | 94 | diff --git a/gcc/testsuite/lib/target-supports.exp b/gcc/testsuite/lib/target-supports.exp |
93 | index 40f71e9ed8b..10e267fa16d 100644 | 95 | index 45435586de2..04942540d8b 100644 |
94 | --- a/gcc/testsuite/lib/target-supports.exp | 96 | --- a/gcc/testsuite/lib/target-supports.exp |
95 | +++ b/gcc/testsuite/lib/target-supports.exp | 97 | +++ b/gcc/testsuite/lib/target-supports.exp |
96 | @@ -2155,14 +2155,7 @@ proc check_mips_loongson_mmi_hw_available { } { | 98 | @@ -2483,14 +2483,7 @@ proc check_mips_loongson_mmi_hw_available { } { |
97 | if { !([istarget mips*-*-*]) } { | 99 | if { !([istarget mips*-*-*]) } { |
98 | expr 0 | 100 | expr 0 |
99 | } else { | 101 | } else { |
@@ -109,7 +111,7 @@ index 40f71e9ed8b..10e267fa16d 100644 | |||
109 | } | 111 | } |
110 | }] | 112 | }] |
111 | } | 113 | } |
112 | @@ -2176,29 +2169,7 @@ proc check_mips_msa_hw_available { } { | 114 | @@ -2504,29 +2497,7 @@ proc check_mips_msa_hw_available { } { |
113 | if { !([istarget mips*-*-*]) } { | 115 | if { !([istarget mips*-*-*]) } { |
114 | expr 0 | 116 | expr 0 |
115 | } else { | 117 | } else { |
@@ -140,7 +142,7 @@ index 40f71e9ed8b..10e267fa16d 100644 | |||
140 | } | 142 | } |
141 | }] | 143 | }] |
142 | } | 144 | } |
143 | @@ -9187,6 +9158,7 @@ proc is-effective-target-keyword { arg } { | 145 | @@ -9897,6 +9868,7 @@ proc is-effective-target-keyword { arg } { |
144 | 146 | ||
145 | proc et-dg-runtest { runtest testcases flags default-extra-flags } { | 147 | proc et-dg-runtest { runtest testcases flags default-extra-flags } { |
146 | global dg-do-what-default | 148 | global dg-do-what-default |
@@ -148,7 +150,7 @@ index 40f71e9ed8b..10e267fa16d 100644 | |||
148 | global EFFECTIVE_TARGETS | 150 | global EFFECTIVE_TARGETS |
149 | global et_index | 151 | global et_index |
150 | 152 | ||
151 | @@ -9194,6 +9166,7 @@ proc et-dg-runtest { runtest testcases flags default-extra-flags } { | 153 | @@ -9904,6 +9876,7 @@ proc et-dg-runtest { runtest testcases flags default-extra-flags } { |
152 | foreach target $EFFECTIVE_TARGETS { | 154 | foreach target $EFFECTIVE_TARGETS { |
153 | set target_flags $flags | 155 | set target_flags $flags |
154 | set dg-do-what-default compile | 156 | set dg-do-what-default compile |
@@ -156,7 +158,7 @@ index 40f71e9ed8b..10e267fa16d 100644 | |||
156 | set et_index [lsearch -exact $EFFECTIVE_TARGETS $target] | 158 | set et_index [lsearch -exact $EFFECTIVE_TARGETS $target] |
157 | if { [info procs add_options_for_${target}] != [list] } { | 159 | if { [info procs add_options_for_${target}] != [list] } { |
158 | set target_flags [add_options_for_${target} "$flags"] | 160 | set target_flags [add_options_for_${target} "$flags"] |
159 | @@ -9201,8 +9174,10 @@ proc et-dg-runtest { runtest testcases flags default-extra-flags } { | 161 | @@ -9911,8 +9884,10 @@ proc et-dg-runtest { runtest testcases flags default-extra-flags } { |
160 | if { [info procs check_effective_target_${target}_runtime] | 162 | if { [info procs check_effective_target_${target}_runtime] |
161 | != [list] && [check_effective_target_${target}_runtime] } { | 163 | != [list] && [check_effective_target_${target}_runtime] } { |
162 | set dg-do-what-default run | 164 | set dg-do-what-default run |
@@ -167,7 +169,7 @@ index 40f71e9ed8b..10e267fa16d 100644 | |||
167 | } | 169 | } |
168 | } else { | 170 | } else { |
169 | set et_index 0 | 171 | set et_index 0 |
170 | @@ -10789,6 +10764,7 @@ proc check_effective_target_sigsetjmp {} { | 172 | @@ -11606,6 +11581,7 @@ proc check_effective_target_sigsetjmp {} { |
171 | proc check_vect_support_and_set_flags { } { | 173 | proc check_vect_support_and_set_flags { } { |
172 | global DEFAULT_VECTCFLAGS | 174 | global DEFAULT_VECTCFLAGS |
173 | global dg-do-what-default | 175 | global dg-do-what-default |
@@ -175,7 +177,7 @@ index 40f71e9ed8b..10e267fa16d 100644 | |||
175 | global EFFECTIVE_TARGETS | 177 | global EFFECTIVE_TARGETS |
176 | 178 | ||
177 | if [istarget powerpc-*paired*] { | 179 | if [istarget powerpc-*paired*] { |
178 | @@ -10797,6 +10773,7 @@ proc check_vect_support_and_set_flags { } { | 180 | @@ -11614,6 +11590,7 @@ proc check_vect_support_and_set_flags { } { |
179 | set dg-do-what-default run | 181 | set dg-do-what-default run |
180 | } else { | 182 | } else { |
181 | set dg-do-what-default compile | 183 | set dg-do-what-default compile |
@@ -183,15 +185,15 @@ index 40f71e9ed8b..10e267fa16d 100644 | |||
183 | } | 185 | } |
184 | } elseif [istarget powerpc*-*-*] { | 186 | } elseif [istarget powerpc*-*-*] { |
185 | # Skip targets not supporting -maltivec. | 187 | # Skip targets not supporting -maltivec. |
186 | @@ -10821,6 +10798,7 @@ proc check_vect_support_and_set_flags { } { | 188 | @@ -11644,6 +11621,7 @@ proc check_vect_support_and_set_flags { } { |
187 | lappend DEFAULT_VECTCFLAGS "-mcpu=970" | 189 | set DEFAULT_VECTCFLAGS [linsert $DEFAULT_VECTCFLAGS 0 "-mcpu=970"] |
188 | } | 190 | } |
189 | set dg-do-what-default compile | 191 | set dg-do-what-default compile |
190 | + set do-what-limit link | 192 | + set do-what-limit link |
191 | } | 193 | } |
192 | } elseif { [istarget i?86-*-*] || [istarget x86_64-*-*] } { | 194 | } elseif { [istarget i?86-*-*] || [istarget x86_64-*-*] } { |
193 | lappend DEFAULT_VECTCFLAGS "-msse2" | 195 | lappend DEFAULT_VECTCFLAGS "-msse2" |
194 | @@ -10828,6 +10806,7 @@ proc check_vect_support_and_set_flags { } { | 196 | @@ -11651,6 +11629,7 @@ proc check_vect_support_and_set_flags { } { |
195 | set dg-do-what-default run | 197 | set dg-do-what-default run |
196 | } else { | 198 | } else { |
197 | set dg-do-what-default compile | 199 | set dg-do-what-default compile |
@@ -199,7 +201,7 @@ index 40f71e9ed8b..10e267fa16d 100644 | |||
199 | } | 201 | } |
200 | } elseif { [istarget mips*-*-*] | 202 | } elseif { [istarget mips*-*-*] |
201 | && [check_effective_target_nomips16] } { | 203 | && [check_effective_target_nomips16] } { |
202 | @@ -10847,6 +10826,7 @@ proc check_vect_support_and_set_flags { } { | 204 | @@ -11670,6 +11649,7 @@ proc check_vect_support_and_set_flags { } { |
203 | set dg-do-what-default run | 205 | set dg-do-what-default run |
204 | } else { | 206 | } else { |
205 | set dg-do-what-default compile | 207 | set dg-do-what-default compile |
@@ -207,7 +209,7 @@ index 40f71e9ed8b..10e267fa16d 100644 | |||
207 | } | 209 | } |
208 | } elseif [istarget alpha*-*-*] { | 210 | } elseif [istarget alpha*-*-*] { |
209 | # Alpha's vectorization capabilities are extremely limited. | 211 | # Alpha's vectorization capabilities are extremely limited. |
210 | @@ -10860,6 +10840,7 @@ proc check_vect_support_and_set_flags { } { | 212 | @@ -11683,6 +11663,7 @@ proc check_vect_support_and_set_flags { } { |
211 | set dg-do-what-default run | 213 | set dg-do-what-default run |
212 | } else { | 214 | } else { |
213 | set dg-do-what-default compile | 215 | set dg-do-what-default compile |
@@ -215,7 +217,7 @@ index 40f71e9ed8b..10e267fa16d 100644 | |||
215 | } | 217 | } |
216 | } elseif [istarget ia64-*-*] { | 218 | } elseif [istarget ia64-*-*] { |
217 | set dg-do-what-default run | 219 | set dg-do-what-default run |
218 | @@ -10873,6 +10854,7 @@ proc check_vect_support_and_set_flags { } { | 220 | @@ -11696,6 +11677,7 @@ proc check_vect_support_and_set_flags { } { |
219 | set dg-do-what-default run | 221 | set dg-do-what-default run |
220 | } else { | 222 | } else { |
221 | set dg-do-what-default compile | 223 | set dg-do-what-default compile |
@@ -223,7 +225,7 @@ index 40f71e9ed8b..10e267fa16d 100644 | |||
223 | } | 225 | } |
224 | } elseif [istarget aarch64*-*-*] { | 226 | } elseif [istarget aarch64*-*-*] { |
225 | set dg-do-what-default run | 227 | set dg-do-what-default run |
226 | @@ -10897,6 +10879,7 @@ proc check_vect_support_and_set_flags { } { | 228 | @@ -11720,6 +11702,7 @@ proc check_vect_support_and_set_flags { } { |
227 | } else { | 229 | } else { |
228 | lappend DEFAULT_VECTCFLAGS "-march=z14" "-mzarch" | 230 | lappend DEFAULT_VECTCFLAGS "-march=z14" "-mzarch" |
229 | set dg-do-what-default compile | 231 | set dg-do-what-default compile |
diff --git a/meta/recipes-devtools/gcc/gcc/0026-aarch64-Fix-loose-ldpstp-check-PR111411.patch b/meta/recipes-devtools/gcc/gcc/0026-aarch64-Fix-loose-ldpstp-check-PR111411.patch deleted file mode 100644 index a408a98698..0000000000 --- a/meta/recipes-devtools/gcc/gcc/0026-aarch64-Fix-loose-ldpstp-check-PR111411.patch +++ /dev/null | |||
@@ -1,117 +0,0 @@ | |||
1 | From adb60dc78e0da4877747f32347cee339364775be Mon Sep 17 00:00:00 2001 | ||
2 | From: Richard Sandiford <richard.sandiford@arm.com> | ||
3 | Date: Fri, 15 Sep 2023 09:19:14 +0100 | ||
4 | Subject: [PATCH] aarch64: Fix loose ldpstp check [PR111411] | ||
5 | |||
6 | aarch64_operands_ok_for_ldpstp contained the code: | ||
7 | |||
8 | /* One of the memory accesses must be a mempair operand. | ||
9 | If it is not the first one, they need to be swapped by the | ||
10 | peephole. */ | ||
11 | if (!aarch64_mem_pair_operand (mem_1, GET_MODE (mem_1)) | ||
12 | && !aarch64_mem_pair_operand (mem_2, GET_MODE (mem_2))) | ||
13 | return false; | ||
14 | |||
15 | But the requirement isn't just that one of the accesses must be a | ||
16 | valid mempair operand. It's that the lower access must be, since | ||
17 | that's the access that will be used for the instruction operand. | ||
18 | |||
19 | gcc/ | ||
20 | PR target/111411 | ||
21 | * config/aarch64/aarch64.cc (aarch64_operands_ok_for_ldpstp): Require | ||
22 | the lower memory access to a mem-pair operand. | ||
23 | |||
24 | gcc/testsuite/ | ||
25 | PR target/111411 | ||
26 | * gcc.dg/rtl/aarch64/pr111411.c: New test. | ||
27 | |||
28 | Upstream-Status: Backport [https://gcc.gnu.org/git/gitweb.cgi?p=gcc.git;h=2d38f45bcca62ca0c7afef4b579f82c5c2a01610] | ||
29 | Signed-off-by: Martin Jansa <martin.jansa@gmail.com> | ||
30 | --- | ||
31 | gcc/config/aarch64/aarch64.cc | 8 ++- | ||
32 | gcc/testsuite/gcc.dg/rtl/aarch64/pr111411.c | 57 +++++++++++++++++++++ | ||
33 | 2 files changed, 60 insertions(+), 5 deletions(-) | ||
34 | create mode 100644 gcc/testsuite/gcc.dg/rtl/aarch64/pr111411.c | ||
35 | |||
36 | diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc | ||
37 | index 6118a3354ac..9b1f791ca8b 100644 | ||
38 | --- a/gcc/config/aarch64/aarch64.cc | ||
39 | +++ b/gcc/config/aarch64/aarch64.cc | ||
40 | @@ -26154,11 +26154,9 @@ aarch64_operands_ok_for_ldpstp (rtx *operands, bool load, | ||
41 | gcc_assert (known_eq (GET_MODE_SIZE (GET_MODE (mem_1)), | ||
42 | GET_MODE_SIZE (GET_MODE (mem_2)))); | ||
43 | |||
44 | - /* One of the memory accesses must be a mempair operand. | ||
45 | - If it is not the first one, they need to be swapped by the | ||
46 | - peephole. */ | ||
47 | - if (!aarch64_mem_pair_operand (mem_1, GET_MODE (mem_1)) | ||
48 | - && !aarch64_mem_pair_operand (mem_2, GET_MODE (mem_2))) | ||
49 | + /* The lower memory access must be a mem-pair operand. */ | ||
50 | + rtx lower_mem = reversed ? mem_2 : mem_1; | ||
51 | + if (!aarch64_mem_pair_operand (lower_mem, GET_MODE (lower_mem))) | ||
52 | return false; | ||
53 | |||
54 | if (REG_P (reg_1) && FP_REGNUM_P (REGNO (reg_1))) | ||
55 | diff --git a/gcc/testsuite/gcc.dg/rtl/aarch64/pr111411.c b/gcc/testsuite/gcc.dg/rtl/aarch64/pr111411.c | ||
56 | new file mode 100644 | ||
57 | index 00000000000..ad07e9c6c89 | ||
58 | --- /dev/null | ||
59 | +++ b/gcc/testsuite/gcc.dg/rtl/aarch64/pr111411.c | ||
60 | @@ -0,0 +1,57 @@ | ||
61 | +/* { dg-do compile { target aarch64*-*-* } } */ | ||
62 | +/* { dg-require-effective-target lp64 } */ | ||
63 | +/* { dg-options "-O -fdisable-rtl-postreload -fpeephole2 -fno-schedule-fusion" } */ | ||
64 | + | ||
65 | +extern int data[]; | ||
66 | + | ||
67 | +void __RTL (startwith ("ira")) foo (void *ptr) | ||
68 | +{ | ||
69 | + (function "foo" | ||
70 | + (param "ptr" | ||
71 | + (DECL_RTL (reg/v:DI <0> [ ptr ])) | ||
72 | + (DECL_RTL_INCOMING (reg/v:DI x0 [ ptr ])) | ||
73 | + ) ;; param "ptr" | ||
74 | + (insn-chain | ||
75 | + (block 2 | ||
76 | + (edge-from entry (flags "FALLTHRU")) | ||
77 | + (cnote 3 [bb 2] NOTE_INSN_BASIC_BLOCK) | ||
78 | + (insn 4 (set (reg:DI <0>) (reg:DI x0))) | ||
79 | + (insn 5 (set (reg:DI <1>) | ||
80 | + (plus:DI (reg:DI <0>) (const_int 768)))) | ||
81 | + (insn 6 (set (mem:SI (plus:DI (reg:DI <0>) | ||
82 | + (const_int 508)) [1 &data+508 S4 A4]) | ||
83 | + (const_int 0))) | ||
84 | + (insn 7 (set (mem:SI (plus:DI (reg:DI <1>) | ||
85 | + (const_int -256)) [1 &data+512 S4 A4]) | ||
86 | + (const_int 0))) | ||
87 | + (edge-to exit (flags "FALLTHRU")) | ||
88 | + ) ;; block 2 | ||
89 | + ) ;; insn-chain | ||
90 | + ) ;; function | ||
91 | +} | ||
92 | + | ||
93 | +void __RTL (startwith ("ira")) bar (void *ptr) | ||
94 | +{ | ||
95 | + (function "bar" | ||
96 | + (param "ptr" | ||
97 | + (DECL_RTL (reg/v:DI <0> [ ptr ])) | ||
98 | + (DECL_RTL_INCOMING (reg/v:DI x0 [ ptr ])) | ||
99 | + ) ;; param "ptr" | ||
100 | + (insn-chain | ||
101 | + (block 2 | ||
102 | + (edge-from entry (flags "FALLTHRU")) | ||
103 | + (cnote 3 [bb 2] NOTE_INSN_BASIC_BLOCK) | ||
104 | + (insn 4 (set (reg:DI <0>) (reg:DI x0))) | ||
105 | + (insn 5 (set (reg:DI <1>) | ||
106 | + (plus:DI (reg:DI <0>) (const_int 768)))) | ||
107 | + (insn 6 (set (mem:SI (plus:DI (reg:DI <1>) | ||
108 | + (const_int -256)) [1 &data+512 S4 A4]) | ||
109 | + (const_int 0))) | ||
110 | + (insn 7 (set (mem:SI (plus:DI (reg:DI <0>) | ||
111 | + (const_int 508)) [1 &data+508 S4 A4]) | ||
112 | + (const_int 0))) | ||
113 | + (edge-to exit (flags "FALLTHRU")) | ||
114 | + ) ;; block 2 | ||
115 | + ) ;; insn-chain | ||
116 | + ) ;; function | ||
117 | +} | ||
diff --git a/meta/recipes-devtools/gcc/gcc/0027-Fix-gcc-vect-module-testcases.patch b/meta/recipes-devtools/gcc/gcc/0027-Fix-gcc-vect-module-testcases.patch deleted file mode 100644 index 4b89036814..0000000000 --- a/meta/recipes-devtools/gcc/gcc/0027-Fix-gcc-vect-module-testcases.patch +++ /dev/null | |||
@@ -1,26 +0,0 @@ | |||
1 | Upstream-Status: Backport [https://gcc.gnu.org/git/gitweb.cgi?p=gcc.git;h=948dbc5ee45f9ffd5f41fd6782704081cc7c8c27] | ||
2 | |||
3 | Signed-off-by: Harish Sadineni <Harish.Sadineni@windriver.com> | ||
4 | |||
5 | diff --git a/gcc/testsuite/gcc.dg/vect/vect-simd-clone-10.c b/gcc/testsuite/gcc.dg/vect/vect-simd-clone-10.c | ||
6 | index ed63ff59cc0..009c849b7e7 100644 | ||
7 | --- a/gcc/testsuite/gcc.dg/vect/vect-simd-clone-10.c | ||
8 | +++ b/gcc/testsuite/gcc.dg/vect/vect-simd-clone-10.c | ||
9 | @@ -1,3 +1,5 @@ | ||
10 | +/* Since this uses dg-additional-sources, need to specify `dg-do run` instead of the default. */ | ||
11 | +/* { dg-do run } */ | ||
12 | /* { dg-require-effective-target vect_simd_clones } */ | ||
13 | /* { dg-additional-options "-fopenmp-simd" } */ | ||
14 | /* { dg-additional-options "-mavx" { target avx_runtime } } */ | ||
15 | diff --git a/gcc/testsuite/gcc.dg/vect/vect-simd-clone-12.c b/gcc/testsuite/gcc.dg/vect/vect-simd-clone-12.c | ||
16 | index c44471e35bc..4699a3f3c80 100644 | ||
17 | --- a/gcc/testsuite/gcc.dg/vect/vect-simd-clone-12.c | ||
18 | +++ b/gcc/testsuite/gcc.dg/vect/vect-simd-clone-12.c | ||
19 | @@ -1,3 +1,5 @@ | ||
20 | +/* Since this uses dg-additional-sources, need to specify `dg-do run` instead of the default. */ | ||
21 | +/* { dg-do run } */ | ||
22 | /* { dg-require-effective-target vect_simd_clones } */ | ||
23 | /* { dg-additional-options "-fopenmp-simd" } */ | ||
24 | /* { dg-additional-options "-mavx" { target avx_runtime } } */ | ||
25 | -- | ||
26 | 2.43.0 | ||
diff --git a/meta/recipes-devtools/gcc/gcc/CVE-2023-4039.patch b/meta/recipes-devtools/gcc/gcc/CVE-2023-4039.patch deleted file mode 100644 index 81b5067c33..0000000000 --- a/meta/recipes-devtools/gcc/gcc/CVE-2023-4039.patch +++ /dev/null | |||
@@ -1,3093 +0,0 @@ | |||
1 | From: Richard Sandiford <richard.sandiford@arm.com> | ||
2 | Subject: [PATCH 00/19] aarch64: Fix -fstack-protector issue | ||
3 | Date: Tue, 12 Sep 2023 16:25:10 +0100 | ||
4 | |||
5 | This series of patches fixes deficiencies in GCC's -fstack-protector | ||
6 | implementation for AArch64 when using dynamically allocated stack space. | ||
7 | This is CVE-2023-4039. See: | ||
8 | |||
9 | https://developer.arm.com/Arm%20Security%20Center/GCC%20Stack%20Protector%20Vulnerability%20AArch64 | ||
10 | https://github.com/metaredteam/external-disclosures/security/advisories/GHSA-x7ch-h5rf-w2mf | ||
11 | |||
12 | for more details. | ||
13 | |||
14 | The fix is to put the saved registers above the locals area when | ||
15 | -fstack-protector is used. | ||
16 | |||
17 | The series also fixes a stack-clash problem that I found while working | ||
18 | on the CVE. In unpatched sources, the stack-clash problem would only | ||
19 | trigger for unrealistic numbers of arguments (8K 64-bit arguments, or an | ||
20 | equivalent). But it would be a more significant issue with the new | ||
21 | -fstack-protector frame layout. It's therefore important that both | ||
22 | problems are fixed together. | ||
23 | |||
24 | Some reorganisation of the code seemed necessary to fix the problems in a | ||
25 | cleanish way. The series is therefore quite long, but only a handful of | ||
26 | patches should have any effect on code generation. | ||
27 | |||
28 | See the individual patches for a detailed description. | ||
29 | |||
30 | Tested on aarch64-linux-gnu. Pushed to trunk and to all active branches. | ||
31 | I've also pushed backports to GCC 7+ to vendors/ARM/heads/CVE-2023-4039. | ||
32 | |||
33 | CVE: CVE-2023-4039 | ||
34 | Upstream-Status: Backport | ||
35 | Signed-off-by: Ross Burton <ross.burton@arm.com> | ||
36 | |||
37 | |||
38 | From 71a2aa2127283f450c623d3604dbcabe0e14a8d4 Mon Sep 17 00:00:00 2001 | ||
39 | From: Richard Sandiford <richard.sandiford@arm.com> | ||
40 | Date: Tue, 12 Sep 2023 16:07:12 +0100 | ||
41 | Subject: [PATCH 01/19] aarch64: Use local frame vars in shrink-wrapping code | ||
42 | |||
43 | aarch64_layout_frame uses a shorthand for referring to | ||
44 | cfun->machine->frame: | ||
45 | |||
46 | aarch64_frame &frame = cfun->machine->frame; | ||
47 | |||
48 | This patch does the same for some other heavy users of the structure. | ||
49 | No functional change intended. | ||
50 | |||
51 | gcc/ | ||
52 | * config/aarch64/aarch64.cc (aarch64_save_callee_saves): Use | ||
53 | a local shorthand for cfun->machine->frame. | ||
54 | (aarch64_restore_callee_saves, aarch64_get_separate_components): | ||
55 | (aarch64_process_components): Likewise. | ||
56 | (aarch64_allocate_and_probe_stack_space): Likewise. | ||
57 | (aarch64_expand_prologue, aarch64_expand_epilogue): Likewise. | ||
58 | (aarch64_layout_frame): Use existing shorthand for one more case. | ||
59 | --- | ||
60 | gcc/config/aarch64/aarch64.cc | 123 ++++++++++++++++++---------------- | ||
61 | 1 file changed, 64 insertions(+), 59 deletions(-) | ||
62 | |||
63 | diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc | ||
64 | index 822a2b49a46..5d473d161d9 100644 | ||
65 | --- a/gcc/config/aarch64/aarch64.cc | ||
66 | +++ b/gcc/config/aarch64/aarch64.cc | ||
67 | @@ -8612,7 +8612,7 @@ aarch64_layout_frame (void) | ||
68 | frame.is_scs_enabled | ||
69 | = (!crtl->calls_eh_return | ||
70 | && sanitize_flags_p (SANITIZE_SHADOW_CALL_STACK) | ||
71 | - && known_ge (cfun->machine->frame.reg_offset[LR_REGNUM], 0)); | ||
72 | + && known_ge (frame.reg_offset[LR_REGNUM], 0)); | ||
73 | |||
74 | /* When shadow call stack is enabled, the scs_pop in the epilogue will | ||
75 | restore x30, and we don't need to pop x30 again in the traditional | ||
76 | @@ -9078,6 +9078,7 @@ aarch64_save_callee_saves (poly_int64 start_offset, | ||
77 | unsigned start, unsigned limit, bool skip_wb, | ||
78 | bool hard_fp_valid_p) | ||
79 | { | ||
80 | + aarch64_frame &frame = cfun->machine->frame; | ||
81 | rtx_insn *insn; | ||
82 | unsigned regno; | ||
83 | unsigned regno2; | ||
84 | @@ -9092,8 +9093,8 @@ aarch64_save_callee_saves (poly_int64 start_offset, | ||
85 | bool frame_related_p = aarch64_emit_cfi_for_reg_p (regno); | ||
86 | |||
87 | if (skip_wb | ||
88 | - && (regno == cfun->machine->frame.wb_push_candidate1 | ||
89 | - || regno == cfun->machine->frame.wb_push_candidate2)) | ||
90 | + && (regno == frame.wb_push_candidate1 | ||
91 | + || regno == frame.wb_push_candidate2)) | ||
92 | continue; | ||
93 | |||
94 | if (cfun->machine->reg_is_wrapped_separately[regno]) | ||
95 | @@ -9101,7 +9102,7 @@ aarch64_save_callee_saves (poly_int64 start_offset, | ||
96 | |||
97 | machine_mode mode = aarch64_reg_save_mode (regno); | ||
98 | reg = gen_rtx_REG (mode, regno); | ||
99 | - offset = start_offset + cfun->machine->frame.reg_offset[regno]; | ||
100 | + offset = start_offset + frame.reg_offset[regno]; | ||
101 | rtx base_rtx = stack_pointer_rtx; | ||
102 | poly_int64 sp_offset = offset; | ||
103 | |||
104 | @@ -9114,7 +9115,7 @@ aarch64_save_callee_saves (poly_int64 start_offset, | ||
105 | { | ||
106 | gcc_assert (known_eq (start_offset, 0)); | ||
107 | poly_int64 fp_offset | ||
108 | - = cfun->machine->frame.below_hard_fp_saved_regs_size; | ||
109 | + = frame.below_hard_fp_saved_regs_size; | ||
110 | if (hard_fp_valid_p) | ||
111 | base_rtx = hard_frame_pointer_rtx; | ||
112 | else | ||
113 | @@ -9136,8 +9137,7 @@ aarch64_save_callee_saves (poly_int64 start_offset, | ||
114 | && (regno2 = aarch64_next_callee_save (regno + 1, limit)) <= limit | ||
115 | && !cfun->machine->reg_is_wrapped_separately[regno2] | ||
116 | && known_eq (GET_MODE_SIZE (mode), | ||
117 | - cfun->machine->frame.reg_offset[regno2] | ||
118 | - - cfun->machine->frame.reg_offset[regno])) | ||
119 | + frame.reg_offset[regno2] - frame.reg_offset[regno])) | ||
120 | { | ||
121 | rtx reg2 = gen_rtx_REG (mode, regno2); | ||
122 | rtx mem2; | ||
123 | @@ -9187,6 +9187,7 @@ static void | ||
124 | aarch64_restore_callee_saves (poly_int64 start_offset, unsigned start, | ||
125 | unsigned limit, bool skip_wb, rtx *cfi_ops) | ||
126 | { | ||
127 | + aarch64_frame &frame = cfun->machine->frame; | ||
128 | unsigned regno; | ||
129 | unsigned regno2; | ||
130 | poly_int64 offset; | ||
131 | @@ -9203,13 +9204,13 @@ aarch64_restore_callee_saves (poly_int64 start_offset, unsigned start, | ||
132 | rtx reg, mem; | ||
133 | |||
134 | if (skip_wb | ||
135 | - && (regno == cfun->machine->frame.wb_pop_candidate1 | ||
136 | - || regno == cfun->machine->frame.wb_pop_candidate2)) | ||
137 | + && (regno == frame.wb_pop_candidate1 | ||
138 | + || regno == frame.wb_pop_candidate2)) | ||
139 | continue; | ||
140 | |||
141 | machine_mode mode = aarch64_reg_save_mode (regno); | ||
142 | reg = gen_rtx_REG (mode, regno); | ||
143 | - offset = start_offset + cfun->machine->frame.reg_offset[regno]; | ||
144 | + offset = start_offset + frame.reg_offset[regno]; | ||
145 | rtx base_rtx = stack_pointer_rtx; | ||
146 | if (mode == VNx2DImode && BYTES_BIG_ENDIAN) | ||
147 | aarch64_adjust_sve_callee_save_base (mode, base_rtx, anchor_reg, | ||
148 | @@ -9220,8 +9221,7 @@ aarch64_restore_callee_saves (poly_int64 start_offset, unsigned start, | ||
149 | && (regno2 = aarch64_next_callee_save (regno + 1, limit)) <= limit | ||
150 | && !cfun->machine->reg_is_wrapped_separately[regno2] | ||
151 | && known_eq (GET_MODE_SIZE (mode), | ||
152 | - cfun->machine->frame.reg_offset[regno2] | ||
153 | - - cfun->machine->frame.reg_offset[regno])) | ||
154 | + frame.reg_offset[regno2] - frame.reg_offset[regno])) | ||
155 | { | ||
156 | rtx reg2 = gen_rtx_REG (mode, regno2); | ||
157 | rtx mem2; | ||
158 | @@ -9326,6 +9326,7 @@ offset_12bit_unsigned_scaled_p (machine_mode mode, poly_int64 offset) | ||
159 | static sbitmap | ||
160 | aarch64_get_separate_components (void) | ||
161 | { | ||
162 | + aarch64_frame &frame = cfun->machine->frame; | ||
163 | sbitmap components = sbitmap_alloc (LAST_SAVED_REGNUM + 1); | ||
164 | bitmap_clear (components); | ||
165 | |||
166 | @@ -9342,18 +9343,18 @@ aarch64_get_separate_components (void) | ||
167 | if (mode == VNx2DImode && BYTES_BIG_ENDIAN) | ||
168 | continue; | ||
169 | |||
170 | - poly_int64 offset = cfun->machine->frame.reg_offset[regno]; | ||
171 | + poly_int64 offset = frame.reg_offset[regno]; | ||
172 | |||
173 | /* If the register is saved in the first SVE save slot, we use | ||
174 | it as a stack probe for -fstack-clash-protection. */ | ||
175 | if (flag_stack_clash_protection | ||
176 | - && maybe_ne (cfun->machine->frame.below_hard_fp_saved_regs_size, 0) | ||
177 | + && maybe_ne (frame.below_hard_fp_saved_regs_size, 0) | ||
178 | && known_eq (offset, 0)) | ||
179 | continue; | ||
180 | |||
181 | /* Get the offset relative to the register we'll use. */ | ||
182 | if (frame_pointer_needed) | ||
183 | - offset -= cfun->machine->frame.below_hard_fp_saved_regs_size; | ||
184 | + offset -= frame.below_hard_fp_saved_regs_size; | ||
185 | else | ||
186 | offset += crtl->outgoing_args_size; | ||
187 | |||
188 | @@ -9372,11 +9373,11 @@ aarch64_get_separate_components (void) | ||
189 | /* If the spare predicate register used by big-endian SVE code | ||
190 | is call-preserved, it must be saved in the main prologue | ||
191 | before any saves that use it. */ | ||
192 | - if (cfun->machine->frame.spare_pred_reg != INVALID_REGNUM) | ||
193 | - bitmap_clear_bit (components, cfun->machine->frame.spare_pred_reg); | ||
194 | + if (frame.spare_pred_reg != INVALID_REGNUM) | ||
195 | + bitmap_clear_bit (components, frame.spare_pred_reg); | ||
196 | |||
197 | - unsigned reg1 = cfun->machine->frame.wb_push_candidate1; | ||
198 | - unsigned reg2 = cfun->machine->frame.wb_push_candidate2; | ||
199 | + unsigned reg1 = frame.wb_push_candidate1; | ||
200 | + unsigned reg2 = frame.wb_push_candidate2; | ||
201 | /* If registers have been chosen to be stored/restored with | ||
202 | writeback don't interfere with them to avoid having to output explicit | ||
203 | stack adjustment instructions. */ | ||
204 | @@ -9485,6 +9486,7 @@ aarch64_get_next_set_bit (sbitmap bmp, unsigned int start) | ||
205 | static void | ||
206 | aarch64_process_components (sbitmap components, bool prologue_p) | ||
207 | { | ||
208 | + aarch64_frame &frame = cfun->machine->frame; | ||
209 | rtx ptr_reg = gen_rtx_REG (Pmode, frame_pointer_needed | ||
210 | ? HARD_FRAME_POINTER_REGNUM | ||
211 | : STACK_POINTER_REGNUM); | ||
212 | @@ -9499,9 +9501,9 @@ aarch64_process_components (sbitmap components, bool prologue_p) | ||
213 | machine_mode mode = aarch64_reg_save_mode (regno); | ||
214 | |||
215 | rtx reg = gen_rtx_REG (mode, regno); | ||
216 | - poly_int64 offset = cfun->machine->frame.reg_offset[regno]; | ||
217 | + poly_int64 offset = frame.reg_offset[regno]; | ||
218 | if (frame_pointer_needed) | ||
219 | - offset -= cfun->machine->frame.below_hard_fp_saved_regs_size; | ||
220 | + offset -= frame.below_hard_fp_saved_regs_size; | ||
221 | else | ||
222 | offset += crtl->outgoing_args_size; | ||
223 | |||
224 | @@ -9526,14 +9528,14 @@ aarch64_process_components (sbitmap components, bool prologue_p) | ||
225 | break; | ||
226 | } | ||
227 | |||
228 | - poly_int64 offset2 = cfun->machine->frame.reg_offset[regno2]; | ||
229 | + poly_int64 offset2 = frame.reg_offset[regno2]; | ||
230 | /* The next register is not of the same class or its offset is not | ||
231 | mergeable with the current one into a pair. */ | ||
232 | if (aarch64_sve_mode_p (mode) | ||
233 | || !satisfies_constraint_Ump (mem) | ||
234 | || GP_REGNUM_P (regno) != GP_REGNUM_P (regno2) | ||
235 | || (crtl->abi->id () == ARM_PCS_SIMD && FP_REGNUM_P (regno)) | ||
236 | - || maybe_ne ((offset2 - cfun->machine->frame.reg_offset[regno]), | ||
237 | + || maybe_ne ((offset2 - frame.reg_offset[regno]), | ||
238 | GET_MODE_SIZE (mode))) | ||
239 | { | ||
240 | insn = emit_insn (set); | ||
241 | @@ -9555,7 +9557,7 @@ aarch64_process_components (sbitmap components, bool prologue_p) | ||
242 | /* REGNO2 can be saved/restored in a pair with REGNO. */ | ||
243 | rtx reg2 = gen_rtx_REG (mode, regno2); | ||
244 | if (frame_pointer_needed) | ||
245 | - offset2 -= cfun->machine->frame.below_hard_fp_saved_regs_size; | ||
246 | + offset2 -= frame.below_hard_fp_saved_regs_size; | ||
247 | else | ||
248 | offset2 += crtl->outgoing_args_size; | ||
249 | rtx addr2 = plus_constant (Pmode, ptr_reg, offset2); | ||
250 | @@ -9650,6 +9652,7 @@ aarch64_allocate_and_probe_stack_space (rtx temp1, rtx temp2, | ||
251 | bool frame_related_p, | ||
252 | bool final_adjustment_p) | ||
253 | { | ||
254 | + aarch64_frame &frame = cfun->machine->frame; | ||
255 | HOST_WIDE_INT guard_size | ||
256 | = 1 << param_stack_clash_protection_guard_size; | ||
257 | HOST_WIDE_INT guard_used_by_caller = STACK_CLASH_CALLER_GUARD; | ||
258 | @@ -9670,25 +9673,25 @@ aarch64_allocate_and_probe_stack_space (rtx temp1, rtx temp2, | ||
259 | register as a probe. We can't assume that LR was saved at position 0 | ||
260 | though, so treat any space below it as unprobed. */ | ||
261 | if (final_adjustment_p | ||
262 | - && known_eq (cfun->machine->frame.below_hard_fp_saved_regs_size, 0)) | ||
263 | + && known_eq (frame.below_hard_fp_saved_regs_size, 0)) | ||
264 | { | ||
265 | - poly_int64 lr_offset = cfun->machine->frame.reg_offset[LR_REGNUM]; | ||
266 | + poly_int64 lr_offset = frame.reg_offset[LR_REGNUM]; | ||
267 | if (known_ge (lr_offset, 0)) | ||
268 | min_probe_threshold -= lr_offset.to_constant (); | ||
269 | else | ||
270 | gcc_assert (!flag_stack_clash_protection || known_eq (poly_size, 0)); | ||
271 | } | ||
272 | |||
273 | - poly_int64 frame_size = cfun->machine->frame.frame_size; | ||
274 | + poly_int64 frame_size = frame.frame_size; | ||
275 | |||
276 | /* We should always have a positive probe threshold. */ | ||
277 | gcc_assert (min_probe_threshold > 0); | ||
278 | |||
279 | if (flag_stack_clash_protection && !final_adjustment_p) | ||
280 | { | ||
281 | - poly_int64 initial_adjust = cfun->machine->frame.initial_adjust; | ||
282 | - poly_int64 sve_callee_adjust = cfun->machine->frame.sve_callee_adjust; | ||
283 | - poly_int64 final_adjust = cfun->machine->frame.final_adjust; | ||
284 | + poly_int64 initial_adjust = frame.initial_adjust; | ||
285 | + poly_int64 sve_callee_adjust = frame.sve_callee_adjust; | ||
286 | + poly_int64 final_adjust = frame.final_adjust; | ||
287 | |||
288 | if (known_eq (frame_size, 0)) | ||
289 | { | ||
290 | @@ -9977,17 +9980,18 @@ aarch64_epilogue_uses (int regno) | ||
291 | void | ||
292 | aarch64_expand_prologue (void) | ||
293 | { | ||
294 | - poly_int64 frame_size = cfun->machine->frame.frame_size; | ||
295 | - poly_int64 initial_adjust = cfun->machine->frame.initial_adjust; | ||
296 | - HOST_WIDE_INT callee_adjust = cfun->machine->frame.callee_adjust; | ||
297 | - poly_int64 final_adjust = cfun->machine->frame.final_adjust; | ||
298 | - poly_int64 callee_offset = cfun->machine->frame.callee_offset; | ||
299 | - poly_int64 sve_callee_adjust = cfun->machine->frame.sve_callee_adjust; | ||
300 | + aarch64_frame &frame = cfun->machine->frame; | ||
301 | + poly_int64 frame_size = frame.frame_size; | ||
302 | + poly_int64 initial_adjust = frame.initial_adjust; | ||
303 | + HOST_WIDE_INT callee_adjust = frame.callee_adjust; | ||
304 | + poly_int64 final_adjust = frame.final_adjust; | ||
305 | + poly_int64 callee_offset = frame.callee_offset; | ||
306 | + poly_int64 sve_callee_adjust = frame.sve_callee_adjust; | ||
307 | poly_int64 below_hard_fp_saved_regs_size | ||
308 | - = cfun->machine->frame.below_hard_fp_saved_regs_size; | ||
309 | - unsigned reg1 = cfun->machine->frame.wb_push_candidate1; | ||
310 | - unsigned reg2 = cfun->machine->frame.wb_push_candidate2; | ||
311 | - bool emit_frame_chain = cfun->machine->frame.emit_frame_chain; | ||
312 | + = frame.below_hard_fp_saved_regs_size; | ||
313 | + unsigned reg1 = frame.wb_push_candidate1; | ||
314 | + unsigned reg2 = frame.wb_push_candidate2; | ||
315 | + bool emit_frame_chain = frame.emit_frame_chain; | ||
316 | rtx_insn *insn; | ||
317 | |||
318 | if (flag_stack_clash_protection && known_eq (callee_adjust, 0)) | ||
319 | @@ -10018,7 +10022,7 @@ aarch64_expand_prologue (void) | ||
320 | } | ||
321 | |||
322 | /* Push return address to shadow call stack. */ | ||
323 | - if (cfun->machine->frame.is_scs_enabled) | ||
324 | + if (frame.is_scs_enabled) | ||
325 | emit_insn (gen_scs_push ()); | ||
326 | |||
327 | if (flag_stack_usage_info) | ||
328 | @@ -10057,7 +10061,7 @@ aarch64_expand_prologue (void) | ||
329 | |||
330 | /* The offset of the frame chain record (if any) from the current SP. */ | ||
331 | poly_int64 chain_offset = (initial_adjust + callee_adjust | ||
332 | - - cfun->machine->frame.hard_fp_offset); | ||
333 | + - frame.hard_fp_offset); | ||
334 | gcc_assert (known_ge (chain_offset, 0)); | ||
335 | |||
336 | /* The offset of the bottom of the save area from the current SP. */ | ||
337 | @@ -10160,16 +10164,17 @@ aarch64_use_return_insn_p (void) | ||
338 | void | ||
339 | aarch64_expand_epilogue (bool for_sibcall) | ||
340 | { | ||
341 | - poly_int64 initial_adjust = cfun->machine->frame.initial_adjust; | ||
342 | - HOST_WIDE_INT callee_adjust = cfun->machine->frame.callee_adjust; | ||
343 | - poly_int64 final_adjust = cfun->machine->frame.final_adjust; | ||
344 | - poly_int64 callee_offset = cfun->machine->frame.callee_offset; | ||
345 | - poly_int64 sve_callee_adjust = cfun->machine->frame.sve_callee_adjust; | ||
346 | + aarch64_frame &frame = cfun->machine->frame; | ||
347 | + poly_int64 initial_adjust = frame.initial_adjust; | ||
348 | + HOST_WIDE_INT callee_adjust = frame.callee_adjust; | ||
349 | + poly_int64 final_adjust = frame.final_adjust; | ||
350 | + poly_int64 callee_offset = frame.callee_offset; | ||
351 | + poly_int64 sve_callee_adjust = frame.sve_callee_adjust; | ||
352 | poly_int64 below_hard_fp_saved_regs_size | ||
353 | - = cfun->machine->frame.below_hard_fp_saved_regs_size; | ||
354 | - unsigned reg1 = cfun->machine->frame.wb_pop_candidate1; | ||
355 | - unsigned reg2 = cfun->machine->frame.wb_pop_candidate2; | ||
356 | - unsigned int last_gpr = (cfun->machine->frame.is_scs_enabled | ||
357 | + = frame.below_hard_fp_saved_regs_size; | ||
358 | + unsigned reg1 = frame.wb_pop_candidate1; | ||
359 | + unsigned reg2 = frame.wb_pop_candidate2; | ||
360 | + unsigned int last_gpr = (frame.is_scs_enabled | ||
361 | ? R29_REGNUM : R30_REGNUM); | ||
362 | rtx cfi_ops = NULL; | ||
363 | rtx_insn *insn; | ||
364 | @@ -10203,7 +10208,7 @@ aarch64_expand_epilogue (bool for_sibcall) | ||
365 | /* We need to add memory barrier to prevent read from deallocated stack. */ | ||
366 | bool need_barrier_p | ||
367 | = maybe_ne (get_frame_size () | ||
368 | - + cfun->machine->frame.saved_varargs_size, 0); | ||
369 | + + frame.saved_varargs_size, 0); | ||
370 | |||
371 | /* Emit a barrier to prevent loads from a deallocated stack. */ | ||
372 | if (maybe_gt (final_adjust, crtl->outgoing_args_size) | ||
373 | @@ -10284,7 +10289,7 @@ aarch64_expand_epilogue (bool for_sibcall) | ||
374 | } | ||
375 | |||
376 | /* Pop return address from shadow call stack. */ | ||
377 | - if (cfun->machine->frame.is_scs_enabled) | ||
378 | + if (frame.is_scs_enabled) | ||
379 | { | ||
380 | machine_mode mode = aarch64_reg_save_mode (R30_REGNUM); | ||
381 | rtx reg = gen_rtx_REG (mode, R30_REGNUM); | ||
382 | @@ -12740,24 +12745,24 @@ aarch64_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to) | ||
383 | poly_int64 | ||
384 | aarch64_initial_elimination_offset (unsigned from, unsigned to) | ||
385 | { | ||
386 | + aarch64_frame &frame = cfun->machine->frame; | ||
387 | + | ||
388 | if (to == HARD_FRAME_POINTER_REGNUM) | ||
389 | { | ||
390 | if (from == ARG_POINTER_REGNUM) | ||
391 | - return cfun->machine->frame.hard_fp_offset; | ||
392 | + return frame.hard_fp_offset; | ||
393 | |||
394 | if (from == FRAME_POINTER_REGNUM) | ||
395 | - return cfun->machine->frame.hard_fp_offset | ||
396 | - - cfun->machine->frame.locals_offset; | ||
397 | + return frame.hard_fp_offset - frame.locals_offset; | ||
398 | } | ||
399 | |||
400 | if (to == STACK_POINTER_REGNUM) | ||
401 | { | ||
402 | if (from == FRAME_POINTER_REGNUM) | ||
403 | - return cfun->machine->frame.frame_size | ||
404 | - - cfun->machine->frame.locals_offset; | ||
405 | + return frame.frame_size - frame.locals_offset; | ||
406 | } | ||
407 | |||
408 | - return cfun->machine->frame.frame_size; | ||
409 | + return frame.frame_size; | ||
410 | } | ||
411 | |||
412 | |||
413 | -- | ||
414 | 2.34.1 | ||
415 | |||
416 | |||
417 | From 89a9fa287706c5011f61926eaf65e7b996b963a3 Mon Sep 17 00:00:00 2001 | ||
418 | From: Richard Sandiford <richard.sandiford@arm.com> | ||
419 | Date: Tue, 12 Sep 2023 16:07:12 +0100 | ||
420 | Subject: [PATCH 02/19] aarch64: Avoid a use of callee_offset | ||
421 | |||
422 | When we emit the frame chain, i.e. when we reach Here in this statement | ||
423 | of aarch64_expand_prologue: | ||
424 | |||
425 | if (emit_frame_chain) | ||
426 | { | ||
427 | // Here | ||
428 | ... | ||
429 | } | ||
430 | |||
431 | the stack is in one of two states: | ||
432 | |||
433 | - We've allocated up to the frame chain, but no more. | ||
434 | |||
435 | - We've allocated the whole frame, and the frame chain is within easy | ||
436 | reach of the new SP. | ||
437 | |||
438 | The offset of the frame chain from the current SP is available | ||
439 | in aarch64_frame as callee_offset. It is also available as the | ||
440 | chain_offset local variable, where the latter is calculated from other | ||
441 | data. (However, chain_offset is not always equal to callee_offset when | ||
442 | !emit_frame_chain, so chain_offset isn't redundant.) | ||
443 | |||
444 | In c600df9a4060da3c6121ff4d0b93f179eafd69d1 I switched to using | ||
445 | chain_offset for the initialisation of the hard frame pointer: | ||
446 | |||
447 | aarch64_add_offset (Pmode, hard_frame_pointer_rtx, | ||
448 | - stack_pointer_rtx, callee_offset, | ||
449 | + stack_pointer_rtx, chain_offset, | ||
450 | tmp1_rtx, tmp0_rtx, frame_pointer_needed); | ||
451 | |||
452 | But the later REG_CFA_ADJUST_CFA handling still used callee_offset. | ||
453 | |||
454 | I think the difference is harmless, but it's more logical for the | ||
455 | CFA note to be in sync, and it's more convenient for later patches | ||
456 | if it uses chain_offset. | ||
457 | |||
458 | gcc/ | ||
459 | * config/aarch64/aarch64.cc (aarch64_expand_prologue): Use | ||
460 | chain_offset rather than callee_offset. | ||
461 | --- | ||
462 | gcc/config/aarch64/aarch64.cc | 4 +--- | ||
463 | 1 file changed, 1 insertion(+), 3 deletions(-) | ||
464 | |||
465 | diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc | ||
466 | index 5d473d161d9..4f233c95140 100644 | ||
467 | --- a/gcc/config/aarch64/aarch64.cc | ||
468 | +++ b/gcc/config/aarch64/aarch64.cc | ||
469 | @@ -9985,7 +9985,6 @@ aarch64_expand_prologue (void) | ||
470 | poly_int64 initial_adjust = frame.initial_adjust; | ||
471 | HOST_WIDE_INT callee_adjust = frame.callee_adjust; | ||
472 | poly_int64 final_adjust = frame.final_adjust; | ||
473 | - poly_int64 callee_offset = frame.callee_offset; | ||
474 | poly_int64 sve_callee_adjust = frame.sve_callee_adjust; | ||
475 | poly_int64 below_hard_fp_saved_regs_size | ||
476 | = frame.below_hard_fp_saved_regs_size; | ||
477 | @@ -10098,8 +10097,7 @@ aarch64_expand_prologue (void) | ||
478 | implicit. */ | ||
479 | if (!find_reg_note (insn, REG_CFA_ADJUST_CFA, NULL_RTX)) | ||
480 | { | ||
481 | - rtx src = plus_constant (Pmode, stack_pointer_rtx, | ||
482 | - callee_offset); | ||
483 | + rtx src = plus_constant (Pmode, stack_pointer_rtx, chain_offset); | ||
484 | add_reg_note (insn, REG_CFA_ADJUST_CFA, | ||
485 | gen_rtx_SET (hard_frame_pointer_rtx, src)); | ||
486 | } | ||
487 | -- | ||
488 | 2.34.1 | ||
489 | |||
490 | |||
491 | From b36a2a78040722dab6124366c5d6baf8eaf80aef Mon Sep 17 00:00:00 2001 | ||
492 | From: Richard Sandiford <richard.sandiford@arm.com> | ||
493 | Date: Tue, 12 Sep 2023 16:07:13 +0100 | ||
494 | Subject: [PATCH 03/19] aarch64: Explicitly handle frames with no saved | ||
495 | registers | ||
496 | |||
497 | If a frame has no saved registers, it can be allocated in one go. | ||
498 | There is no need to treat the areas below and above the saved | ||
499 | registers as separate. | ||
500 | |||
501 | And if we allocate the frame in one go, it should be allocated | ||
502 | as the initial_adjust rather than the final_adjust. This allows the | ||
503 | frame size to grow to guard_size - guard_used_by_caller before a stack | ||
504 | probe is needed. (A frame with no register saves is necessarily a | ||
505 | leaf frame.) | ||
506 | |||
507 | This is a no-op as thing stand, since a leaf function will have | ||
508 | no outgoing arguments, and so all the frame will be above where | ||
509 | the saved registers normally go. | ||
510 | |||
511 | gcc/ | ||
512 | * config/aarch64/aarch64.cc (aarch64_layout_frame): Explicitly | ||
513 | allocate the frame in one go if there are no saved registers. | ||
514 | --- | ||
515 | gcc/config/aarch64/aarch64.cc | 8 +++++--- | ||
516 | 1 file changed, 5 insertions(+), 3 deletions(-) | ||
517 | |||
518 | diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc | ||
519 | index 4f233c95140..37643041ffb 100644 | ||
520 | --- a/gcc/config/aarch64/aarch64.cc | ||
521 | +++ b/gcc/config/aarch64/aarch64.cc | ||
522 | @@ -8639,9 +8639,11 @@ aarch64_layout_frame (void) | ||
523 | |||
524 | HOST_WIDE_INT const_size, const_outgoing_args_size, const_fp_offset; | ||
525 | HOST_WIDE_INT const_saved_regs_size; | ||
526 | - if (frame.frame_size.is_constant (&const_size) | ||
527 | - && const_size < max_push_offset | ||
528 | - && known_eq (frame.hard_fp_offset, const_size)) | ||
529 | + if (known_eq (frame.saved_regs_size, 0)) | ||
530 | + frame.initial_adjust = frame.frame_size; | ||
531 | + else if (frame.frame_size.is_constant (&const_size) | ||
532 | + && const_size < max_push_offset | ||
533 | + && known_eq (frame.hard_fp_offset, const_size)) | ||
534 | { | ||
535 | /* Simple, small frame with no outgoing arguments: | ||
536 | |||
537 | -- | ||
538 | 2.34.1 | ||
539 | |||
540 | |||
541 | From ada2ab0093596be707f23a3466ac82cff59fcffe Mon Sep 17 00:00:00 2001 | ||
542 | From: Richard Sandiford <richard.sandiford@arm.com> | ||
543 | Date: Tue, 12 Sep 2023 16:07:13 +0100 | ||
544 | Subject: [PATCH 04/19] aarch64: Add bytes_below_saved_regs to frame info | ||
545 | |||
546 | The frame layout code currently hard-codes the assumption that | ||
547 | the number of bytes below the saved registers is equal to the | ||
548 | size of the outgoing arguments. This patch abstracts that | ||
549 | value into a new field of aarch64_frame. | ||
550 | |||
551 | gcc/ | ||
552 | * config/aarch64/aarch64.h (aarch64_frame::bytes_below_saved_regs): New | ||
553 | field. | ||
554 | * config/aarch64/aarch64.cc (aarch64_layout_frame): Initialize it, | ||
555 | and use it instead of crtl->outgoing_args_size. | ||
556 | (aarch64_get_separate_components): Use bytes_below_saved_regs instead | ||
557 | of outgoing_args_size. | ||
558 | (aarch64_process_components): Likewise. | ||
559 | --- | ||
560 | gcc/config/aarch64/aarch64.cc | 71 ++++++++++++++++++----------------- | ||
561 | gcc/config/aarch64/aarch64.h | 5 +++ | ||
562 | 2 files changed, 41 insertions(+), 35 deletions(-) | ||
563 | |||
564 | diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc | ||
565 | index 37643041ffb..dacc2b0e4dd 100644 | ||
566 | --- a/gcc/config/aarch64/aarch64.cc | ||
567 | +++ b/gcc/config/aarch64/aarch64.cc | ||
568 | @@ -8478,6 +8478,8 @@ aarch64_layout_frame (void) | ||
569 | gcc_assert (crtl->is_leaf | ||
570 | || maybe_ne (frame.reg_offset[R30_REGNUM], SLOT_NOT_REQUIRED)); | ||
571 | |||
572 | + frame.bytes_below_saved_regs = crtl->outgoing_args_size; | ||
573 | + | ||
574 | /* Now assign stack slots for the registers. Start with the predicate | ||
575 | registers, since predicate LDR and STR have a relatively small | ||
576 | offset range. These saves happen below the hard frame pointer. */ | ||
577 | @@ -8582,18 +8584,18 @@ aarch64_layout_frame (void) | ||
578 | |||
579 | poly_int64 varargs_and_saved_regs_size = offset + frame.saved_varargs_size; | ||
580 | |||
581 | - poly_int64 above_outgoing_args | ||
582 | + poly_int64 saved_regs_and_above | ||
583 | = aligned_upper_bound (varargs_and_saved_regs_size | ||
584 | + get_frame_size (), | ||
585 | STACK_BOUNDARY / BITS_PER_UNIT); | ||
586 | |||
587 | frame.hard_fp_offset | ||
588 | - = above_outgoing_args - frame.below_hard_fp_saved_regs_size; | ||
589 | + = saved_regs_and_above - frame.below_hard_fp_saved_regs_size; | ||
590 | |||
591 | /* Both these values are already aligned. */ | ||
592 | - gcc_assert (multiple_p (crtl->outgoing_args_size, | ||
593 | + gcc_assert (multiple_p (frame.bytes_below_saved_regs, | ||
594 | STACK_BOUNDARY / BITS_PER_UNIT)); | ||
595 | - frame.frame_size = above_outgoing_args + crtl->outgoing_args_size; | ||
596 | + frame.frame_size = saved_regs_and_above + frame.bytes_below_saved_regs; | ||
597 | |||
598 | frame.locals_offset = frame.saved_varargs_size; | ||
599 | |||
600 | @@ -8637,7 +8639,7 @@ aarch64_layout_frame (void) | ||
601 | else if (frame.wb_pop_candidate1 != INVALID_REGNUM) | ||
602 | max_push_offset = 256; | ||
603 | |||
604 | - HOST_WIDE_INT const_size, const_outgoing_args_size, const_fp_offset; | ||
605 | + HOST_WIDE_INT const_size, const_below_saved_regs, const_fp_offset; | ||
606 | HOST_WIDE_INT const_saved_regs_size; | ||
607 | if (known_eq (frame.saved_regs_size, 0)) | ||
608 | frame.initial_adjust = frame.frame_size; | ||
609 | @@ -8645,31 +8647,31 @@ aarch64_layout_frame (void) | ||
610 | && const_size < max_push_offset | ||
611 | && known_eq (frame.hard_fp_offset, const_size)) | ||
612 | { | ||
613 | - /* Simple, small frame with no outgoing arguments: | ||
614 | + /* Simple, small frame with no data below the saved registers. | ||
615 | |||
616 | stp reg1, reg2, [sp, -frame_size]! | ||
617 | stp reg3, reg4, [sp, 16] */ | ||
618 | frame.callee_adjust = const_size; | ||
619 | } | ||
620 | - else if (crtl->outgoing_args_size.is_constant (&const_outgoing_args_size) | ||
621 | + else if (frame.bytes_below_saved_regs.is_constant (&const_below_saved_regs) | ||
622 | && frame.saved_regs_size.is_constant (&const_saved_regs_size) | ||
623 | - && const_outgoing_args_size + const_saved_regs_size < 512 | ||
624 | - /* We could handle this case even with outgoing args, provided | ||
625 | - that the number of args left us with valid offsets for all | ||
626 | - predicate and vector save slots. It's such a rare case that | ||
627 | - it hardly seems worth the effort though. */ | ||
628 | - && (!saves_below_hard_fp_p || const_outgoing_args_size == 0) | ||
629 | + && const_below_saved_regs + const_saved_regs_size < 512 | ||
630 | + /* We could handle this case even with data below the saved | ||
631 | + registers, provided that that data left us with valid offsets | ||
632 | + for all predicate and vector save slots. It's such a rare | ||
633 | + case that it hardly seems worth the effort though. */ | ||
634 | + && (!saves_below_hard_fp_p || const_below_saved_regs == 0) | ||
635 | && !(cfun->calls_alloca | ||
636 | && frame.hard_fp_offset.is_constant (&const_fp_offset) | ||
637 | && const_fp_offset < max_push_offset)) | ||
638 | { | ||
639 | - /* Frame with small outgoing arguments: | ||
640 | + /* Frame with small area below the saved registers: | ||
641 | |||
642 | sub sp, sp, frame_size | ||
643 | - stp reg1, reg2, [sp, outgoing_args_size] | ||
644 | - stp reg3, reg4, [sp, outgoing_args_size + 16] */ | ||
645 | + stp reg1, reg2, [sp, bytes_below_saved_regs] | ||
646 | + stp reg3, reg4, [sp, bytes_below_saved_regs + 16] */ | ||
647 | frame.initial_adjust = frame.frame_size; | ||
648 | - frame.callee_offset = const_outgoing_args_size; | ||
649 | + frame.callee_offset = const_below_saved_regs; | ||
650 | } | ||
651 | else if (saves_below_hard_fp_p | ||
652 | && known_eq (frame.saved_regs_size, | ||
653 | @@ -8679,30 +8681,29 @@ aarch64_layout_frame (void) | ||
654 | |||
655 | sub sp, sp, hard_fp_offset + below_hard_fp_saved_regs_size | ||
656 | save SVE registers relative to SP | ||
657 | - sub sp, sp, outgoing_args_size */ | ||
658 | + sub sp, sp, bytes_below_saved_regs */ | ||
659 | frame.initial_adjust = (frame.hard_fp_offset | ||
660 | + frame.below_hard_fp_saved_regs_size); | ||
661 | - frame.final_adjust = crtl->outgoing_args_size; | ||
662 | + frame.final_adjust = frame.bytes_below_saved_regs; | ||
663 | } | ||
664 | else if (frame.hard_fp_offset.is_constant (&const_fp_offset) | ||
665 | && const_fp_offset < max_push_offset) | ||
666 | { | ||
667 | - /* Frame with large outgoing arguments or SVE saves, but with | ||
668 | - a small local area: | ||
669 | + /* Frame with large area below the saved registers, or with SVE saves, | ||
670 | + but with a small area above: | ||
671 | |||
672 | stp reg1, reg2, [sp, -hard_fp_offset]! | ||
673 | stp reg3, reg4, [sp, 16] | ||
674 | [sub sp, sp, below_hard_fp_saved_regs_size] | ||
675 | [save SVE registers relative to SP] | ||
676 | - sub sp, sp, outgoing_args_size */ | ||
677 | + sub sp, sp, bytes_below_saved_regs */ | ||
678 | frame.callee_adjust = const_fp_offset; | ||
679 | frame.sve_callee_adjust = frame.below_hard_fp_saved_regs_size; | ||
680 | - frame.final_adjust = crtl->outgoing_args_size; | ||
681 | + frame.final_adjust = frame.bytes_below_saved_regs; | ||
682 | } | ||
683 | else | ||
684 | { | ||
685 | - /* Frame with large local area and outgoing arguments or SVE saves, | ||
686 | - using frame pointer: | ||
687 | + /* General case: | ||
688 | |||
689 | sub sp, sp, hard_fp_offset | ||
690 | stp x29, x30, [sp, 0] | ||
691 | @@ -8710,10 +8711,10 @@ aarch64_layout_frame (void) | ||
692 | stp reg3, reg4, [sp, 16] | ||
693 | [sub sp, sp, below_hard_fp_saved_regs_size] | ||
694 | [save SVE registers relative to SP] | ||
695 | - sub sp, sp, outgoing_args_size */ | ||
696 | + sub sp, sp, bytes_below_saved_regs */ | ||
697 | frame.initial_adjust = frame.hard_fp_offset; | ||
698 | frame.sve_callee_adjust = frame.below_hard_fp_saved_regs_size; | ||
699 | - frame.final_adjust = crtl->outgoing_args_size; | ||
700 | + frame.final_adjust = frame.bytes_below_saved_regs; | ||
701 | } | ||
702 | |||
703 | /* Make sure the individual adjustments add up to the full frame size. */ | ||
704 | @@ -9358,7 +9359,7 @@ aarch64_get_separate_components (void) | ||
705 | if (frame_pointer_needed) | ||
706 | offset -= frame.below_hard_fp_saved_regs_size; | ||
707 | else | ||
708 | - offset += crtl->outgoing_args_size; | ||
709 | + offset += frame.bytes_below_saved_regs; | ||
710 | |||
711 | /* Check that we can access the stack slot of the register with one | ||
712 | direct load with no adjustments needed. */ | ||
713 | @@ -9507,7 +9508,7 @@ aarch64_process_components (sbitmap components, bool prologue_p) | ||
714 | if (frame_pointer_needed) | ||
715 | offset -= frame.below_hard_fp_saved_regs_size; | ||
716 | else | ||
717 | - offset += crtl->outgoing_args_size; | ||
718 | + offset += frame.bytes_below_saved_regs; | ||
719 | |||
720 | rtx addr = plus_constant (Pmode, ptr_reg, offset); | ||
721 | rtx mem = gen_frame_mem (mode, addr); | ||
722 | @@ -9561,7 +9562,7 @@ aarch64_process_components (sbitmap components, bool prologue_p) | ||
723 | if (frame_pointer_needed) | ||
724 | offset2 -= frame.below_hard_fp_saved_regs_size; | ||
725 | else | ||
726 | - offset2 += crtl->outgoing_args_size; | ||
727 | + offset2 += frame.bytes_below_saved_regs; | ||
728 | rtx addr2 = plus_constant (Pmode, ptr_reg, offset2); | ||
729 | rtx mem2 = gen_frame_mem (mode, addr2); | ||
730 | rtx set2 = prologue_p ? gen_rtx_SET (mem2, reg2) | ||
731 | @@ -9635,10 +9636,10 @@ aarch64_stack_clash_protection_alloca_probe_range (void) | ||
732 | registers. If POLY_SIZE is not large enough to require a probe this function | ||
733 | will only adjust the stack. When allocating the stack space | ||
734 | FRAME_RELATED_P is then used to indicate if the allocation is frame related. | ||
735 | - FINAL_ADJUSTMENT_P indicates whether we are allocating the outgoing | ||
736 | - arguments. If we are then we ensure that any allocation larger than the ABI | ||
737 | - defined buffer needs a probe so that the invariant of having a 1KB buffer is | ||
738 | - maintained. | ||
739 | + FINAL_ADJUSTMENT_P indicates whether we are allocating the area below | ||
740 | + the saved registers. If we are then we ensure that any allocation | ||
741 | + larger than the ABI defined buffer needs a probe so that the | ||
742 | + invariant of having a 1KB buffer is maintained. | ||
743 | |||
744 | We emit barriers after each stack adjustment to prevent optimizations from | ||
745 | breaking the invariant that we never drop the stack more than a page. This | ||
746 | @@ -9847,7 +9848,7 @@ aarch64_allocate_and_probe_stack_space (rtx temp1, rtx temp2, | ||
747 | /* Handle any residuals. Residuals of at least MIN_PROBE_THRESHOLD have to | ||
748 | be probed. This maintains the requirement that each page is probed at | ||
749 | least once. For initial probing we probe only if the allocation is | ||
750 | - more than GUARD_SIZE - buffer, and for the outgoing arguments we probe | ||
751 | + more than GUARD_SIZE - buffer, and below the saved registers we probe | ||
752 | if the amount is larger than buffer. GUARD_SIZE - buffer + buffer == | ||
753 | GUARD_SIZE. This works that for any allocation that is large enough to | ||
754 | trigger a probe here, we'll have at least one, and if they're not large | ||
755 | diff --git a/gcc/config/aarch64/aarch64.h b/gcc/config/aarch64/aarch64.h | ||
756 | index 73b09e20508..0b6faa3ddf1 100644 | ||
757 | --- a/gcc/config/aarch64/aarch64.h | ||
758 | +++ b/gcc/config/aarch64/aarch64.h | ||
759 | @@ -777,6 +777,11 @@ struct GTY (()) aarch64_frame | ||
760 | /* The size of the callee-save registers with a slot in REG_OFFSET. */ | ||
761 | poly_int64 saved_regs_size; | ||
762 | |||
763 | + /* The number of bytes between the bottom of the static frame (the bottom | ||
764 | + of the outgoing arguments) and the bottom of the register save area. | ||
765 | + This value is always a multiple of STACK_BOUNDARY. */ | ||
766 | + poly_int64 bytes_below_saved_regs; | ||
767 | + | ||
768 | /* The size of the callee-save registers with a slot in REG_OFFSET that | ||
769 | are saved below the hard frame pointer. */ | ||
770 | poly_int64 below_hard_fp_saved_regs_size; | ||
771 | -- | ||
772 | 2.34.1 | ||
773 | |||
774 | |||
775 | From 82f6b3e1b596ef0f4e3ac3bb9c6e88fb4458f402 Mon Sep 17 00:00:00 2001 | ||
776 | From: Richard Sandiford <richard.sandiford@arm.com> | ||
777 | Date: Tue, 12 Sep 2023 16:07:14 +0100 | ||
778 | Subject: [PATCH 05/19] aarch64: Add bytes_below_hard_fp to frame info | ||
779 | |||
780 | Following on from the previous bytes_below_saved_regs patch, this one | ||
781 | records the number of bytes that are below the hard frame pointer. | ||
782 | This eventually replaces below_hard_fp_saved_regs_size. | ||
783 | |||
784 | If a frame pointer is not needed, the epilogue adds final_adjust | ||
785 | to the stack pointer before restoring registers: | ||
786 | |||
787 | aarch64_add_sp (tmp1_rtx, tmp0_rtx, final_adjust, true); | ||
788 | |||
789 | Therefore, if the epilogue needs to restore the stack pointer from | ||
790 | the hard frame pointer, the directly corresponding offset is: | ||
791 | |||
792 | -bytes_below_hard_fp + final_adjust | ||
793 | |||
794 | i.e. go from the hard frame pointer to the bottom of the frame, | ||
795 | then add the same amount as if we were using the stack pointer | ||
796 | from the outset. | ||
797 | |||
798 | gcc/ | ||
799 | * config/aarch64/aarch64.h (aarch64_frame::bytes_below_hard_fp): New | ||
800 | field. | ||
801 | * config/aarch64/aarch64.cc (aarch64_layout_frame): Initialize it. | ||
802 | (aarch64_expand_epilogue): Use it instead of | ||
803 | below_hard_fp_saved_regs_size. | ||
804 | --- | ||
805 | gcc/config/aarch64/aarch64.cc | 6 +++--- | ||
806 | gcc/config/aarch64/aarch64.h | 5 +++++ | ||
807 | 2 files changed, 8 insertions(+), 3 deletions(-) | ||
808 | |||
809 | diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc | ||
810 | index dacc2b0e4dd..a3f7aabcc59 100644 | ||
811 | --- a/gcc/config/aarch64/aarch64.cc | ||
812 | +++ b/gcc/config/aarch64/aarch64.cc | ||
813 | @@ -8530,6 +8530,7 @@ aarch64_layout_frame (void) | ||
814 | of the callee save area. */ | ||
815 | bool saves_below_hard_fp_p = maybe_ne (offset, 0); | ||
816 | frame.below_hard_fp_saved_regs_size = offset; | ||
817 | + frame.bytes_below_hard_fp = offset + frame.bytes_below_saved_regs; | ||
818 | if (frame.emit_frame_chain) | ||
819 | { | ||
820 | /* FP and LR are placed in the linkage record. */ | ||
821 | @@ -10171,8 +10172,7 @@ aarch64_expand_epilogue (bool for_sibcall) | ||
822 | poly_int64 final_adjust = frame.final_adjust; | ||
823 | poly_int64 callee_offset = frame.callee_offset; | ||
824 | poly_int64 sve_callee_adjust = frame.sve_callee_adjust; | ||
825 | - poly_int64 below_hard_fp_saved_regs_size | ||
826 | - = frame.below_hard_fp_saved_regs_size; | ||
827 | + poly_int64 bytes_below_hard_fp = frame.bytes_below_hard_fp; | ||
828 | unsigned reg1 = frame.wb_pop_candidate1; | ||
829 | unsigned reg2 = frame.wb_pop_candidate2; | ||
830 | unsigned int last_gpr = (frame.is_scs_enabled | ||
831 | @@ -10230,7 +10230,7 @@ aarch64_expand_epilogue (bool for_sibcall) | ||
832 | is restored on the instruction doing the writeback. */ | ||
833 | aarch64_add_offset (Pmode, stack_pointer_rtx, | ||
834 | hard_frame_pointer_rtx, | ||
835 | - -callee_offset - below_hard_fp_saved_regs_size, | ||
836 | + -bytes_below_hard_fp + final_adjust, | ||
837 | tmp1_rtx, tmp0_rtx, callee_adjust == 0); | ||
838 | else | ||
839 | /* The case where we need to re-use the register here is very rare, so | ||
840 | diff --git a/gcc/config/aarch64/aarch64.h b/gcc/config/aarch64/aarch64.h | ||
841 | index 0b6faa3ddf1..4263d29d29d 100644 | ||
842 | --- a/gcc/config/aarch64/aarch64.h | ||
843 | +++ b/gcc/config/aarch64/aarch64.h | ||
844 | @@ -786,6 +786,11 @@ struct GTY (()) aarch64_frame | ||
845 | are saved below the hard frame pointer. */ | ||
846 | poly_int64 below_hard_fp_saved_regs_size; | ||
847 | |||
848 | + /* The number of bytes between the bottom of the static frame (the bottom | ||
849 | + of the outgoing arguments) and the hard frame pointer. This value is | ||
850 | + always a multiple of STACK_BOUNDARY. */ | ||
851 | + poly_int64 bytes_below_hard_fp; | ||
852 | + | ||
853 | /* Offset from the base of the frame (incomming SP) to the | ||
854 | top of the locals area. This value is always a multiple of | ||
855 | STACK_BOUNDARY. */ | ||
856 | -- | ||
857 | 2.34.1 | ||
858 | |||
859 | |||
860 | From 86fa43e9fe4a8bf954f2919f07cbe3646d1d1df3 Mon Sep 17 00:00:00 2001 | ||
861 | From: Richard Sandiford <richard.sandiford@arm.com> | ||
862 | Date: Tue, 12 Sep 2023 16:07:14 +0100 | ||
863 | Subject: [PATCH 06/19] aarch64: Tweak aarch64_save/restore_callee_saves | ||
864 | |||
865 | aarch64_save_callee_saves and aarch64_restore_callee_saves took | ||
866 | a parameter called start_offset that gives the offset of the | ||
867 | bottom of the saved register area from the current stack pointer. | ||
868 | However, it's more convenient for later patches if we use the | ||
869 | bottom of the entire frame as the reference point, rather than | ||
870 | the bottom of the saved registers. | ||
871 | |||
872 | Doing that removes the need for the callee_offset field. | ||
873 | Other than that, this is not a win on its own. It only really | ||
874 | makes sense in combination with the follow-on patches. | ||
875 | |||
876 | gcc/ | ||
877 | * config/aarch64/aarch64.h (aarch64_frame::callee_offset): Delete. | ||
878 | * config/aarch64/aarch64.cc (aarch64_layout_frame): Remove | ||
879 | callee_offset handling. | ||
880 | (aarch64_save_callee_saves): Replace the start_offset parameter | ||
881 | with a bytes_below_sp parameter. | ||
882 | (aarch64_restore_callee_saves): Likewise. | ||
883 | (aarch64_expand_prologue): Update accordingly. | ||
884 | (aarch64_expand_epilogue): Likewise. | ||
885 | --- | ||
886 | gcc/config/aarch64/aarch64.cc | 56 +++++++++++++++++------------------ | ||
887 | gcc/config/aarch64/aarch64.h | 4 --- | ||
888 | 2 files changed, 28 insertions(+), 32 deletions(-) | ||
889 | |||
890 | diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc | ||
891 | index a3f7aabcc59..46ae5cf7673 100644 | ||
892 | --- a/gcc/config/aarch64/aarch64.cc | ||
893 | +++ b/gcc/config/aarch64/aarch64.cc | ||
894 | @@ -8604,7 +8604,6 @@ aarch64_layout_frame (void) | ||
895 | frame.final_adjust = 0; | ||
896 | frame.callee_adjust = 0; | ||
897 | frame.sve_callee_adjust = 0; | ||
898 | - frame.callee_offset = 0; | ||
899 | |||
900 | frame.wb_pop_candidate1 = frame.wb_push_candidate1; | ||
901 | frame.wb_pop_candidate2 = frame.wb_push_candidate2; | ||
902 | @@ -8672,7 +8671,6 @@ aarch64_layout_frame (void) | ||
903 | stp reg1, reg2, [sp, bytes_below_saved_regs] | ||
904 | stp reg3, reg4, [sp, bytes_below_saved_regs + 16] */ | ||
905 | frame.initial_adjust = frame.frame_size; | ||
906 | - frame.callee_offset = const_below_saved_regs; | ||
907 | } | ||
908 | else if (saves_below_hard_fp_p | ||
909 | && known_eq (frame.saved_regs_size, | ||
910 | @@ -9073,12 +9071,13 @@ aarch64_add_cfa_expression (rtx_insn *insn, rtx reg, | ||
911 | } | ||
912 | |||
913 | /* Emit code to save the callee-saved registers from register number START | ||
914 | - to LIMIT to the stack at the location starting at offset START_OFFSET, | ||
915 | - skipping any write-back candidates if SKIP_WB is true. HARD_FP_VALID_P | ||
916 | - is true if the hard frame pointer has been set up. */ | ||
917 | + to LIMIT to the stack. The stack pointer is currently BYTES_BELOW_SP | ||
918 | + bytes above the bottom of the static frame. Skip any write-back | ||
919 | + candidates if SKIP_WB is true. HARD_FP_VALID_P is true if the hard | ||
920 | + frame pointer has been set up. */ | ||
921 | |||
922 | static void | ||
923 | -aarch64_save_callee_saves (poly_int64 start_offset, | ||
924 | +aarch64_save_callee_saves (poly_int64 bytes_below_sp, | ||
925 | unsigned start, unsigned limit, bool skip_wb, | ||
926 | bool hard_fp_valid_p) | ||
927 | { | ||
928 | @@ -9106,7 +9105,9 @@ aarch64_save_callee_saves (poly_int64 start_offset, | ||
929 | |||
930 | machine_mode mode = aarch64_reg_save_mode (regno); | ||
931 | reg = gen_rtx_REG (mode, regno); | ||
932 | - offset = start_offset + frame.reg_offset[regno]; | ||
933 | + offset = (frame.reg_offset[regno] | ||
934 | + + frame.bytes_below_saved_regs | ||
935 | + - bytes_below_sp); | ||
936 | rtx base_rtx = stack_pointer_rtx; | ||
937 | poly_int64 sp_offset = offset; | ||
938 | |||
939 | @@ -9117,9 +9118,7 @@ aarch64_save_callee_saves (poly_int64 start_offset, | ||
940 | else if (GP_REGNUM_P (regno) | ||
941 | && (!offset.is_constant (&const_offset) || const_offset >= 512)) | ||
942 | { | ||
943 | - gcc_assert (known_eq (start_offset, 0)); | ||
944 | - poly_int64 fp_offset | ||
945 | - = frame.below_hard_fp_saved_regs_size; | ||
946 | + poly_int64 fp_offset = frame.bytes_below_hard_fp - bytes_below_sp; | ||
947 | if (hard_fp_valid_p) | ||
948 | base_rtx = hard_frame_pointer_rtx; | ||
949 | else | ||
950 | @@ -9183,12 +9182,13 @@ aarch64_save_callee_saves (poly_int64 start_offset, | ||
951 | } | ||
952 | |||
953 | /* Emit code to restore the callee registers from register number START | ||
954 | - up to and including LIMIT. Restore from the stack offset START_OFFSET, | ||
955 | - skipping any write-back candidates if SKIP_WB is true. Write the | ||
956 | - appropriate REG_CFA_RESTORE notes into CFI_OPS. */ | ||
957 | + up to and including LIMIT. The stack pointer is currently BYTES_BELOW_SP | ||
958 | + bytes above the bottom of the static frame. Skip any write-back | ||
959 | + candidates if SKIP_WB is true. Write the appropriate REG_CFA_RESTORE | ||
960 | + notes into CFI_OPS. */ | ||
961 | |||
962 | static void | ||
963 | -aarch64_restore_callee_saves (poly_int64 start_offset, unsigned start, | ||
964 | +aarch64_restore_callee_saves (poly_int64 bytes_below_sp, unsigned start, | ||
965 | unsigned limit, bool skip_wb, rtx *cfi_ops) | ||
966 | { | ||
967 | aarch64_frame &frame = cfun->machine->frame; | ||
968 | @@ -9214,7 +9214,9 @@ aarch64_restore_callee_saves (poly_int64 start_offset, unsigned start, | ||
969 | |||
970 | machine_mode mode = aarch64_reg_save_mode (regno); | ||
971 | reg = gen_rtx_REG (mode, regno); | ||
972 | - offset = start_offset + frame.reg_offset[regno]; | ||
973 | + offset = (frame.reg_offset[regno] | ||
974 | + + frame.bytes_below_saved_regs | ||
975 | + - bytes_below_sp); | ||
976 | rtx base_rtx = stack_pointer_rtx; | ||
977 | if (mode == VNx2DImode && BYTES_BIG_ENDIAN) | ||
978 | aarch64_adjust_sve_callee_save_base (mode, base_rtx, anchor_reg, | ||
979 | @@ -9990,8 +9992,6 @@ aarch64_expand_prologue (void) | ||
980 | HOST_WIDE_INT callee_adjust = frame.callee_adjust; | ||
981 | poly_int64 final_adjust = frame.final_adjust; | ||
982 | poly_int64 sve_callee_adjust = frame.sve_callee_adjust; | ||
983 | - poly_int64 below_hard_fp_saved_regs_size | ||
984 | - = frame.below_hard_fp_saved_regs_size; | ||
985 | unsigned reg1 = frame.wb_push_candidate1; | ||
986 | unsigned reg2 = frame.wb_push_candidate2; | ||
987 | bool emit_frame_chain = frame.emit_frame_chain; | ||
988 | @@ -10067,8 +10067,8 @@ aarch64_expand_prologue (void) | ||
989 | - frame.hard_fp_offset); | ||
990 | gcc_assert (known_ge (chain_offset, 0)); | ||
991 | |||
992 | - /* The offset of the bottom of the save area from the current SP. */ | ||
993 | - poly_int64 saved_regs_offset = chain_offset - below_hard_fp_saved_regs_size; | ||
994 | + /* The offset of the current SP from the bottom of the static frame. */ | ||
995 | + poly_int64 bytes_below_sp = frame_size - initial_adjust - callee_adjust; | ||
996 | |||
997 | if (emit_frame_chain) | ||
998 | { | ||
999 | @@ -10076,7 +10076,7 @@ aarch64_expand_prologue (void) | ||
1000 | { | ||
1001 | reg1 = R29_REGNUM; | ||
1002 | reg2 = R30_REGNUM; | ||
1003 | - aarch64_save_callee_saves (saved_regs_offset, reg1, reg2, | ||
1004 | + aarch64_save_callee_saves (bytes_below_sp, reg1, reg2, | ||
1005 | false, false); | ||
1006 | } | ||
1007 | else | ||
1008 | @@ -10116,7 +10116,7 @@ aarch64_expand_prologue (void) | ||
1009 | emit_insn (gen_stack_tie (stack_pointer_rtx, hard_frame_pointer_rtx)); | ||
1010 | } | ||
1011 | |||
1012 | - aarch64_save_callee_saves (saved_regs_offset, R0_REGNUM, R30_REGNUM, | ||
1013 | + aarch64_save_callee_saves (bytes_below_sp, R0_REGNUM, R30_REGNUM, | ||
1014 | callee_adjust != 0 || emit_frame_chain, | ||
1015 | emit_frame_chain); | ||
1016 | if (maybe_ne (sve_callee_adjust, 0)) | ||
1017 | @@ -10126,16 +10126,17 @@ aarch64_expand_prologue (void) | ||
1018 | aarch64_allocate_and_probe_stack_space (tmp1_rtx, tmp0_rtx, | ||
1019 | sve_callee_adjust, | ||
1020 | !frame_pointer_needed, false); | ||
1021 | - saved_regs_offset += sve_callee_adjust; | ||
1022 | + bytes_below_sp -= sve_callee_adjust; | ||
1023 | } | ||
1024 | - aarch64_save_callee_saves (saved_regs_offset, P0_REGNUM, P15_REGNUM, | ||
1025 | + aarch64_save_callee_saves (bytes_below_sp, P0_REGNUM, P15_REGNUM, | ||
1026 | false, emit_frame_chain); | ||
1027 | - aarch64_save_callee_saves (saved_regs_offset, V0_REGNUM, V31_REGNUM, | ||
1028 | + aarch64_save_callee_saves (bytes_below_sp, V0_REGNUM, V31_REGNUM, | ||
1029 | callee_adjust != 0 || emit_frame_chain, | ||
1030 | emit_frame_chain); | ||
1031 | |||
1032 | /* We may need to probe the final adjustment if it is larger than the guard | ||
1033 | that is assumed by the called. */ | ||
1034 | + gcc_assert (known_eq (bytes_below_sp, final_adjust)); | ||
1035 | aarch64_allocate_and_probe_stack_space (tmp1_rtx, tmp0_rtx, final_adjust, | ||
1036 | !frame_pointer_needed, true); | ||
1037 | } | ||
1038 | @@ -10170,7 +10171,6 @@ aarch64_expand_epilogue (bool for_sibcall) | ||
1039 | poly_int64 initial_adjust = frame.initial_adjust; | ||
1040 | HOST_WIDE_INT callee_adjust = frame.callee_adjust; | ||
1041 | poly_int64 final_adjust = frame.final_adjust; | ||
1042 | - poly_int64 callee_offset = frame.callee_offset; | ||
1043 | poly_int64 sve_callee_adjust = frame.sve_callee_adjust; | ||
1044 | poly_int64 bytes_below_hard_fp = frame.bytes_below_hard_fp; | ||
1045 | unsigned reg1 = frame.wb_pop_candidate1; | ||
1046 | @@ -10240,9 +10240,9 @@ aarch64_expand_epilogue (bool for_sibcall) | ||
1047 | |||
1048 | /* Restore the vector registers before the predicate registers, | ||
1049 | so that we can use P4 as a temporary for big-endian SVE frames. */ | ||
1050 | - aarch64_restore_callee_saves (callee_offset, V0_REGNUM, V31_REGNUM, | ||
1051 | + aarch64_restore_callee_saves (final_adjust, V0_REGNUM, V31_REGNUM, | ||
1052 | callee_adjust != 0, &cfi_ops); | ||
1053 | - aarch64_restore_callee_saves (callee_offset, P0_REGNUM, P15_REGNUM, | ||
1054 | + aarch64_restore_callee_saves (final_adjust, P0_REGNUM, P15_REGNUM, | ||
1055 | false, &cfi_ops); | ||
1056 | if (maybe_ne (sve_callee_adjust, 0)) | ||
1057 | aarch64_add_sp (NULL_RTX, NULL_RTX, sve_callee_adjust, true); | ||
1058 | @@ -10250,7 +10250,7 @@ aarch64_expand_epilogue (bool for_sibcall) | ||
1059 | /* When shadow call stack is enabled, the scs_pop in the epilogue will | ||
1060 | restore x30, we don't need to restore x30 again in the traditional | ||
1061 | way. */ | ||
1062 | - aarch64_restore_callee_saves (callee_offset - sve_callee_adjust, | ||
1063 | + aarch64_restore_callee_saves (final_adjust + sve_callee_adjust, | ||
1064 | R0_REGNUM, last_gpr, | ||
1065 | callee_adjust != 0, &cfi_ops); | ||
1066 | |||
1067 | diff --git a/gcc/config/aarch64/aarch64.h b/gcc/config/aarch64/aarch64.h | ||
1068 | index 4263d29d29d..fd820b1be4e 100644 | ||
1069 | --- a/gcc/config/aarch64/aarch64.h | ||
1070 | +++ b/gcc/config/aarch64/aarch64.h | ||
1071 | @@ -813,10 +813,6 @@ struct GTY (()) aarch64_frame | ||
1072 | It is zero when no push is used. */ | ||
1073 | HOST_WIDE_INT callee_adjust; | ||
1074 | |||
1075 | - /* The offset from SP to the callee-save registers after initial_adjust. | ||
1076 | - It may be non-zero if no push is used (ie. callee_adjust == 0). */ | ||
1077 | - poly_int64 callee_offset; | ||
1078 | - | ||
1079 | /* The size of the stack adjustment before saving or after restoring | ||
1080 | SVE registers. */ | ||
1081 | poly_int64 sve_callee_adjust; | ||
1082 | -- | ||
1083 | 2.34.1 | ||
1084 | |||
1085 | |||
1086 | From 8ae9181426f2700c2e5a2909487fa630e6fa406b Mon Sep 17 00:00:00 2001 | ||
1087 | From: Richard Sandiford <richard.sandiford@arm.com> | ||
1088 | Date: Tue, 12 Sep 2023 16:07:15 +0100 | ||
1089 | Subject: [PATCH 07/19] aarch64: Only calculate chain_offset if there is a | ||
1090 | chain | ||
1091 | |||
1092 | After previous patches, it is no longer necessary to calculate | ||
1093 | a chain_offset in cases where there is no chain record. | ||
1094 | |||
1095 | gcc/ | ||
1096 | * config/aarch64/aarch64.cc (aarch64_expand_prologue): Move the | ||
1097 | calculation of chain_offset into the emit_frame_chain block. | ||
1098 | --- | ||
1099 | gcc/config/aarch64/aarch64.cc | 10 +++++----- | ||
1100 | 1 file changed, 5 insertions(+), 5 deletions(-) | ||
1101 | |||
1102 | diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc | ||
1103 | index 46ae5cf7673..0e9b9717c08 100644 | ||
1104 | --- a/gcc/config/aarch64/aarch64.cc | ||
1105 | +++ b/gcc/config/aarch64/aarch64.cc | ||
1106 | @@ -10062,16 +10062,16 @@ aarch64_expand_prologue (void) | ||
1107 | if (callee_adjust != 0) | ||
1108 | aarch64_push_regs (reg1, reg2, callee_adjust); | ||
1109 | |||
1110 | - /* The offset of the frame chain record (if any) from the current SP. */ | ||
1111 | - poly_int64 chain_offset = (initial_adjust + callee_adjust | ||
1112 | - - frame.hard_fp_offset); | ||
1113 | - gcc_assert (known_ge (chain_offset, 0)); | ||
1114 | - | ||
1115 | /* The offset of the current SP from the bottom of the static frame. */ | ||
1116 | poly_int64 bytes_below_sp = frame_size - initial_adjust - callee_adjust; | ||
1117 | |||
1118 | if (emit_frame_chain) | ||
1119 | { | ||
1120 | + /* The offset of the frame chain record (if any) from the current SP. */ | ||
1121 | + poly_int64 chain_offset = (initial_adjust + callee_adjust | ||
1122 | + - frame.hard_fp_offset); | ||
1123 | + gcc_assert (known_ge (chain_offset, 0)); | ||
1124 | + | ||
1125 | if (callee_adjust == 0) | ||
1126 | { | ||
1127 | reg1 = R29_REGNUM; | ||
1128 | -- | ||
1129 | 2.34.1 | ||
1130 | |||
1131 | |||
1132 | From 375794feb614cee1f41b710b9cc1b6f25da6c1cb Mon Sep 17 00:00:00 2001 | ||
1133 | From: Richard Sandiford <richard.sandiford@arm.com> | ||
1134 | Date: Tue, 12 Sep 2023 16:07:15 +0100 | ||
1135 | Subject: [PATCH 08/19] aarch64: Rename locals_offset to bytes_above_locals | ||
1136 | MIME-Version: 1.0 | ||
1137 | Content-Type: text/plain; charset=UTF-8 | ||
1138 | Content-Transfer-Encoding: 8bit | ||
1139 | |||
1140 | locals_offset was described as: | ||
1141 | |||
1142 | /* Offset from the base of the frame (incomming SP) to the | ||
1143 | top of the locals area. This value is always a multiple of | ||
1144 | STACK_BOUNDARY. */ | ||
1145 | |||
1146 | This is implicitly an “upside down” view of the frame: the incoming | ||
1147 | SP is at offset 0, and anything N bytes below the incoming SP is at | ||
1148 | offset N (rather than -N). | ||
1149 | |||
1150 | However, reg_offset instead uses a “right way up” view; that is, | ||
1151 | it views offsets in address terms. Something above X is at a | ||
1152 | positive offset from X and something below X is at a negative | ||
1153 | offset from X. | ||
1154 | |||
1155 | Also, even on FRAME_GROWS_DOWNWARD targets like AArch64, | ||
1156 | target-independent code views offsets in address terms too: | ||
1157 | locals are allocated at negative offsets to virtual_stack_vars. | ||
1158 | |||
1159 | It seems confusing to have *_offset fields of the same structure | ||
1160 | using different polarities like this. This patch tries to avoid | ||
1161 | that by renaming locals_offset to bytes_above_locals. | ||
1162 | |||
1163 | gcc/ | ||
1164 | * config/aarch64/aarch64.h (aarch64_frame::locals_offset): Rename to... | ||
1165 | (aarch64_frame::bytes_above_locals): ...this. | ||
1166 | * config/aarch64/aarch64.cc (aarch64_layout_frame) | ||
1167 | (aarch64_initial_elimination_offset): Update accordingly. | ||
1168 | --- | ||
1169 | gcc/config/aarch64/aarch64.cc | 6 +++--- | ||
1170 | gcc/config/aarch64/aarch64.h | 6 +++--- | ||
1171 | 2 files changed, 6 insertions(+), 6 deletions(-) | ||
1172 | |||
1173 | diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc | ||
1174 | index 0e9b9717c08..0a22f91520e 100644 | ||
1175 | --- a/gcc/config/aarch64/aarch64.cc | ||
1176 | +++ b/gcc/config/aarch64/aarch64.cc | ||
1177 | @@ -8598,7 +8598,7 @@ aarch64_layout_frame (void) | ||
1178 | STACK_BOUNDARY / BITS_PER_UNIT)); | ||
1179 | frame.frame_size = saved_regs_and_above + frame.bytes_below_saved_regs; | ||
1180 | |||
1181 | - frame.locals_offset = frame.saved_varargs_size; | ||
1182 | + frame.bytes_above_locals = frame.saved_varargs_size; | ||
1183 | |||
1184 | frame.initial_adjust = 0; | ||
1185 | frame.final_adjust = 0; | ||
1186 | @@ -12754,13 +12754,13 @@ aarch64_initial_elimination_offset (unsigned from, unsigned to) | ||
1187 | return frame.hard_fp_offset; | ||
1188 | |||
1189 | if (from == FRAME_POINTER_REGNUM) | ||
1190 | - return frame.hard_fp_offset - frame.locals_offset; | ||
1191 | + return frame.hard_fp_offset - frame.bytes_above_locals; | ||
1192 | } | ||
1193 | |||
1194 | if (to == STACK_POINTER_REGNUM) | ||
1195 | { | ||
1196 | if (from == FRAME_POINTER_REGNUM) | ||
1197 | - return frame.frame_size - frame.locals_offset; | ||
1198 | + return frame.frame_size - frame.bytes_above_locals; | ||
1199 | } | ||
1200 | |||
1201 | return frame.frame_size; | ||
1202 | diff --git a/gcc/config/aarch64/aarch64.h b/gcc/config/aarch64/aarch64.h | ||
1203 | index fd820b1be4e..7ae12d13e2b 100644 | ||
1204 | --- a/gcc/config/aarch64/aarch64.h | ||
1205 | +++ b/gcc/config/aarch64/aarch64.h | ||
1206 | @@ -791,10 +791,10 @@ struct GTY (()) aarch64_frame | ||
1207 | always a multiple of STACK_BOUNDARY. */ | ||
1208 | poly_int64 bytes_below_hard_fp; | ||
1209 | |||
1210 | - /* Offset from the base of the frame (incomming SP) to the | ||
1211 | - top of the locals area. This value is always a multiple of | ||
1212 | + /* The number of bytes between the top of the locals area and the top | ||
1213 | + of the frame (the incomming SP). This value is always a multiple of | ||
1214 | STACK_BOUNDARY. */ | ||
1215 | - poly_int64 locals_offset; | ||
1216 | + poly_int64 bytes_above_locals; | ||
1217 | |||
1218 | /* Offset from the base of the frame (incomming SP) to the | ||
1219 | hard_frame_pointer. This value is always a multiple of | ||
1220 | -- | ||
1221 | 2.34.1 | ||
1222 | |||
1223 | |||
1224 | From 1a9ea1c45c75615ffbfabe652b3598a1d7be2168 Mon Sep 17 00:00:00 2001 | ||
1225 | From: Richard Sandiford <richard.sandiford@arm.com> | ||
1226 | Date: Tue, 12 Sep 2023 16:07:16 +0100 | ||
1227 | Subject: [PATCH 09/19] aarch64: Rename hard_fp_offset to bytes_above_hard_fp | ||
1228 | MIME-Version: 1.0 | ||
1229 | Content-Type: text/plain; charset=UTF-8 | ||
1230 | Content-Transfer-Encoding: 8bit | ||
1231 | |||
1232 | Similarly to the previous locals_offset patch, hard_fp_offset | ||
1233 | was described as: | ||
1234 | |||
1235 | /* Offset from the base of the frame (incomming SP) to the | ||
1236 | hard_frame_pointer. This value is always a multiple of | ||
1237 | STACK_BOUNDARY. */ | ||
1238 | poly_int64 hard_fp_offset; | ||
1239 | |||
1240 | which again took an “upside-down” view: higher offsets meant lower | ||
1241 | addresses. This patch renames the field to bytes_above_hard_fp instead. | ||
1242 | |||
1243 | gcc/ | ||
1244 | * config/aarch64/aarch64.h (aarch64_frame::hard_fp_offset): Rename | ||
1245 | to... | ||
1246 | (aarch64_frame::bytes_above_hard_fp): ...this. | ||
1247 | * config/aarch64/aarch64.cc (aarch64_layout_frame) | ||
1248 | (aarch64_expand_prologue): Update accordingly. | ||
1249 | (aarch64_initial_elimination_offset): Likewise. | ||
1250 | --- | ||
1251 | gcc/config/aarch64/aarch64.cc | 26 +++++++++++++------------- | ||
1252 | gcc/config/aarch64/aarch64.h | 6 +++--- | ||
1253 | 2 files changed, 16 insertions(+), 16 deletions(-) | ||
1254 | |||
1255 | diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc | ||
1256 | index 0a22f91520e..95499ae49ba 100644 | ||
1257 | --- a/gcc/config/aarch64/aarch64.cc | ||
1258 | +++ b/gcc/config/aarch64/aarch64.cc | ||
1259 | @@ -8590,7 +8590,7 @@ aarch64_layout_frame (void) | ||
1260 | + get_frame_size (), | ||
1261 | STACK_BOUNDARY / BITS_PER_UNIT); | ||
1262 | |||
1263 | - frame.hard_fp_offset | ||
1264 | + frame.bytes_above_hard_fp | ||
1265 | = saved_regs_and_above - frame.below_hard_fp_saved_regs_size; | ||
1266 | |||
1267 | /* Both these values are already aligned. */ | ||
1268 | @@ -8639,13 +8639,13 @@ aarch64_layout_frame (void) | ||
1269 | else if (frame.wb_pop_candidate1 != INVALID_REGNUM) | ||
1270 | max_push_offset = 256; | ||
1271 | |||
1272 | - HOST_WIDE_INT const_size, const_below_saved_regs, const_fp_offset; | ||
1273 | + HOST_WIDE_INT const_size, const_below_saved_regs, const_above_fp; | ||
1274 | HOST_WIDE_INT const_saved_regs_size; | ||
1275 | if (known_eq (frame.saved_regs_size, 0)) | ||
1276 | frame.initial_adjust = frame.frame_size; | ||
1277 | else if (frame.frame_size.is_constant (&const_size) | ||
1278 | && const_size < max_push_offset | ||
1279 | - && known_eq (frame.hard_fp_offset, const_size)) | ||
1280 | + && known_eq (frame.bytes_above_hard_fp, const_size)) | ||
1281 | { | ||
1282 | /* Simple, small frame with no data below the saved registers. | ||
1283 | |||
1284 | @@ -8662,8 +8662,8 @@ aarch64_layout_frame (void) | ||
1285 | case that it hardly seems worth the effort though. */ | ||
1286 | && (!saves_below_hard_fp_p || const_below_saved_regs == 0) | ||
1287 | && !(cfun->calls_alloca | ||
1288 | - && frame.hard_fp_offset.is_constant (&const_fp_offset) | ||
1289 | - && const_fp_offset < max_push_offset)) | ||
1290 | + && frame.bytes_above_hard_fp.is_constant (&const_above_fp) | ||
1291 | + && const_above_fp < max_push_offset)) | ||
1292 | { | ||
1293 | /* Frame with small area below the saved registers: | ||
1294 | |||
1295 | @@ -8681,12 +8681,12 @@ aarch64_layout_frame (void) | ||
1296 | sub sp, sp, hard_fp_offset + below_hard_fp_saved_regs_size | ||
1297 | save SVE registers relative to SP | ||
1298 | sub sp, sp, bytes_below_saved_regs */ | ||
1299 | - frame.initial_adjust = (frame.hard_fp_offset | ||
1300 | + frame.initial_adjust = (frame.bytes_above_hard_fp | ||
1301 | + frame.below_hard_fp_saved_regs_size); | ||
1302 | frame.final_adjust = frame.bytes_below_saved_regs; | ||
1303 | } | ||
1304 | - else if (frame.hard_fp_offset.is_constant (&const_fp_offset) | ||
1305 | - && const_fp_offset < max_push_offset) | ||
1306 | + else if (frame.bytes_above_hard_fp.is_constant (&const_above_fp) | ||
1307 | + && const_above_fp < max_push_offset) | ||
1308 | { | ||
1309 | /* Frame with large area below the saved registers, or with SVE saves, | ||
1310 | but with a small area above: | ||
1311 | @@ -8696,7 +8696,7 @@ aarch64_layout_frame (void) | ||
1312 | [sub sp, sp, below_hard_fp_saved_regs_size] | ||
1313 | [save SVE registers relative to SP] | ||
1314 | sub sp, sp, bytes_below_saved_regs */ | ||
1315 | - frame.callee_adjust = const_fp_offset; | ||
1316 | + frame.callee_adjust = const_above_fp; | ||
1317 | frame.sve_callee_adjust = frame.below_hard_fp_saved_regs_size; | ||
1318 | frame.final_adjust = frame.bytes_below_saved_regs; | ||
1319 | } | ||
1320 | @@ -8711,7 +8711,7 @@ aarch64_layout_frame (void) | ||
1321 | [sub sp, sp, below_hard_fp_saved_regs_size] | ||
1322 | [save SVE registers relative to SP] | ||
1323 | sub sp, sp, bytes_below_saved_regs */ | ||
1324 | - frame.initial_adjust = frame.hard_fp_offset; | ||
1325 | + frame.initial_adjust = frame.bytes_above_hard_fp; | ||
1326 | frame.sve_callee_adjust = frame.below_hard_fp_saved_regs_size; | ||
1327 | frame.final_adjust = frame.bytes_below_saved_regs; | ||
1328 | } | ||
1329 | @@ -10069,7 +10069,7 @@ aarch64_expand_prologue (void) | ||
1330 | { | ||
1331 | /* The offset of the frame chain record (if any) from the current SP. */ | ||
1332 | poly_int64 chain_offset = (initial_adjust + callee_adjust | ||
1333 | - - frame.hard_fp_offset); | ||
1334 | + - frame.bytes_above_hard_fp); | ||
1335 | gcc_assert (known_ge (chain_offset, 0)); | ||
1336 | |||
1337 | if (callee_adjust == 0) | ||
1338 | @@ -12751,10 +12751,10 @@ aarch64_initial_elimination_offset (unsigned from, unsigned to) | ||
1339 | if (to == HARD_FRAME_POINTER_REGNUM) | ||
1340 | { | ||
1341 | if (from == ARG_POINTER_REGNUM) | ||
1342 | - return frame.hard_fp_offset; | ||
1343 | + return frame.bytes_above_hard_fp; | ||
1344 | |||
1345 | if (from == FRAME_POINTER_REGNUM) | ||
1346 | - return frame.hard_fp_offset - frame.bytes_above_locals; | ||
1347 | + return frame.bytes_above_hard_fp - frame.bytes_above_locals; | ||
1348 | } | ||
1349 | |||
1350 | if (to == STACK_POINTER_REGNUM) | ||
1351 | diff --git a/gcc/config/aarch64/aarch64.h b/gcc/config/aarch64/aarch64.h | ||
1352 | index 7ae12d13e2b..3808f49e9ca 100644 | ||
1353 | --- a/gcc/config/aarch64/aarch64.h | ||
1354 | +++ b/gcc/config/aarch64/aarch64.h | ||
1355 | @@ -796,10 +796,10 @@ struct GTY (()) aarch64_frame | ||
1356 | STACK_BOUNDARY. */ | ||
1357 | poly_int64 bytes_above_locals; | ||
1358 | |||
1359 | - /* Offset from the base of the frame (incomming SP) to the | ||
1360 | - hard_frame_pointer. This value is always a multiple of | ||
1361 | + /* The number of bytes between the hard_frame_pointer and the top of | ||
1362 | + the frame (the incomming SP). This value is always a multiple of | ||
1363 | STACK_BOUNDARY. */ | ||
1364 | - poly_int64 hard_fp_offset; | ||
1365 | + poly_int64 bytes_above_hard_fp; | ||
1366 | |||
1367 | /* The size of the frame. This value is the offset from base of the | ||
1368 | frame (incomming SP) to the stack_pointer. This value is always | ||
1369 | -- | ||
1370 | 2.34.1 | ||
1371 | |||
1372 | |||
1373 | From d202ce1ecf60a36a3e1009917dd76109248ce9be Mon Sep 17 00:00:00 2001 | ||
1374 | From: Richard Sandiford <richard.sandiford@arm.com> | ||
1375 | Date: Tue, 12 Sep 2023 16:07:16 +0100 | ||
1376 | Subject: [PATCH 10/19] aarch64: Tweak frame_size comment | ||
1377 | MIME-Version: 1.0 | ||
1378 | Content-Type: text/plain; charset=UTF-8 | ||
1379 | Content-Transfer-Encoding: 8bit | ||
1380 | |||
1381 | This patch fixes another case in which a value was described with | ||
1382 | an “upside-down” view. | ||
1383 | |||
1384 | gcc/ | ||
1385 | * config/aarch64/aarch64.h (aarch64_frame::frame_size): Tweak comment. | ||
1386 | --- | ||
1387 | gcc/config/aarch64/aarch64.h | 4 ++-- | ||
1388 | 1 file changed, 2 insertions(+), 2 deletions(-) | ||
1389 | |||
1390 | diff --git a/gcc/config/aarch64/aarch64.h b/gcc/config/aarch64/aarch64.h | ||
1391 | index 3808f49e9ca..108a5731b0d 100644 | ||
1392 | --- a/gcc/config/aarch64/aarch64.h | ||
1393 | +++ b/gcc/config/aarch64/aarch64.h | ||
1394 | @@ -801,8 +801,8 @@ struct GTY (()) aarch64_frame | ||
1395 | STACK_BOUNDARY. */ | ||
1396 | poly_int64 bytes_above_hard_fp; | ||
1397 | |||
1398 | - /* The size of the frame. This value is the offset from base of the | ||
1399 | - frame (incomming SP) to the stack_pointer. This value is always | ||
1400 | + /* The size of the frame, i.e. the number of bytes between the bottom | ||
1401 | + of the outgoing arguments and the incoming SP. This value is always | ||
1402 | a multiple of STACK_BOUNDARY. */ | ||
1403 | poly_int64 frame_size; | ||
1404 | |||
1405 | -- | ||
1406 | 2.34.1 | ||
1407 | |||
1408 | |||
1409 | From f2b585375205b0a1802d79c682ba33766ecd1f0f Mon Sep 17 00:00:00 2001 | ||
1410 | From: Richard Sandiford <richard.sandiford@arm.com> | ||
1411 | Date: Tue, 12 Sep 2023 16:07:17 +0100 | ||
1412 | Subject: [PATCH 11/19] aarch64: Measure reg_offset from the bottom of the | ||
1413 | frame | ||
1414 | |||
1415 | reg_offset was measured from the bottom of the saved register area. | ||
1416 | This made perfect sense with the original layout, since the bottom | ||
1417 | of the saved register area was also the hard frame pointer address. | ||
1418 | It became slightly less obvious with SVE, since we save SVE | ||
1419 | registers below the hard frame pointer, but it still made sense. | ||
1420 | |||
1421 | However, if we want to allow different frame layouts, it's more | ||
1422 | convenient and obvious to measure reg_offset from the bottom of | ||
1423 | the frame. After previous patches, it's also a slight simplification | ||
1424 | in its own right. | ||
1425 | |||
1426 | gcc/ | ||
1427 | * config/aarch64/aarch64.h (aarch64_frame): Add comment above | ||
1428 | reg_offset. | ||
1429 | * config/aarch64/aarch64.cc (aarch64_layout_frame): Walk offsets | ||
1430 | from the bottom of the frame, rather than the bottom of the saved | ||
1431 | register area. Measure reg_offset from the bottom of the frame | ||
1432 | rather than the bottom of the saved register area. | ||
1433 | (aarch64_save_callee_saves): Update accordingly. | ||
1434 | (aarch64_restore_callee_saves): Likewise. | ||
1435 | (aarch64_get_separate_components): Likewise. | ||
1436 | (aarch64_process_components): Likewise. | ||
1437 | --- | ||
1438 | gcc/config/aarch64/aarch64.cc | 53 ++++++++++++++++------------------- | ||
1439 | gcc/config/aarch64/aarch64.h | 3 ++ | ||
1440 | 2 files changed, 27 insertions(+), 29 deletions(-) | ||
1441 | |||
1442 | diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc | ||
1443 | index 95499ae49ba..af99807ef8a 100644 | ||
1444 | --- a/gcc/config/aarch64/aarch64.cc | ||
1445 | +++ b/gcc/config/aarch64/aarch64.cc | ||
1446 | @@ -8400,7 +8400,6 @@ aarch64_needs_frame_chain (void) | ||
1447 | static void | ||
1448 | aarch64_layout_frame (void) | ||
1449 | { | ||
1450 | - poly_int64 offset = 0; | ||
1451 | int regno, last_fp_reg = INVALID_REGNUM; | ||
1452 | machine_mode vector_save_mode = aarch64_reg_save_mode (V8_REGNUM); | ||
1453 | poly_int64 vector_save_size = GET_MODE_SIZE (vector_save_mode); | ||
1454 | @@ -8478,7 +8477,9 @@ aarch64_layout_frame (void) | ||
1455 | gcc_assert (crtl->is_leaf | ||
1456 | || maybe_ne (frame.reg_offset[R30_REGNUM], SLOT_NOT_REQUIRED)); | ||
1457 | |||
1458 | - frame.bytes_below_saved_regs = crtl->outgoing_args_size; | ||
1459 | + poly_int64 offset = crtl->outgoing_args_size; | ||
1460 | + gcc_assert (multiple_p (offset, STACK_BOUNDARY / BITS_PER_UNIT)); | ||
1461 | + frame.bytes_below_saved_regs = offset; | ||
1462 | |||
1463 | /* Now assign stack slots for the registers. Start with the predicate | ||
1464 | registers, since predicate LDR and STR have a relatively small | ||
1465 | @@ -8490,7 +8491,8 @@ aarch64_layout_frame (void) | ||
1466 | offset += BYTES_PER_SVE_PRED; | ||
1467 | } | ||
1468 | |||
1469 | - if (maybe_ne (offset, 0)) | ||
1470 | + poly_int64 saved_prs_size = offset - frame.bytes_below_saved_regs; | ||
1471 | + if (maybe_ne (saved_prs_size, 0)) | ||
1472 | { | ||
1473 | /* If we have any vector registers to save above the predicate registers, | ||
1474 | the offset of the vector register save slots need to be a multiple | ||
1475 | @@ -8508,10 +8510,10 @@ aarch64_layout_frame (void) | ||
1476 | offset = aligned_upper_bound (offset, STACK_BOUNDARY / BITS_PER_UNIT); | ||
1477 | else | ||
1478 | { | ||
1479 | - if (known_le (offset, vector_save_size)) | ||
1480 | - offset = vector_save_size; | ||
1481 | - else if (known_le (offset, vector_save_size * 2)) | ||
1482 | - offset = vector_save_size * 2; | ||
1483 | + if (known_le (saved_prs_size, vector_save_size)) | ||
1484 | + offset = frame.bytes_below_saved_regs + vector_save_size; | ||
1485 | + else if (known_le (saved_prs_size, vector_save_size * 2)) | ||
1486 | + offset = frame.bytes_below_saved_regs + vector_save_size * 2; | ||
1487 | else | ||
1488 | gcc_unreachable (); | ||
1489 | } | ||
1490 | @@ -8528,9 +8530,10 @@ aarch64_layout_frame (void) | ||
1491 | |||
1492 | /* OFFSET is now the offset of the hard frame pointer from the bottom | ||
1493 | of the callee save area. */ | ||
1494 | - bool saves_below_hard_fp_p = maybe_ne (offset, 0); | ||
1495 | - frame.below_hard_fp_saved_regs_size = offset; | ||
1496 | - frame.bytes_below_hard_fp = offset + frame.bytes_below_saved_regs; | ||
1497 | + frame.below_hard_fp_saved_regs_size = offset - frame.bytes_below_saved_regs; | ||
1498 | + bool saves_below_hard_fp_p | ||
1499 | + = maybe_ne (frame.below_hard_fp_saved_regs_size, 0); | ||
1500 | + frame.bytes_below_hard_fp = offset; | ||
1501 | if (frame.emit_frame_chain) | ||
1502 | { | ||
1503 | /* FP and LR are placed in the linkage record. */ | ||
1504 | @@ -8581,9 +8584,10 @@ aarch64_layout_frame (void) | ||
1505 | |||
1506 | offset = aligned_upper_bound (offset, STACK_BOUNDARY / BITS_PER_UNIT); | ||
1507 | |||
1508 | - frame.saved_regs_size = offset; | ||
1509 | + frame.saved_regs_size = offset - frame.bytes_below_saved_regs; | ||
1510 | |||
1511 | - poly_int64 varargs_and_saved_regs_size = offset + frame.saved_varargs_size; | ||
1512 | + poly_int64 varargs_and_saved_regs_size | ||
1513 | + = frame.saved_regs_size + frame.saved_varargs_size; | ||
1514 | |||
1515 | poly_int64 saved_regs_and_above | ||
1516 | = aligned_upper_bound (varargs_and_saved_regs_size | ||
1517 | @@ -9105,9 +9109,7 @@ aarch64_save_callee_saves (poly_int64 bytes_below_sp, | ||
1518 | |||
1519 | machine_mode mode = aarch64_reg_save_mode (regno); | ||
1520 | reg = gen_rtx_REG (mode, regno); | ||
1521 | - offset = (frame.reg_offset[regno] | ||
1522 | - + frame.bytes_below_saved_regs | ||
1523 | - - bytes_below_sp); | ||
1524 | + offset = frame.reg_offset[regno] - bytes_below_sp; | ||
1525 | rtx base_rtx = stack_pointer_rtx; | ||
1526 | poly_int64 sp_offset = offset; | ||
1527 | |||
1528 | @@ -9214,9 +9216,7 @@ aarch64_restore_callee_saves (poly_int64 bytes_below_sp, unsigned start, | ||
1529 | |||
1530 | machine_mode mode = aarch64_reg_save_mode (regno); | ||
1531 | reg = gen_rtx_REG (mode, regno); | ||
1532 | - offset = (frame.reg_offset[regno] | ||
1533 | - + frame.bytes_below_saved_regs | ||
1534 | - - bytes_below_sp); | ||
1535 | + offset = frame.reg_offset[regno] - bytes_below_sp; | ||
1536 | rtx base_rtx = stack_pointer_rtx; | ||
1537 | if (mode == VNx2DImode && BYTES_BIG_ENDIAN) | ||
1538 | aarch64_adjust_sve_callee_save_base (mode, base_rtx, anchor_reg, | ||
1539 | @@ -9355,14 +9355,12 @@ aarch64_get_separate_components (void) | ||
1540 | it as a stack probe for -fstack-clash-protection. */ | ||
1541 | if (flag_stack_clash_protection | ||
1542 | && maybe_ne (frame.below_hard_fp_saved_regs_size, 0) | ||
1543 | - && known_eq (offset, 0)) | ||
1544 | + && known_eq (offset, frame.bytes_below_saved_regs)) | ||
1545 | continue; | ||
1546 | |||
1547 | /* Get the offset relative to the register we'll use. */ | ||
1548 | if (frame_pointer_needed) | ||
1549 | - offset -= frame.below_hard_fp_saved_regs_size; | ||
1550 | - else | ||
1551 | - offset += frame.bytes_below_saved_regs; | ||
1552 | + offset -= frame.bytes_below_hard_fp; | ||
1553 | |||
1554 | /* Check that we can access the stack slot of the register with one | ||
1555 | direct load with no adjustments needed. */ | ||
1556 | @@ -9509,9 +9507,7 @@ aarch64_process_components (sbitmap components, bool prologue_p) | ||
1557 | rtx reg = gen_rtx_REG (mode, regno); | ||
1558 | poly_int64 offset = frame.reg_offset[regno]; | ||
1559 | if (frame_pointer_needed) | ||
1560 | - offset -= frame.below_hard_fp_saved_regs_size; | ||
1561 | - else | ||
1562 | - offset += frame.bytes_below_saved_regs; | ||
1563 | + offset -= frame.bytes_below_hard_fp; | ||
1564 | |||
1565 | rtx addr = plus_constant (Pmode, ptr_reg, offset); | ||
1566 | rtx mem = gen_frame_mem (mode, addr); | ||
1567 | @@ -9563,9 +9559,7 @@ aarch64_process_components (sbitmap components, bool prologue_p) | ||
1568 | /* REGNO2 can be saved/restored in a pair with REGNO. */ | ||
1569 | rtx reg2 = gen_rtx_REG (mode, regno2); | ||
1570 | if (frame_pointer_needed) | ||
1571 | - offset2 -= frame.below_hard_fp_saved_regs_size; | ||
1572 | - else | ||
1573 | - offset2 += frame.bytes_below_saved_regs; | ||
1574 | + offset2 -= frame.bytes_below_hard_fp; | ||
1575 | rtx addr2 = plus_constant (Pmode, ptr_reg, offset2); | ||
1576 | rtx mem2 = gen_frame_mem (mode, addr2); | ||
1577 | rtx set2 = prologue_p ? gen_rtx_SET (mem2, reg2) | ||
1578 | @@ -9681,7 +9675,8 @@ aarch64_allocate_and_probe_stack_space (rtx temp1, rtx temp2, | ||
1579 | if (final_adjustment_p | ||
1580 | && known_eq (frame.below_hard_fp_saved_regs_size, 0)) | ||
1581 | { | ||
1582 | - poly_int64 lr_offset = frame.reg_offset[LR_REGNUM]; | ||
1583 | + poly_int64 lr_offset = (frame.reg_offset[LR_REGNUM] | ||
1584 | + - frame.bytes_below_saved_regs); | ||
1585 | if (known_ge (lr_offset, 0)) | ||
1586 | min_probe_threshold -= lr_offset.to_constant (); | ||
1587 | else | ||
1588 | diff --git a/gcc/config/aarch64/aarch64.h b/gcc/config/aarch64/aarch64.h | ||
1589 | index 108a5731b0d..c8becb098c8 100644 | ||
1590 | --- a/gcc/config/aarch64/aarch64.h | ||
1591 | +++ b/gcc/config/aarch64/aarch64.h | ||
1592 | @@ -766,6 +766,9 @@ extern enum aarch64_processor aarch64_tune; | ||
1593 | #ifdef HAVE_POLY_INT_H | ||
1594 | struct GTY (()) aarch64_frame | ||
1595 | { | ||
1596 | + /* The offset from the bottom of the static frame (the bottom of the | ||
1597 | + outgoing arguments) of each register save slot, or -2 if no save is | ||
1598 | + needed. */ | ||
1599 | poly_int64 reg_offset[LAST_SAVED_REGNUM + 1]; | ||
1600 | |||
1601 | /* The number of extra stack bytes taken up by register varargs. | ||
1602 | -- | ||
1603 | 2.34.1 | ||
1604 | |||
1605 | |||
1606 | From 79faabda181d0d9fd29a3cf5726ba65bdee945b5 Mon Sep 17 00:00:00 2001 | ||
1607 | From: Richard Sandiford <richard.sandiford@arm.com> | ||
1608 | Date: Tue, 12 Sep 2023 16:07:17 +0100 | ||
1609 | Subject: [PATCH 12/19] aarch64: Simplify top of frame allocation | ||
1610 | |||
1611 | After previous patches, it no longer really makes sense to allocate | ||
1612 | the top of the frame in terms of varargs_and_saved_regs_size and | ||
1613 | saved_regs_and_above. | ||
1614 | |||
1615 | gcc/ | ||
1616 | * config/aarch64/aarch64.cc (aarch64_layout_frame): Simplify | ||
1617 | the allocation of the top of the frame. | ||
1618 | --- | ||
1619 | gcc/config/aarch64/aarch64.cc | 23 ++++++++--------------- | ||
1620 | 1 file changed, 8 insertions(+), 15 deletions(-) | ||
1621 | |||
1622 | diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc | ||
1623 | index af99807ef8a..31b00094c2a 100644 | ||
1624 | --- a/gcc/config/aarch64/aarch64.cc | ||
1625 | +++ b/gcc/config/aarch64/aarch64.cc | ||
1626 | @@ -8586,23 +8586,16 @@ aarch64_layout_frame (void) | ||
1627 | |||
1628 | frame.saved_regs_size = offset - frame.bytes_below_saved_regs; | ||
1629 | |||
1630 | - poly_int64 varargs_and_saved_regs_size | ||
1631 | - = frame.saved_regs_size + frame.saved_varargs_size; | ||
1632 | - | ||
1633 | - poly_int64 saved_regs_and_above | ||
1634 | - = aligned_upper_bound (varargs_and_saved_regs_size | ||
1635 | - + get_frame_size (), | ||
1636 | - STACK_BOUNDARY / BITS_PER_UNIT); | ||
1637 | - | ||
1638 | - frame.bytes_above_hard_fp | ||
1639 | - = saved_regs_and_above - frame.below_hard_fp_saved_regs_size; | ||
1640 | + offset += get_frame_size (); | ||
1641 | + offset = aligned_upper_bound (offset, STACK_BOUNDARY / BITS_PER_UNIT); | ||
1642 | + auto top_of_locals = offset; | ||
1643 | |||
1644 | - /* Both these values are already aligned. */ | ||
1645 | - gcc_assert (multiple_p (frame.bytes_below_saved_regs, | ||
1646 | - STACK_BOUNDARY / BITS_PER_UNIT)); | ||
1647 | - frame.frame_size = saved_regs_and_above + frame.bytes_below_saved_regs; | ||
1648 | + offset += frame.saved_varargs_size; | ||
1649 | + gcc_assert (multiple_p (offset, STACK_BOUNDARY / BITS_PER_UNIT)); | ||
1650 | + frame.frame_size = offset; | ||
1651 | |||
1652 | - frame.bytes_above_locals = frame.saved_varargs_size; | ||
1653 | + frame.bytes_above_hard_fp = frame.frame_size - frame.bytes_below_hard_fp; | ||
1654 | + frame.bytes_above_locals = frame.frame_size - top_of_locals; | ||
1655 | |||
1656 | frame.initial_adjust = 0; | ||
1657 | frame.final_adjust = 0; | ||
1658 | -- | ||
1659 | 2.34.1 | ||
1660 | |||
1661 | |||
1662 | From 4e62049e403b141e6f916176160dac8cbd65fe47 Mon Sep 17 00:00:00 2001 | ||
1663 | From: Richard Sandiford <richard.sandiford@arm.com> | ||
1664 | Date: Tue, 12 Sep 2023 16:07:18 +0100 | ||
1665 | Subject: [PATCH 13/19] aarch64: Minor initial adjustment tweak | ||
1666 | |||
1667 | This patch just changes a calculation of initial_adjust | ||
1668 | to one that makes it slightly more obvious that the total | ||
1669 | adjustment is frame.frame_size. | ||
1670 | |||
1671 | gcc/ | ||
1672 | * config/aarch64/aarch64.cc (aarch64_layout_frame): Tweak | ||
1673 | calculation of initial_adjust for frames in which all saves | ||
1674 | are SVE saves. | ||
1675 | --- | ||
1676 | gcc/config/aarch64/aarch64.cc | 5 ++--- | ||
1677 | 1 file changed, 2 insertions(+), 3 deletions(-) | ||
1678 | |||
1679 | diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc | ||
1680 | index 31b00094c2a..1aa79da0673 100644 | ||
1681 | --- a/gcc/config/aarch64/aarch64.cc | ||
1682 | +++ b/gcc/config/aarch64/aarch64.cc | ||
1683 | @@ -8675,11 +8675,10 @@ aarch64_layout_frame (void) | ||
1684 | { | ||
1685 | /* Frame in which all saves are SVE saves: | ||
1686 | |||
1687 | - sub sp, sp, hard_fp_offset + below_hard_fp_saved_regs_size | ||
1688 | + sub sp, sp, frame_size - bytes_below_saved_regs | ||
1689 | save SVE registers relative to SP | ||
1690 | sub sp, sp, bytes_below_saved_regs */ | ||
1691 | - frame.initial_adjust = (frame.bytes_above_hard_fp | ||
1692 | - + frame.below_hard_fp_saved_regs_size); | ||
1693 | + frame.initial_adjust = frame.frame_size - frame.bytes_below_saved_regs; | ||
1694 | frame.final_adjust = frame.bytes_below_saved_regs; | ||
1695 | } | ||
1696 | else if (frame.bytes_above_hard_fp.is_constant (&const_above_fp) | ||
1697 | -- | ||
1698 | 2.34.1 | ||
1699 | |||
1700 | |||
1701 | From aaa1a0a5912d9e5d571e5f1c6f09ceac99544ab5 Mon Sep 17 00:00:00 2001 | ||
1702 | From: Richard Sandiford <richard.sandiford@arm.com> | ||
1703 | Date: Tue, 12 Sep 2023 16:07:18 +0100 | ||
1704 | Subject: [PATCH 14/19] aarch64: Tweak stack clash boundary condition | ||
1705 | |||
1706 | The AArch64 ABI says that, when stack clash protection is used, | ||
1707 | there can be a maximum of 1KiB of unprobed space at sp on entry | ||
1708 | to a function. Therefore, we need to probe when allocating | ||
1709 | >= guard_size - 1KiB of data (>= rather than >). This is what | ||
1710 | GCC does. | ||
1711 | |||
1712 | If an allocation is exactly guard_size bytes, it is enough to allocate | ||
1713 | those bytes and probe once at offset 1024. It isn't possible to use a | ||
1714 | single probe at any other offset: higher would complicate later code, | ||
1715 | by leaving more unprobed space than usual, while lower would risk | ||
1716 | leaving an entire page unprobed. For simplicity, the code probes all | ||
1717 | allocations at offset 1024. | ||
1718 | |||
1719 | Some register saves also act as probes. If we need to allocate | ||
1720 | more space below the last such register save probe, we need to | ||
1721 | probe the allocation if it is > 1KiB. Again, this allocation is | ||
1722 | then sometimes (but not always) probed at offset 1024. This sort of | ||
1723 | allocation is currently only used for outgoing arguments, which are | ||
1724 | rarely this big. | ||
1725 | |||
1726 | However, the code also probed if this final outgoing-arguments | ||
1727 | allocation was == 1KiB, rather than just > 1KiB. This isn't | ||
1728 | necessary, since the register save then probes at offset 1024 | ||
1729 | as required. Continuing to probe allocations of exactly 1KiB | ||
1730 | would complicate later patches. | ||
1731 | |||
1732 | gcc/ | ||
1733 | * config/aarch64/aarch64.cc (aarch64_allocate_and_probe_stack_space): | ||
1734 | Don't probe final allocations that are exactly 1KiB in size (after | ||
1735 | unprobed space above the final allocation has been deducted). | ||
1736 | |||
1737 | gcc/testsuite/ | ||
1738 | * gcc.target/aarch64/stack-check-prologue-17.c: New test. | ||
1739 | --- | ||
1740 | gcc/config/aarch64/aarch64.cc | 4 +- | ||
1741 | .../aarch64/stack-check-prologue-17.c | 55 +++++++++++++++++++ | ||
1742 | 2 files changed, 58 insertions(+), 1 deletion(-) | ||
1743 | create mode 100644 gcc/testsuite/gcc.target/aarch64/stack-check-prologue-17.c | ||
1744 | |||
1745 | diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc | ||
1746 | index 1aa79da0673..5cad847977a 100644 | ||
1747 | --- a/gcc/config/aarch64/aarch64.cc | ||
1748 | +++ b/gcc/config/aarch64/aarch64.cc | ||
1749 | @@ -9648,9 +9648,11 @@ aarch64_allocate_and_probe_stack_space (rtx temp1, rtx temp2, | ||
1750 | HOST_WIDE_INT guard_size | ||
1751 | = 1 << param_stack_clash_protection_guard_size; | ||
1752 | HOST_WIDE_INT guard_used_by_caller = STACK_CLASH_CALLER_GUARD; | ||
1753 | + HOST_WIDE_INT byte_sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT; | ||
1754 | + gcc_assert (multiple_p (poly_size, byte_sp_alignment)); | ||
1755 | HOST_WIDE_INT min_probe_threshold | ||
1756 | = (final_adjustment_p | ||
1757 | - ? guard_used_by_caller | ||
1758 | + ? guard_used_by_caller + byte_sp_alignment | ||
1759 | : guard_size - guard_used_by_caller); | ||
1760 | /* When doing the final adjustment for the outgoing arguments, take into | ||
1761 | account any unprobed space there is above the current SP. There are | ||
1762 | diff --git a/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-17.c b/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-17.c | ||
1763 | new file mode 100644 | ||
1764 | index 00000000000..0d8a25d73a2 | ||
1765 | --- /dev/null | ||
1766 | +++ b/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-17.c | ||
1767 | @@ -0,0 +1,55 @@ | ||
1768 | +/* { dg-options "-O2 -fstack-clash-protection -fomit-frame-pointer --param stack-clash-protection-guard-size=12" } */ | ||
1769 | +/* { dg-final { check-function-bodies "**" "" } } */ | ||
1770 | + | ||
1771 | +void f(int, ...); | ||
1772 | +void g(); | ||
1773 | + | ||
1774 | +/* | ||
1775 | +** test1: | ||
1776 | +** ... | ||
1777 | +** str x30, \[sp\] | ||
1778 | +** sub sp, sp, #1024 | ||
1779 | +** cbnz w0, .* | ||
1780 | +** bl g | ||
1781 | +** ... | ||
1782 | +*/ | ||
1783 | +int test1(int z) { | ||
1784 | + __uint128_t x = 0; | ||
1785 | + int y[0x400]; | ||
1786 | + if (z) | ||
1787 | + { | ||
1788 | + f(0, 0, 0, 0, 0, 0, 0, &y, | ||
1789 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
1790 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
1791 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
1792 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x); | ||
1793 | + } | ||
1794 | + g(); | ||
1795 | + return 1; | ||
1796 | +} | ||
1797 | + | ||
1798 | +/* | ||
1799 | +** test2: | ||
1800 | +** ... | ||
1801 | +** str x30, \[sp\] | ||
1802 | +** sub sp, sp, #1040 | ||
1803 | +** str xzr, \[sp\] | ||
1804 | +** cbnz w0, .* | ||
1805 | +** bl g | ||
1806 | +** ... | ||
1807 | +*/ | ||
1808 | +int test2(int z) { | ||
1809 | + __uint128_t x = 0; | ||
1810 | + int y[0x400]; | ||
1811 | + if (z) | ||
1812 | + { | ||
1813 | + f(0, 0, 0, 0, 0, 0, 0, &y, | ||
1814 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
1815 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
1816 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
1817 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
1818 | + x); | ||
1819 | + } | ||
1820 | + g(); | ||
1821 | + return 1; | ||
1822 | +} | ||
1823 | -- | ||
1824 | 2.34.1 | ||
1825 | |||
1826 | |||
1827 | From 8433953434a7b58c0923140d39eb3c5988c1d097 Mon Sep 17 00:00:00 2001 | ||
1828 | From: Richard Sandiford <richard.sandiford@arm.com> | ||
1829 | Date: Tue, 12 Sep 2023 16:07:19 +0100 | ||
1830 | Subject: [PATCH 15/19] aarch64: Put LR save probe in first 16 bytes | ||
1831 | |||
1832 | -fstack-clash-protection uses the save of LR as a probe for the next | ||
1833 | allocation. The next allocation could be: | ||
1834 | |||
1835 | * another part of the static frame, e.g. when allocating SVE save slots | ||
1836 | or outgoing arguments | ||
1837 | |||
1838 | * an alloca in the same function | ||
1839 | |||
1840 | * an allocation made by a callee function | ||
1841 | |||
1842 | However, when -fomit-frame-pointer is used, the LR save slot is placed | ||
1843 | above the other GPR save slots. It could therefore be up to 80 bytes | ||
1844 | above the base of the GPR save area (which is also the hard fp address). | ||
1845 | |||
1846 | aarch64_allocate_and_probe_stack_space took this into account when | ||
1847 | deciding how much subsequent space could be allocated without needing | ||
1848 | a probe. However, it interacted badly with: | ||
1849 | |||
1850 | /* If doing a small final adjustment, we always probe at offset 0. | ||
1851 | This is done to avoid issues when LR is not at position 0 or when | ||
1852 | the final adjustment is smaller than the probing offset. */ | ||
1853 | else if (final_adjustment_p && rounded_size == 0) | ||
1854 | residual_probe_offset = 0; | ||
1855 | |||
1856 | which forces any allocation that is smaller than the guard page size | ||
1857 | to be probed at offset 0 rather than the usual offset 1024. It was | ||
1858 | therefore possible to construct cases in which we had: | ||
1859 | |||
1860 | * a probe using LR at SP + 80 bytes (or some other value >= 16) | ||
1861 | * an allocation of the guard page size - 16 bytes | ||
1862 | * a probe at SP + 0 | ||
1863 | |||
1864 | which allocates guard page size + 64 consecutive unprobed bytes. | ||
1865 | |||
1866 | This patch requires the LR probe to be in the first 16 bytes of the | ||
1867 | save area when stack clash protection is active. Doing it | ||
1868 | unconditionally would cause code-quality regressions. | ||
1869 | |||
1870 | Putting LR before other registers prevents push/pop allocation | ||
1871 | when shadow call stacks are enabled, since LR is restored | ||
1872 | separately from the other callee-saved registers. | ||
1873 | |||
1874 | The new comment doesn't say that the probe register is required | ||
1875 | to be LR, since a later patch removes that restriction. | ||
1876 | |||
1877 | gcc/ | ||
1878 | * config/aarch64/aarch64.cc (aarch64_layout_frame): Ensure that | ||
1879 | the LR save slot is in the first 16 bytes of the register save area. | ||
1880 | Only form STP/LDP push/pop candidates if both registers are valid. | ||
1881 | (aarch64_allocate_and_probe_stack_space): Remove workaround for | ||
1882 | when LR was not in the first 16 bytes. | ||
1883 | |||
1884 | gcc/testsuite/ | ||
1885 | * gcc.target/aarch64/stack-check-prologue-18.c: New test. | ||
1886 | * gcc.target/aarch64/stack-check-prologue-19.c: Likewise. | ||
1887 | * gcc.target/aarch64/stack-check-prologue-20.c: Likewise. | ||
1888 | --- | ||
1889 | gcc/config/aarch64/aarch64.cc | 72 ++++++------- | ||
1890 | .../aarch64/stack-check-prologue-18.c | 100 ++++++++++++++++++ | ||
1891 | .../aarch64/stack-check-prologue-19.c | 100 ++++++++++++++++++ | ||
1892 | .../aarch64/stack-check-prologue-20.c | 3 + | ||
1893 | 4 files changed, 233 insertions(+), 42 deletions(-) | ||
1894 | create mode 100644 gcc/testsuite/gcc.target/aarch64/stack-check-prologue-18.c | ||
1895 | create mode 100644 gcc/testsuite/gcc.target/aarch64/stack-check-prologue-19.c | ||
1896 | create mode 100644 gcc/testsuite/gcc.target/aarch64/stack-check-prologue-20.c | ||
1897 | |||
1898 | diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc | ||
1899 | index 5cad847977a..a765f92329d 100644 | ||
1900 | --- a/gcc/config/aarch64/aarch64.cc | ||
1901 | +++ b/gcc/config/aarch64/aarch64.cc | ||
1902 | @@ -8534,26 +8534,34 @@ aarch64_layout_frame (void) | ||
1903 | bool saves_below_hard_fp_p | ||
1904 | = maybe_ne (frame.below_hard_fp_saved_regs_size, 0); | ||
1905 | frame.bytes_below_hard_fp = offset; | ||
1906 | + | ||
1907 | + auto allocate_gpr_slot = [&](unsigned int regno) | ||
1908 | + { | ||
1909 | + frame.reg_offset[regno] = offset; | ||
1910 | + if (frame.wb_push_candidate1 == INVALID_REGNUM) | ||
1911 | + frame.wb_push_candidate1 = regno; | ||
1912 | + else if (frame.wb_push_candidate2 == INVALID_REGNUM) | ||
1913 | + frame.wb_push_candidate2 = regno; | ||
1914 | + offset += UNITS_PER_WORD; | ||
1915 | + }; | ||
1916 | + | ||
1917 | if (frame.emit_frame_chain) | ||
1918 | { | ||
1919 | /* FP and LR are placed in the linkage record. */ | ||
1920 | - frame.reg_offset[R29_REGNUM] = offset; | ||
1921 | - frame.wb_push_candidate1 = R29_REGNUM; | ||
1922 | - frame.reg_offset[R30_REGNUM] = offset + UNITS_PER_WORD; | ||
1923 | - frame.wb_push_candidate2 = R30_REGNUM; | ||
1924 | - offset += 2 * UNITS_PER_WORD; | ||
1925 | + allocate_gpr_slot (R29_REGNUM); | ||
1926 | + allocate_gpr_slot (R30_REGNUM); | ||
1927 | } | ||
1928 | + else if (flag_stack_clash_protection | ||
1929 | + && known_eq (frame.reg_offset[R30_REGNUM], SLOT_REQUIRED)) | ||
1930 | + /* Put the LR save slot first, since it makes a good choice of probe | ||
1931 | + for stack clash purposes. The idea is that the link register usually | ||
1932 | + has to be saved before a call anyway, and so we lose little by | ||
1933 | + stopping it from being individually shrink-wrapped. */ | ||
1934 | + allocate_gpr_slot (R30_REGNUM); | ||
1935 | |||
1936 | for (regno = R0_REGNUM; regno <= R30_REGNUM; regno++) | ||
1937 | if (known_eq (frame.reg_offset[regno], SLOT_REQUIRED)) | ||
1938 | - { | ||
1939 | - frame.reg_offset[regno] = offset; | ||
1940 | - if (frame.wb_push_candidate1 == INVALID_REGNUM) | ||
1941 | - frame.wb_push_candidate1 = regno; | ||
1942 | - else if (frame.wb_push_candidate2 == INVALID_REGNUM) | ||
1943 | - frame.wb_push_candidate2 = regno; | ||
1944 | - offset += UNITS_PER_WORD; | ||
1945 | - } | ||
1946 | + allocate_gpr_slot (regno); | ||
1947 | |||
1948 | poly_int64 max_int_offset = offset; | ||
1949 | offset = aligned_upper_bound (offset, STACK_BOUNDARY / BITS_PER_UNIT); | ||
1950 | @@ -8631,10 +8639,13 @@ aarch64_layout_frame (void) | ||
1951 | max_push_offset to 0, because no registers are popped at this time, | ||
1952 | so callee_adjust cannot be adjusted. */ | ||
1953 | HOST_WIDE_INT max_push_offset = 0; | ||
1954 | - if (frame.wb_pop_candidate2 != INVALID_REGNUM) | ||
1955 | - max_push_offset = 512; | ||
1956 | - else if (frame.wb_pop_candidate1 != INVALID_REGNUM) | ||
1957 | - max_push_offset = 256; | ||
1958 | + if (frame.wb_pop_candidate1 != INVALID_REGNUM) | ||
1959 | + { | ||
1960 | + if (frame.wb_pop_candidate2 != INVALID_REGNUM) | ||
1961 | + max_push_offset = 512; | ||
1962 | + else | ||
1963 | + max_push_offset = 256; | ||
1964 | + } | ||
1965 | |||
1966 | HOST_WIDE_INT const_size, const_below_saved_regs, const_above_fp; | ||
1967 | HOST_WIDE_INT const_saved_regs_size; | ||
1968 | @@ -9654,29 +9665,6 @@ aarch64_allocate_and_probe_stack_space (rtx temp1, rtx temp2, | ||
1969 | = (final_adjustment_p | ||
1970 | ? guard_used_by_caller + byte_sp_alignment | ||
1971 | : guard_size - guard_used_by_caller); | ||
1972 | - /* When doing the final adjustment for the outgoing arguments, take into | ||
1973 | - account any unprobed space there is above the current SP. There are | ||
1974 | - two cases: | ||
1975 | - | ||
1976 | - - When saving SVE registers below the hard frame pointer, we force | ||
1977 | - the lowest save to take place in the prologue before doing the final | ||
1978 | - adjustment (i.e. we don't allow the save to be shrink-wrapped). | ||
1979 | - This acts as a probe at SP, so there is no unprobed space. | ||
1980 | - | ||
1981 | - - When there are no SVE register saves, we use the store of the link | ||
1982 | - register as a probe. We can't assume that LR was saved at position 0 | ||
1983 | - though, so treat any space below it as unprobed. */ | ||
1984 | - if (final_adjustment_p | ||
1985 | - && known_eq (frame.below_hard_fp_saved_regs_size, 0)) | ||
1986 | - { | ||
1987 | - poly_int64 lr_offset = (frame.reg_offset[LR_REGNUM] | ||
1988 | - - frame.bytes_below_saved_regs); | ||
1989 | - if (known_ge (lr_offset, 0)) | ||
1990 | - min_probe_threshold -= lr_offset.to_constant (); | ||
1991 | - else | ||
1992 | - gcc_assert (!flag_stack_clash_protection || known_eq (poly_size, 0)); | ||
1993 | - } | ||
1994 | - | ||
1995 | poly_int64 frame_size = frame.frame_size; | ||
1996 | |||
1997 | /* We should always have a positive probe threshold. */ | ||
1998 | @@ -9856,8 +9844,8 @@ aarch64_allocate_and_probe_stack_space (rtx temp1, rtx temp2, | ||
1999 | if (final_adjustment_p && rounded_size != 0) | ||
2000 | min_probe_threshold = 0; | ||
2001 | /* If doing a small final adjustment, we always probe at offset 0. | ||
2002 | - This is done to avoid issues when LR is not at position 0 or when | ||
2003 | - the final adjustment is smaller than the probing offset. */ | ||
2004 | + This is done to avoid issues when the final adjustment is smaller | ||
2005 | + than the probing offset. */ | ||
2006 | else if (final_adjustment_p && rounded_size == 0) | ||
2007 | residual_probe_offset = 0; | ||
2008 | |||
2009 | diff --git a/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-18.c b/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-18.c | ||
2010 | new file mode 100644 | ||
2011 | index 00000000000..82447d20fff | ||
2012 | --- /dev/null | ||
2013 | +++ b/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-18.c | ||
2014 | @@ -0,0 +1,100 @@ | ||
2015 | +/* { dg-options "-O2 -fstack-clash-protection -fomit-frame-pointer --param stack-clash-protection-guard-size=12" } */ | ||
2016 | +/* { dg-final { check-function-bodies "**" "" } } */ | ||
2017 | + | ||
2018 | +void f(int, ...); | ||
2019 | +void g(); | ||
2020 | + | ||
2021 | +/* | ||
2022 | +** test1: | ||
2023 | +** ... | ||
2024 | +** str x30, \[sp\] | ||
2025 | +** sub sp, sp, #4064 | ||
2026 | +** str xzr, \[sp\] | ||
2027 | +** cbnz w0, .* | ||
2028 | +** bl g | ||
2029 | +** ... | ||
2030 | +** str x26, \[sp, #?4128\] | ||
2031 | +** ... | ||
2032 | +*/ | ||
2033 | +int test1(int z) { | ||
2034 | + __uint128_t x = 0; | ||
2035 | + int y[0x400]; | ||
2036 | + if (z) | ||
2037 | + { | ||
2038 | + asm volatile ("" ::: | ||
2039 | + "x19", "x20", "x21", "x22", "x23", "x24", "x25", "x26"); | ||
2040 | + f(0, 0, 0, 0, 0, 0, 0, &y, | ||
2041 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2042 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2043 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2044 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2045 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2046 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2047 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2048 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2049 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2050 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2051 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2052 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2053 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2054 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2055 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2056 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x); | ||
2057 | + } | ||
2058 | + g(); | ||
2059 | + return 1; | ||
2060 | +} | ||
2061 | + | ||
2062 | +/* | ||
2063 | +** test2: | ||
2064 | +** ... | ||
2065 | +** str x30, \[sp\] | ||
2066 | +** sub sp, sp, #1040 | ||
2067 | +** str xzr, \[sp\] | ||
2068 | +** cbnz w0, .* | ||
2069 | +** bl g | ||
2070 | +** ... | ||
2071 | +*/ | ||
2072 | +int test2(int z) { | ||
2073 | + __uint128_t x = 0; | ||
2074 | + int y[0x400]; | ||
2075 | + if (z) | ||
2076 | + { | ||
2077 | + asm volatile ("" ::: | ||
2078 | + "x19", "x20", "x21", "x22", "x23", "x24", "x25", "x26"); | ||
2079 | + f(0, 0, 0, 0, 0, 0, 0, &y, | ||
2080 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2081 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2082 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2083 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2084 | + x); | ||
2085 | + } | ||
2086 | + g(); | ||
2087 | + return 1; | ||
2088 | +} | ||
2089 | + | ||
2090 | +/* | ||
2091 | +** test3: | ||
2092 | +** ... | ||
2093 | +** str x30, \[sp\] | ||
2094 | +** sub sp, sp, #1024 | ||
2095 | +** cbnz w0, .* | ||
2096 | +** bl g | ||
2097 | +** ... | ||
2098 | +*/ | ||
2099 | +int test3(int z) { | ||
2100 | + __uint128_t x = 0; | ||
2101 | + int y[0x400]; | ||
2102 | + if (z) | ||
2103 | + { | ||
2104 | + asm volatile ("" ::: | ||
2105 | + "x19", "x20", "x21", "x22", "x23", "x24", "x25", "x26"); | ||
2106 | + f(0, 0, 0, 0, 0, 0, 0, &y, | ||
2107 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2108 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2109 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2110 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x); | ||
2111 | + } | ||
2112 | + g(); | ||
2113 | + return 1; | ||
2114 | +} | ||
2115 | diff --git a/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-19.c b/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-19.c | ||
2116 | new file mode 100644 | ||
2117 | index 00000000000..73ac3e4e4eb | ||
2118 | --- /dev/null | ||
2119 | +++ b/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-19.c | ||
2120 | @@ -0,0 +1,100 @@ | ||
2121 | +/* { dg-options "-O2 -fstack-clash-protection -fomit-frame-pointer --param stack-clash-protection-guard-size=12 -fsanitize=shadow-call-stack -ffixed-x18" } */ | ||
2122 | +/* { dg-final { check-function-bodies "**" "" } } */ | ||
2123 | + | ||
2124 | +void f(int, ...); | ||
2125 | +void g(); | ||
2126 | + | ||
2127 | +/* | ||
2128 | +** test1: | ||
2129 | +** ... | ||
2130 | +** str x30, \[sp\] | ||
2131 | +** sub sp, sp, #4064 | ||
2132 | +** str xzr, \[sp\] | ||
2133 | +** cbnz w0, .* | ||
2134 | +** bl g | ||
2135 | +** ... | ||
2136 | +** str x26, \[sp, #?4128\] | ||
2137 | +** ... | ||
2138 | +*/ | ||
2139 | +int test1(int z) { | ||
2140 | + __uint128_t x = 0; | ||
2141 | + int y[0x400]; | ||
2142 | + if (z) | ||
2143 | + { | ||
2144 | + asm volatile ("" ::: | ||
2145 | + "x19", "x20", "x21", "x22", "x23", "x24", "x25", "x26"); | ||
2146 | + f(0, 0, 0, 0, 0, 0, 0, &y, | ||
2147 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2148 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2149 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2150 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2151 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2152 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2153 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2154 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2155 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2156 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2157 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2158 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2159 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2160 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2161 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2162 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x); | ||
2163 | + } | ||
2164 | + g(); | ||
2165 | + return 1; | ||
2166 | +} | ||
2167 | + | ||
2168 | +/* | ||
2169 | +** test2: | ||
2170 | +** ... | ||
2171 | +** str x30, \[sp\] | ||
2172 | +** sub sp, sp, #1040 | ||
2173 | +** str xzr, \[sp\] | ||
2174 | +** cbnz w0, .* | ||
2175 | +** bl g | ||
2176 | +** ... | ||
2177 | +*/ | ||
2178 | +int test2(int z) { | ||
2179 | + __uint128_t x = 0; | ||
2180 | + int y[0x400]; | ||
2181 | + if (z) | ||
2182 | + { | ||
2183 | + asm volatile ("" ::: | ||
2184 | + "x19", "x20", "x21", "x22", "x23", "x24", "x25", "x26"); | ||
2185 | + f(0, 0, 0, 0, 0, 0, 0, &y, | ||
2186 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2187 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2188 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2189 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2190 | + x); | ||
2191 | + } | ||
2192 | + g(); | ||
2193 | + return 1; | ||
2194 | +} | ||
2195 | + | ||
2196 | +/* | ||
2197 | +** test3: | ||
2198 | +** ... | ||
2199 | +** str x30, \[sp\] | ||
2200 | +** sub sp, sp, #1024 | ||
2201 | +** cbnz w0, .* | ||
2202 | +** bl g | ||
2203 | +** ... | ||
2204 | +*/ | ||
2205 | +int test3(int z) { | ||
2206 | + __uint128_t x = 0; | ||
2207 | + int y[0x400]; | ||
2208 | + if (z) | ||
2209 | + { | ||
2210 | + asm volatile ("" ::: | ||
2211 | + "x19", "x20", "x21", "x22", "x23", "x24", "x25", "x26"); | ||
2212 | + f(0, 0, 0, 0, 0, 0, 0, &y, | ||
2213 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2214 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2215 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, | ||
2216 | + x, x, x, x, x, x, x, x, x, x, x, x, x, x, x, x); | ||
2217 | + } | ||
2218 | + g(); | ||
2219 | + return 1; | ||
2220 | +} | ||
2221 | diff --git a/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-20.c b/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-20.c | ||
2222 | new file mode 100644 | ||
2223 | index 00000000000..690aae8dfd5 | ||
2224 | --- /dev/null | ||
2225 | +++ b/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-20.c | ||
2226 | @@ -0,0 +1,3 @@ | ||
2227 | +/* { dg-options "-O2 -fstack-protector-all -fstack-clash-protection -fomit-frame-pointer --param stack-clash-protection-guard-size=12 -fsanitize=shadow-call-stack -ffixed-x18" } */ | ||
2228 | + | ||
2229 | +#include "stack-check-prologue-19.c" | ||
2230 | -- | ||
2231 | 2.34.1 | ||
2232 | |||
2233 | |||
2234 | From eea1759073e09dd1aefbc9a881601ab1eebfdd18 Mon Sep 17 00:00:00 2001 | ||
2235 | From: Richard Sandiford <richard.sandiford@arm.com> | ||
2236 | Date: Tue, 12 Sep 2023 16:07:19 +0100 | ||
2237 | Subject: [PATCH 16/19] aarch64: Simplify probe of final frame allocation | ||
2238 | |||
2239 | Previous patches ensured that the final frame allocation only needs | ||
2240 | a probe when the size is strictly greater than 1KiB. It's therefore | ||
2241 | safe to use the normal 1024 probe offset in all cases. | ||
2242 | |||
2243 | The main motivation for doing this is to simplify the code and | ||
2244 | remove the number of special cases. | ||
2245 | |||
2246 | gcc/ | ||
2247 | * config/aarch64/aarch64.cc (aarch64_allocate_and_probe_stack_space): | ||
2248 | Always probe the residual allocation at offset 1024, asserting | ||
2249 | that that is in range. | ||
2250 | |||
2251 | gcc/testsuite/ | ||
2252 | * gcc.target/aarch64/stack-check-prologue-17.c: Expect the probe | ||
2253 | to be at offset 1024 rather than offset 0. | ||
2254 | * gcc.target/aarch64/stack-check-prologue-18.c: Likewise. | ||
2255 | * gcc.target/aarch64/stack-check-prologue-19.c: Likewise. | ||
2256 | --- | ||
2257 | gcc/config/aarch64/aarch64.cc | 12 ++++-------- | ||
2258 | .../gcc.target/aarch64/stack-check-prologue-17.c | 2 +- | ||
2259 | .../gcc.target/aarch64/stack-check-prologue-18.c | 4 ++-- | ||
2260 | .../gcc.target/aarch64/stack-check-prologue-19.c | 4 ++-- | ||
2261 | 4 files changed, 9 insertions(+), 13 deletions(-) | ||
2262 | |||
2263 | diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc | ||
2264 | index a765f92329d..37809a306f7 100644 | ||
2265 | --- a/gcc/config/aarch64/aarch64.cc | ||
2266 | +++ b/gcc/config/aarch64/aarch64.cc | ||
2267 | @@ -9838,16 +9838,12 @@ aarch64_allocate_and_probe_stack_space (rtx temp1, rtx temp2, | ||
2268 | are still safe. */ | ||
2269 | if (residual) | ||
2270 | { | ||
2271 | - HOST_WIDE_INT residual_probe_offset = guard_used_by_caller; | ||
2272 | + gcc_assert (guard_used_by_caller + byte_sp_alignment <= size); | ||
2273 | + | ||
2274 | /* If we're doing final adjustments, and we've done any full page | ||
2275 | allocations then any residual needs to be probed. */ | ||
2276 | if (final_adjustment_p && rounded_size != 0) | ||
2277 | min_probe_threshold = 0; | ||
2278 | - /* If doing a small final adjustment, we always probe at offset 0. | ||
2279 | - This is done to avoid issues when the final adjustment is smaller | ||
2280 | - than the probing offset. */ | ||
2281 | - else if (final_adjustment_p && rounded_size == 0) | ||
2282 | - residual_probe_offset = 0; | ||
2283 | |||
2284 | aarch64_sub_sp (temp1, temp2, residual, frame_related_p); | ||
2285 | if (residual >= min_probe_threshold) | ||
2286 | @@ -9858,8 +9854,8 @@ aarch64_allocate_and_probe_stack_space (rtx temp1, rtx temp2, | ||
2287 | HOST_WIDE_INT_PRINT_DEC " bytes, probing will be required." | ||
2288 | "\n", residual); | ||
2289 | |||
2290 | - emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx, | ||
2291 | - residual_probe_offset)); | ||
2292 | + emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx, | ||
2293 | + guard_used_by_caller)); | ||
2294 | emit_insn (gen_blockage ()); | ||
2295 | } | ||
2296 | } | ||
2297 | diff --git a/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-17.c b/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-17.c | ||
2298 | index 0d8a25d73a2..f0ec1389771 100644 | ||
2299 | --- a/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-17.c | ||
2300 | +++ b/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-17.c | ||
2301 | @@ -33,7 +33,7 @@ int test1(int z) { | ||
2302 | ** ... | ||
2303 | ** str x30, \[sp\] | ||
2304 | ** sub sp, sp, #1040 | ||
2305 | -** str xzr, \[sp\] | ||
2306 | +** str xzr, \[sp, #?1024\] | ||
2307 | ** cbnz w0, .* | ||
2308 | ** bl g | ||
2309 | ** ... | ||
2310 | diff --git a/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-18.c b/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-18.c | ||
2311 | index 82447d20fff..6383bec5ebc 100644 | ||
2312 | --- a/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-18.c | ||
2313 | +++ b/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-18.c | ||
2314 | @@ -9,7 +9,7 @@ void g(); | ||
2315 | ** ... | ||
2316 | ** str x30, \[sp\] | ||
2317 | ** sub sp, sp, #4064 | ||
2318 | -** str xzr, \[sp\] | ||
2319 | +** str xzr, \[sp, #?1024\] | ||
2320 | ** cbnz w0, .* | ||
2321 | ** bl g | ||
2322 | ** ... | ||
2323 | @@ -50,7 +50,7 @@ int test1(int z) { | ||
2324 | ** ... | ||
2325 | ** str x30, \[sp\] | ||
2326 | ** sub sp, sp, #1040 | ||
2327 | -** str xzr, \[sp\] | ||
2328 | +** str xzr, \[sp, #?1024\] | ||
2329 | ** cbnz w0, .* | ||
2330 | ** bl g | ||
2331 | ** ... | ||
2332 | diff --git a/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-19.c b/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-19.c | ||
2333 | index 73ac3e4e4eb..562039b5e9b 100644 | ||
2334 | --- a/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-19.c | ||
2335 | +++ b/gcc/testsuite/gcc.target/aarch64/stack-check-prologue-19.c | ||
2336 | @@ -9,7 +9,7 @@ void g(); | ||
2337 | ** ... | ||
2338 | ** str x30, \[sp\] | ||
2339 | ** sub sp, sp, #4064 | ||
2340 | -** str xzr, \[sp\] | ||
2341 | +** str xzr, \[sp, #?1024\] | ||
2342 | ** cbnz w0, .* | ||
2343 | ** bl g | ||
2344 | ** ... | ||
2345 | @@ -50,7 +50,7 @@ int test1(int z) { | ||
2346 | ** ... | ||
2347 | ** str x30, \[sp\] | ||
2348 | ** sub sp, sp, #1040 | ||
2349 | -** str xzr, \[sp\] | ||
2350 | +** str xzr, \[sp, #?1024\] | ||
2351 | ** cbnz w0, .* | ||
2352 | ** bl g | ||
2353 | ** ... | ||
2354 | -- | ||
2355 | 2.34.1 | ||
2356 | |||
2357 | |||
2358 | From 96d85187c3b9c9a7efc2fd698c3d452e80d8aa47 Mon Sep 17 00:00:00 2001 | ||
2359 | From: Richard Sandiford <richard.sandiford@arm.com> | ||
2360 | Date: Tue, 12 Sep 2023 16:07:20 +0100 | ||
2361 | Subject: [PATCH 17/19] aarch64: Explicitly record probe registers in frame | ||
2362 | info | ||
2363 | |||
2364 | The stack frame is currently divided into three areas: | ||
2365 | |||
2366 | A: the area above the hard frame pointer | ||
2367 | B: the SVE saves below the hard frame pointer | ||
2368 | C: the outgoing arguments | ||
2369 | |||
2370 | If the stack frame is allocated in one chunk, the allocation needs a | ||
2371 | probe if the frame size is >= guard_size - 1KiB. In addition, if the | ||
2372 | function is not a leaf function, it must probe an address no more than | ||
2373 | 1KiB above the outgoing SP. We ensured the second condition by | ||
2374 | |||
2375 | (1) using single-chunk allocations for non-leaf functions only if | ||
2376 | the link register save slot is within 512 bytes of the bottom | ||
2377 | of the frame; and | ||
2378 | |||
2379 | (2) using the link register save as a probe (meaning, for instance, | ||
2380 | that it can't be individually shrink wrapped) | ||
2381 | |||
2382 | If instead the stack is allocated in multiple chunks, then: | ||
2383 | |||
2384 | * an allocation involving only the outgoing arguments (C above) requires | ||
2385 | a probe if the allocation size is > 1KiB | ||
2386 | |||
2387 | * any other allocation requires a probe if the allocation size | ||
2388 | is >= guard_size - 1KiB | ||
2389 | |||
2390 | * second and subsequent allocations require the previous allocation | ||
2391 | to probe at the bottom of the allocated area, regardless of the size | ||
2392 | of that previous allocation | ||
2393 | |||
2394 | The final point means that, unlike for single allocations, | ||
2395 | it can be necessary to have both a non-SVE register probe and | ||
2396 | an SVE register probe. For example: | ||
2397 | |||
2398 | * allocate A, probe using a non-SVE register save | ||
2399 | * allocate B, probe using an SVE register save | ||
2400 | * allocate C | ||
2401 | |||
2402 | The non-SVE register used in this case was again the link register. | ||
2403 | It was previously used even if the link register save slot was some | ||
2404 | bytes above the bottom of the non-SVE register saves, but an earlier | ||
2405 | patch avoided that by putting the link register save slot first. | ||
2406 | |||
2407 | As a belt-and-braces fix, this patch explicitly records which | ||
2408 | probe registers we're using and allows the non-SVE probe to be | ||
2409 | whichever register comes first (as for SVE). | ||
2410 | |||
2411 | The patch also avoids unnecessary probes in sve/pcs/stack_clash_3.c. | ||
2412 | |||
2413 | gcc/ | ||
2414 | * config/aarch64/aarch64.h (aarch64_frame::sve_save_and_probe) | ||
2415 | (aarch64_frame::hard_fp_save_and_probe): New fields. | ||
2416 | * config/aarch64/aarch64.cc (aarch64_layout_frame): Initialize them. | ||
2417 | Rather than asserting that a leaf function saves LR, instead assert | ||
2418 | that a leaf function saves something. | ||
2419 | (aarch64_get_separate_components): Prevent the chosen probe | ||
2420 | registers from being individually shrink-wrapped. | ||
2421 | (aarch64_allocate_and_probe_stack_space): Remove workaround for | ||
2422 | probe registers that aren't at the bottom of the previous allocation. | ||
2423 | |||
2424 | gcc/testsuite/ | ||
2425 | * gcc.target/aarch64/sve/pcs/stack_clash_3.c: Avoid redundant probes. | ||
2426 | --- | ||
2427 | gcc/config/aarch64/aarch64.cc | 68 +++++++++++++++---- | ||
2428 | gcc/config/aarch64/aarch64.h | 8 +++ | ||
2429 | .../aarch64/sve/pcs/stack_clash_3.c | 6 +- | ||
2430 | 3 files changed, 64 insertions(+), 18 deletions(-) | ||
2431 | |||
2432 | diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc | ||
2433 | index 37809a306f7..6c59c39a639 100644 | ||
2434 | --- a/gcc/config/aarch64/aarch64.cc | ||
2435 | +++ b/gcc/config/aarch64/aarch64.cc | ||
2436 | @@ -8471,15 +8471,11 @@ aarch64_layout_frame (void) | ||
2437 | && !crtl->abi->clobbers_full_reg_p (regno)) | ||
2438 | frame.reg_offset[regno] = SLOT_REQUIRED; | ||
2439 | |||
2440 | - /* With stack-clash, LR must be saved in non-leaf functions. The saving of | ||
2441 | - LR counts as an implicit probe which allows us to maintain the invariant | ||
2442 | - described in the comment at expand_prologue. */ | ||
2443 | - gcc_assert (crtl->is_leaf | ||
2444 | - || maybe_ne (frame.reg_offset[R30_REGNUM], SLOT_NOT_REQUIRED)); | ||
2445 | |||
2446 | poly_int64 offset = crtl->outgoing_args_size; | ||
2447 | gcc_assert (multiple_p (offset, STACK_BOUNDARY / BITS_PER_UNIT)); | ||
2448 | frame.bytes_below_saved_regs = offset; | ||
2449 | + frame.sve_save_and_probe = INVALID_REGNUM; | ||
2450 | |||
2451 | /* Now assign stack slots for the registers. Start with the predicate | ||
2452 | registers, since predicate LDR and STR have a relatively small | ||
2453 | @@ -8487,6 +8483,8 @@ aarch64_layout_frame (void) | ||
2454 | for (regno = P0_REGNUM; regno <= P15_REGNUM; regno++) | ||
2455 | if (known_eq (frame.reg_offset[regno], SLOT_REQUIRED)) | ||
2456 | { | ||
2457 | + if (frame.sve_save_and_probe == INVALID_REGNUM) | ||
2458 | + frame.sve_save_and_probe = regno; | ||
2459 | frame.reg_offset[regno] = offset; | ||
2460 | offset += BYTES_PER_SVE_PRED; | ||
2461 | } | ||
2462 | @@ -8524,6 +8522,8 @@ aarch64_layout_frame (void) | ||
2463 | for (regno = V0_REGNUM; regno <= V31_REGNUM; regno++) | ||
2464 | if (known_eq (frame.reg_offset[regno], SLOT_REQUIRED)) | ||
2465 | { | ||
2466 | + if (frame.sve_save_and_probe == INVALID_REGNUM) | ||
2467 | + frame.sve_save_and_probe = regno; | ||
2468 | frame.reg_offset[regno] = offset; | ||
2469 | offset += vector_save_size; | ||
2470 | } | ||
2471 | @@ -8533,10 +8533,18 @@ aarch64_layout_frame (void) | ||
2472 | frame.below_hard_fp_saved_regs_size = offset - frame.bytes_below_saved_regs; | ||
2473 | bool saves_below_hard_fp_p | ||
2474 | = maybe_ne (frame.below_hard_fp_saved_regs_size, 0); | ||
2475 | + gcc_assert (!saves_below_hard_fp_p | ||
2476 | + || (frame.sve_save_and_probe != INVALID_REGNUM | ||
2477 | + && known_eq (frame.reg_offset[frame.sve_save_and_probe], | ||
2478 | + frame.bytes_below_saved_regs))); | ||
2479 | + | ||
2480 | frame.bytes_below_hard_fp = offset; | ||
2481 | + frame.hard_fp_save_and_probe = INVALID_REGNUM; | ||
2482 | |||
2483 | auto allocate_gpr_slot = [&](unsigned int regno) | ||
2484 | { | ||
2485 | + if (frame.hard_fp_save_and_probe == INVALID_REGNUM) | ||
2486 | + frame.hard_fp_save_and_probe = regno; | ||
2487 | frame.reg_offset[regno] = offset; | ||
2488 | if (frame.wb_push_candidate1 == INVALID_REGNUM) | ||
2489 | frame.wb_push_candidate1 = regno; | ||
2490 | @@ -8570,6 +8578,8 @@ aarch64_layout_frame (void) | ||
2491 | for (regno = V0_REGNUM; regno <= V31_REGNUM; regno++) | ||
2492 | if (known_eq (frame.reg_offset[regno], SLOT_REQUIRED)) | ||
2493 | { | ||
2494 | + if (frame.hard_fp_save_and_probe == INVALID_REGNUM) | ||
2495 | + frame.hard_fp_save_and_probe = regno; | ||
2496 | /* If there is an alignment gap between integer and fp callee-saves, | ||
2497 | allocate the last fp register to it if possible. */ | ||
2498 | if (regno == last_fp_reg | ||
2499 | @@ -8593,6 +8603,17 @@ aarch64_layout_frame (void) | ||
2500 | offset = aligned_upper_bound (offset, STACK_BOUNDARY / BITS_PER_UNIT); | ||
2501 | |||
2502 | frame.saved_regs_size = offset - frame.bytes_below_saved_regs; | ||
2503 | + gcc_assert (known_eq (frame.saved_regs_size, | ||
2504 | + frame.below_hard_fp_saved_regs_size) | ||
2505 | + || (frame.hard_fp_save_and_probe != INVALID_REGNUM | ||
2506 | + && known_eq (frame.reg_offset[frame.hard_fp_save_and_probe], | ||
2507 | + frame.bytes_below_hard_fp))); | ||
2508 | + | ||
2509 | + /* With stack-clash, a register must be saved in non-leaf functions. | ||
2510 | + The saving of the bottommost register counts as an implicit probe, | ||
2511 | + which allows us to maintain the invariant described in the comment | ||
2512 | + at expand_prologue. */ | ||
2513 | + gcc_assert (crtl->is_leaf || maybe_ne (frame.saved_regs_size, 0)); | ||
2514 | |||
2515 | offset += get_frame_size (); | ||
2516 | offset = aligned_upper_bound (offset, STACK_BOUNDARY / BITS_PER_UNIT); | ||
2517 | @@ -8723,6 +8744,25 @@ aarch64_layout_frame (void) | ||
2518 | frame.final_adjust = frame.bytes_below_saved_regs; | ||
2519 | } | ||
2520 | |||
2521 | + /* The frame is allocated in pieces, with each non-final piece | ||
2522 | + including a register save at offset 0 that acts as a probe for | ||
2523 | + the following piece. In addition, the save of the bottommost register | ||
2524 | + acts as a probe for callees and allocas. Roll back any probes that | ||
2525 | + aren't needed. | ||
2526 | + | ||
2527 | + A probe isn't needed if it is associated with the final allocation | ||
2528 | + (including callees and allocas) that happens before the epilogue is | ||
2529 | + executed. */ | ||
2530 | + if (crtl->is_leaf | ||
2531 | + && !cfun->calls_alloca | ||
2532 | + && known_eq (frame.final_adjust, 0)) | ||
2533 | + { | ||
2534 | + if (maybe_ne (frame.sve_callee_adjust, 0)) | ||
2535 | + frame.sve_save_and_probe = INVALID_REGNUM; | ||
2536 | + else | ||
2537 | + frame.hard_fp_save_and_probe = INVALID_REGNUM; | ||
2538 | + } | ||
2539 | + | ||
2540 | /* Make sure the individual adjustments add up to the full frame size. */ | ||
2541 | gcc_assert (known_eq (frame.initial_adjust | ||
2542 | + frame.callee_adjust | ||
2543 | @@ -9354,13 +9394,6 @@ aarch64_get_separate_components (void) | ||
2544 | |||
2545 | poly_int64 offset = frame.reg_offset[regno]; | ||
2546 | |||
2547 | - /* If the register is saved in the first SVE save slot, we use | ||
2548 | - it as a stack probe for -fstack-clash-protection. */ | ||
2549 | - if (flag_stack_clash_protection | ||
2550 | - && maybe_ne (frame.below_hard_fp_saved_regs_size, 0) | ||
2551 | - && known_eq (offset, frame.bytes_below_saved_regs)) | ||
2552 | - continue; | ||
2553 | - | ||
2554 | /* Get the offset relative to the register we'll use. */ | ||
2555 | if (frame_pointer_needed) | ||
2556 | offset -= frame.bytes_below_hard_fp; | ||
2557 | @@ -9395,6 +9428,13 @@ aarch64_get_separate_components (void) | ||
2558 | |||
2559 | bitmap_clear_bit (components, LR_REGNUM); | ||
2560 | bitmap_clear_bit (components, SP_REGNUM); | ||
2561 | + if (flag_stack_clash_protection) | ||
2562 | + { | ||
2563 | + if (frame.sve_save_and_probe != INVALID_REGNUM) | ||
2564 | + bitmap_clear_bit (components, frame.sve_save_and_probe); | ||
2565 | + if (frame.hard_fp_save_and_probe != INVALID_REGNUM) | ||
2566 | + bitmap_clear_bit (components, frame.hard_fp_save_and_probe); | ||
2567 | + } | ||
2568 | |||
2569 | return components; | ||
2570 | } | ||
2571 | @@ -9931,8 +9971,8 @@ aarch64_epilogue_uses (int regno) | ||
2572 | When probing is needed, we emit a probe at the start of the prologue | ||
2573 | and every PARAM_STACK_CLASH_PROTECTION_GUARD_SIZE bytes thereafter. | ||
2574 | |||
2575 | - We have to track how much space has been allocated and the only stores | ||
2576 | - to the stack we track as implicit probes are the FP/LR stores. | ||
2577 | + We can also use register saves as probes. These are stored in | ||
2578 | + sve_save_and_probe and hard_fp_save_and_probe. | ||
2579 | |||
2580 | For outgoing arguments we probe if the size is larger than 1KB, such that | ||
2581 | the ABI specified buffer is maintained for the next callee. | ||
2582 | diff --git a/gcc/config/aarch64/aarch64.h b/gcc/config/aarch64/aarch64.h | ||
2583 | index c8becb098c8..fbfb73545ba 100644 | ||
2584 | --- a/gcc/config/aarch64/aarch64.h | ||
2585 | +++ b/gcc/config/aarch64/aarch64.h | ||
2586 | @@ -863,6 +863,14 @@ struct GTY (()) aarch64_frame | ||
2587 | This is the register they should use. */ | ||
2588 | unsigned spare_pred_reg; | ||
2589 | |||
2590 | + /* An SVE register that is saved below the hard frame pointer and that acts | ||
2591 | + as a probe for later allocations, or INVALID_REGNUM if none. */ | ||
2592 | + unsigned sve_save_and_probe; | ||
2593 | + | ||
2594 | + /* A register that is saved at the hard frame pointer and that acts | ||
2595 | + as a probe for later allocations, or INVALID_REGNUM if none. */ | ||
2596 | + unsigned hard_fp_save_and_probe; | ||
2597 | + | ||
2598 | bool laid_out; | ||
2599 | |||
2600 | /* True if shadow call stack should be enabled for the current function. */ | ||
2601 | diff --git a/gcc/testsuite/gcc.target/aarch64/sve/pcs/stack_clash_3.c b/gcc/testsuite/gcc.target/aarch64/sve/pcs/stack_clash_3.c | ||
2602 | index 3e01ec36c3a..3530a0d504b 100644 | ||
2603 | --- a/gcc/testsuite/gcc.target/aarch64/sve/pcs/stack_clash_3.c | ||
2604 | +++ b/gcc/testsuite/gcc.target/aarch64/sve/pcs/stack_clash_3.c | ||
2605 | @@ -11,11 +11,10 @@ | ||
2606 | ** mov x11, sp | ||
2607 | ** ... | ||
2608 | ** sub sp, sp, x13 | ||
2609 | -** str p4, \[sp\] | ||
2610 | ** cbz w0, [^\n]* | ||
2611 | +** str p4, \[sp\] | ||
2612 | ** ... | ||
2613 | ** ptrue p0\.b, all | ||
2614 | -** ldr p4, \[sp\] | ||
2615 | ** addvl sp, sp, #1 | ||
2616 | ** ldr x24, \[sp\], 32 | ||
2617 | ** ret | ||
2618 | @@ -39,13 +38,12 @@ test_1 (int n) | ||
2619 | ** mov x11, sp | ||
2620 | ** ... | ||
2621 | ** sub sp, sp, x13 | ||
2622 | -** str p4, \[sp\] | ||
2623 | ** cbz w0, [^\n]* | ||
2624 | +** str p4, \[sp\] | ||
2625 | ** str p5, \[sp, #1, mul vl\] | ||
2626 | ** str p6, \[sp, #2, mul vl\] | ||
2627 | ** ... | ||
2628 | ** ptrue p0\.b, all | ||
2629 | -** ldr p4, \[sp\] | ||
2630 | ** addvl sp, sp, #1 | ||
2631 | ** ldr x24, \[sp\], 32 | ||
2632 | ** ret | ||
2633 | -- | ||
2634 | 2.34.1 | ||
2635 | |||
2636 | |||
2637 | From 56df065080950bb30dda9c260f71be54269bdda5 Mon Sep 17 00:00:00 2001 | ||
2638 | From: Richard Sandiford <richard.sandiford@arm.com> | ||
2639 | Date: Tue, 12 Sep 2023 16:07:20 +0100 | ||
2640 | Subject: [PATCH 18/19] aarch64: Remove below_hard_fp_saved_regs_size | ||
2641 | |||
2642 | After previous patches, it's no longer necessary to store | ||
2643 | saved_regs_size and below_hard_fp_saved_regs_size in the frame info. | ||
2644 | All measurements instead use the top or bottom of the frame as | ||
2645 | reference points. | ||
2646 | |||
2647 | gcc/ | ||
2648 | * config/aarch64/aarch64.h (aarch64_frame::saved_regs_size) | ||
2649 | (aarch64_frame::below_hard_fp_saved_regs_size): Delete. | ||
2650 | * config/aarch64/aarch64.cc (aarch64_layout_frame): Update accordingly. | ||
2651 | --- | ||
2652 | gcc/config/aarch64/aarch64.cc | 45 ++++++++++++++++------------------- | ||
2653 | gcc/config/aarch64/aarch64.h | 7 ------ | ||
2654 | 2 files changed, 21 insertions(+), 31 deletions(-) | ||
2655 | |||
2656 | diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc | ||
2657 | index 6c59c39a639..b95e805a8cc 100644 | ||
2658 | --- a/gcc/config/aarch64/aarch64.cc | ||
2659 | +++ b/gcc/config/aarch64/aarch64.cc | ||
2660 | @@ -8530,9 +8530,8 @@ aarch64_layout_frame (void) | ||
2661 | |||
2662 | /* OFFSET is now the offset of the hard frame pointer from the bottom | ||
2663 | of the callee save area. */ | ||
2664 | - frame.below_hard_fp_saved_regs_size = offset - frame.bytes_below_saved_regs; | ||
2665 | - bool saves_below_hard_fp_p | ||
2666 | - = maybe_ne (frame.below_hard_fp_saved_regs_size, 0); | ||
2667 | + auto below_hard_fp_saved_regs_size = offset - frame.bytes_below_saved_regs; | ||
2668 | + bool saves_below_hard_fp_p = maybe_ne (below_hard_fp_saved_regs_size, 0); | ||
2669 | gcc_assert (!saves_below_hard_fp_p | ||
2670 | || (frame.sve_save_and_probe != INVALID_REGNUM | ||
2671 | && known_eq (frame.reg_offset[frame.sve_save_and_probe], | ||
2672 | @@ -8602,9 +8601,8 @@ aarch64_layout_frame (void) | ||
2673 | |||
2674 | offset = aligned_upper_bound (offset, STACK_BOUNDARY / BITS_PER_UNIT); | ||
2675 | |||
2676 | - frame.saved_regs_size = offset - frame.bytes_below_saved_regs; | ||
2677 | - gcc_assert (known_eq (frame.saved_regs_size, | ||
2678 | - frame.below_hard_fp_saved_regs_size) | ||
2679 | + auto saved_regs_size = offset - frame.bytes_below_saved_regs; | ||
2680 | + gcc_assert (known_eq (saved_regs_size, below_hard_fp_saved_regs_size) | ||
2681 | || (frame.hard_fp_save_and_probe != INVALID_REGNUM | ||
2682 | && known_eq (frame.reg_offset[frame.hard_fp_save_and_probe], | ||
2683 | frame.bytes_below_hard_fp))); | ||
2684 | @@ -8613,7 +8611,7 @@ aarch64_layout_frame (void) | ||
2685 | The saving of the bottommost register counts as an implicit probe, | ||
2686 | which allows us to maintain the invariant described in the comment | ||
2687 | at expand_prologue. */ | ||
2688 | - gcc_assert (crtl->is_leaf || maybe_ne (frame.saved_regs_size, 0)); | ||
2689 | + gcc_assert (crtl->is_leaf || maybe_ne (saved_regs_size, 0)); | ||
2690 | |||
2691 | offset += get_frame_size (); | ||
2692 | offset = aligned_upper_bound (offset, STACK_BOUNDARY / BITS_PER_UNIT); | ||
2693 | @@ -8670,7 +8668,7 @@ aarch64_layout_frame (void) | ||
2694 | |||
2695 | HOST_WIDE_INT const_size, const_below_saved_regs, const_above_fp; | ||
2696 | HOST_WIDE_INT const_saved_regs_size; | ||
2697 | - if (known_eq (frame.saved_regs_size, 0)) | ||
2698 | + if (known_eq (saved_regs_size, 0)) | ||
2699 | frame.initial_adjust = frame.frame_size; | ||
2700 | else if (frame.frame_size.is_constant (&const_size) | ||
2701 | && const_size < max_push_offset | ||
2702 | @@ -8683,7 +8681,7 @@ aarch64_layout_frame (void) | ||
2703 | frame.callee_adjust = const_size; | ||
2704 | } | ||
2705 | else if (frame.bytes_below_saved_regs.is_constant (&const_below_saved_regs) | ||
2706 | - && frame.saved_regs_size.is_constant (&const_saved_regs_size) | ||
2707 | + && saved_regs_size.is_constant (&const_saved_regs_size) | ||
2708 | && const_below_saved_regs + const_saved_regs_size < 512 | ||
2709 | /* We could handle this case even with data below the saved | ||
2710 | registers, provided that that data left us with valid offsets | ||
2711 | @@ -8702,8 +8700,7 @@ aarch64_layout_frame (void) | ||
2712 | frame.initial_adjust = frame.frame_size; | ||
2713 | } | ||
2714 | else if (saves_below_hard_fp_p | ||
2715 | - && known_eq (frame.saved_regs_size, | ||
2716 | - frame.below_hard_fp_saved_regs_size)) | ||
2717 | + && known_eq (saved_regs_size, below_hard_fp_saved_regs_size)) | ||
2718 | { | ||
2719 | /* Frame in which all saves are SVE saves: | ||
2720 | |||
2721 | @@ -8725,7 +8722,7 @@ aarch64_layout_frame (void) | ||
2722 | [save SVE registers relative to SP] | ||
2723 | sub sp, sp, bytes_below_saved_regs */ | ||
2724 | frame.callee_adjust = const_above_fp; | ||
2725 | - frame.sve_callee_adjust = frame.below_hard_fp_saved_regs_size; | ||
2726 | + frame.sve_callee_adjust = below_hard_fp_saved_regs_size; | ||
2727 | frame.final_adjust = frame.bytes_below_saved_regs; | ||
2728 | } | ||
2729 | else | ||
2730 | @@ -8740,7 +8737,7 @@ aarch64_layout_frame (void) | ||
2731 | [save SVE registers relative to SP] | ||
2732 | sub sp, sp, bytes_below_saved_regs */ | ||
2733 | frame.initial_adjust = frame.bytes_above_hard_fp; | ||
2734 | - frame.sve_callee_adjust = frame.below_hard_fp_saved_regs_size; | ||
2735 | + frame.sve_callee_adjust = below_hard_fp_saved_regs_size; | ||
2736 | frame.final_adjust = frame.bytes_below_saved_regs; | ||
2737 | } | ||
2738 | |||
2739 | @@ -9936,17 +9933,17 @@ aarch64_epilogue_uses (int regno) | ||
2740 | | local variables | <-- frame_pointer_rtx | ||
2741 | | | | ||
2742 | +-------------------------------+ | ||
2743 | - | padding | \ | ||
2744 | - +-------------------------------+ | | ||
2745 | - | callee-saved registers | | frame.saved_regs_size | ||
2746 | - +-------------------------------+ | | ||
2747 | - | LR' | | | ||
2748 | - +-------------------------------+ | | ||
2749 | - | FP' | | | ||
2750 | - +-------------------------------+ |<- hard_frame_pointer_rtx (aligned) | ||
2751 | - | SVE vector registers | | \ | ||
2752 | - +-------------------------------+ | | below_hard_fp_saved_regs_size | ||
2753 | - | SVE predicate registers | / / | ||
2754 | + | padding | | ||
2755 | + +-------------------------------+ | ||
2756 | + | callee-saved registers | | ||
2757 | + +-------------------------------+ | ||
2758 | + | LR' | | ||
2759 | + +-------------------------------+ | ||
2760 | + | FP' | | ||
2761 | + +-------------------------------+ <-- hard_frame_pointer_rtx (aligned) | ||
2762 | + | SVE vector registers | | ||
2763 | + +-------------------------------+ | ||
2764 | + | SVE predicate registers | | ||
2765 | +-------------------------------+ | ||
2766 | | dynamic allocation | | ||
2767 | +-------------------------------+ | ||
2768 | diff --git a/gcc/config/aarch64/aarch64.h b/gcc/config/aarch64/aarch64.h | ||
2769 | index fbfb73545ba..cfeaf4657ab 100644 | ||
2770 | --- a/gcc/config/aarch64/aarch64.h | ||
2771 | +++ b/gcc/config/aarch64/aarch64.h | ||
2772 | @@ -777,18 +777,11 @@ struct GTY (()) aarch64_frame | ||
2773 | STACK_BOUNDARY. */ | ||
2774 | HOST_WIDE_INT saved_varargs_size; | ||
2775 | |||
2776 | - /* The size of the callee-save registers with a slot in REG_OFFSET. */ | ||
2777 | - poly_int64 saved_regs_size; | ||
2778 | - | ||
2779 | /* The number of bytes between the bottom of the static frame (the bottom | ||
2780 | of the outgoing arguments) and the bottom of the register save area. | ||
2781 | This value is always a multiple of STACK_BOUNDARY. */ | ||
2782 | poly_int64 bytes_below_saved_regs; | ||
2783 | |||
2784 | - /* The size of the callee-save registers with a slot in REG_OFFSET that | ||
2785 | - are saved below the hard frame pointer. */ | ||
2786 | - poly_int64 below_hard_fp_saved_regs_size; | ||
2787 | - | ||
2788 | /* The number of bytes between the bottom of the static frame (the bottom | ||
2789 | of the outgoing arguments) and the hard frame pointer. This value is | ||
2790 | always a multiple of STACK_BOUNDARY. */ | ||
2791 | -- | ||
2792 | 2.34.1 | ||
2793 | |||
2794 | |||
2795 | From b96e66fd4ef3e36983969fb8cdd1956f551a074b Mon Sep 17 00:00:00 2001 | ||
2796 | From: Richard Sandiford <richard.sandiford@arm.com> | ||
2797 | Date: Tue, 12 Sep 2023 16:07:21 +0100 | ||
2798 | Subject: [PATCH 19/19] aarch64: Make stack smash canary protect saved | ||
2799 | registers | ||
2800 | |||
2801 | AArch64 normally puts the saved registers near the bottom of the frame, | ||
2802 | immediately above any dynamic allocations. But this means that a | ||
2803 | stack-smash attack on those dynamic allocations could overwrite the | ||
2804 | saved registers without needing to reach as far as the stack smash | ||
2805 | canary. | ||
2806 | |||
2807 | The same thing could also happen for variable-sized arguments that are | ||
2808 | passed by value, since those are allocated before a call and popped on | ||
2809 | return. | ||
2810 | |||
2811 | This patch avoids that by putting the locals (and thus the canary) below | ||
2812 | the saved registers when stack smash protection is active. | ||
2813 | |||
2814 | The patch fixes CVE-2023-4039. | ||
2815 | |||
2816 | gcc/ | ||
2817 | * config/aarch64/aarch64.cc (aarch64_save_regs_above_locals_p): | ||
2818 | New function. | ||
2819 | (aarch64_layout_frame): Use it to decide whether locals should | ||
2820 | go above or below the saved registers. | ||
2821 | (aarch64_expand_prologue): Update stack layout comment. | ||
2822 | Emit a stack tie after the final adjustment. | ||
2823 | |||
2824 | gcc/testsuite/ | ||
2825 | * gcc.target/aarch64/stack-protector-8.c: New test. | ||
2826 | * gcc.target/aarch64/stack-protector-9.c: Likewise. | ||
2827 | --- | ||
2828 | gcc/config/aarch64/aarch64.cc | 46 +++++++-- | ||
2829 | .../gcc.target/aarch64/stack-protector-8.c | 95 +++++++++++++++++++ | ||
2830 | .../gcc.target/aarch64/stack-protector-9.c | 33 +++++++ | ||
2831 | 3 files changed, 168 insertions(+), 6 deletions(-) | ||
2832 | create mode 100644 gcc/testsuite/gcc.target/aarch64/stack-protector-8.c | ||
2833 | create mode 100644 gcc/testsuite/gcc.target/aarch64/stack-protector-9.c | ||
2834 | |||
2835 | diff --git a/gcc/config/aarch64/aarch64.cc b/gcc/config/aarch64/aarch64.cc | ||
2836 | index b95e805a8cc..389c0e29353 100644 | ||
2837 | --- a/gcc/config/aarch64/aarch64.cc | ||
2838 | +++ b/gcc/config/aarch64/aarch64.cc | ||
2839 | @@ -8394,6 +8394,20 @@ aarch64_needs_frame_chain (void) | ||
2840 | return aarch64_use_frame_pointer; | ||
2841 | } | ||
2842 | |||
2843 | +/* Return true if the current function should save registers above | ||
2844 | + the locals area, rather than below it. */ | ||
2845 | + | ||
2846 | +static bool | ||
2847 | +aarch64_save_regs_above_locals_p () | ||
2848 | +{ | ||
2849 | + /* When using stack smash protection, make sure that the canary slot | ||
2850 | + comes between the locals and the saved registers. Otherwise, | ||
2851 | + it would be possible for a carefully sized smash attack to change | ||
2852 | + the saved registers (particularly LR and FP) without reaching the | ||
2853 | + canary. */ | ||
2854 | + return crtl->stack_protect_guard; | ||
2855 | +} | ||
2856 | + | ||
2857 | /* Mark the registers that need to be saved by the callee and calculate | ||
2858 | the size of the callee-saved registers area and frame record (both FP | ||
2859 | and LR may be omitted). */ | ||
2860 | @@ -8405,6 +8419,7 @@ aarch64_layout_frame (void) | ||
2861 | poly_int64 vector_save_size = GET_MODE_SIZE (vector_save_mode); | ||
2862 | bool frame_related_fp_reg_p = false; | ||
2863 | aarch64_frame &frame = cfun->machine->frame; | ||
2864 | + poly_int64 top_of_locals = -1; | ||
2865 | |||
2866 | frame.emit_frame_chain = aarch64_needs_frame_chain (); | ||
2867 | |||
2868 | @@ -8471,9 +8486,16 @@ aarch64_layout_frame (void) | ||
2869 | && !crtl->abi->clobbers_full_reg_p (regno)) | ||
2870 | frame.reg_offset[regno] = SLOT_REQUIRED; | ||
2871 | |||
2872 | + bool regs_at_top_p = aarch64_save_regs_above_locals_p (); | ||
2873 | |||
2874 | poly_int64 offset = crtl->outgoing_args_size; | ||
2875 | gcc_assert (multiple_p (offset, STACK_BOUNDARY / BITS_PER_UNIT)); | ||
2876 | + if (regs_at_top_p) | ||
2877 | + { | ||
2878 | + offset += get_frame_size (); | ||
2879 | + offset = aligned_upper_bound (offset, STACK_BOUNDARY / BITS_PER_UNIT); | ||
2880 | + top_of_locals = offset; | ||
2881 | + } | ||
2882 | frame.bytes_below_saved_regs = offset; | ||
2883 | frame.sve_save_and_probe = INVALID_REGNUM; | ||
2884 | |||
2885 | @@ -8613,15 +8635,18 @@ aarch64_layout_frame (void) | ||
2886 | at expand_prologue. */ | ||
2887 | gcc_assert (crtl->is_leaf || maybe_ne (saved_regs_size, 0)); | ||
2888 | |||
2889 | - offset += get_frame_size (); | ||
2890 | - offset = aligned_upper_bound (offset, STACK_BOUNDARY / BITS_PER_UNIT); | ||
2891 | - auto top_of_locals = offset; | ||
2892 | - | ||
2893 | + if (!regs_at_top_p) | ||
2894 | + { | ||
2895 | + offset += get_frame_size (); | ||
2896 | + offset = aligned_upper_bound (offset, STACK_BOUNDARY / BITS_PER_UNIT); | ||
2897 | + top_of_locals = offset; | ||
2898 | + } | ||
2899 | offset += frame.saved_varargs_size; | ||
2900 | gcc_assert (multiple_p (offset, STACK_BOUNDARY / BITS_PER_UNIT)); | ||
2901 | frame.frame_size = offset; | ||
2902 | |||
2903 | frame.bytes_above_hard_fp = frame.frame_size - frame.bytes_below_hard_fp; | ||
2904 | + gcc_assert (known_ge (top_of_locals, 0)); | ||
2905 | frame.bytes_above_locals = frame.frame_size - top_of_locals; | ||
2906 | |||
2907 | frame.initial_adjust = 0; | ||
2908 | @@ -9930,10 +9955,10 @@ aarch64_epilogue_uses (int regno) | ||
2909 | | for register varargs | | ||
2910 | | | | ||
2911 | +-------------------------------+ | ||
2912 | - | local variables | <-- frame_pointer_rtx | ||
2913 | + | local variables (1) | <-- frame_pointer_rtx | ||
2914 | | | | ||
2915 | +-------------------------------+ | ||
2916 | - | padding | | ||
2917 | + | padding (1) | | ||
2918 | +-------------------------------+ | ||
2919 | | callee-saved registers | | ||
2920 | +-------------------------------+ | ||
2921 | @@ -9945,6 +9970,10 @@ aarch64_epilogue_uses (int regno) | ||
2922 | +-------------------------------+ | ||
2923 | | SVE predicate registers | | ||
2924 | +-------------------------------+ | ||
2925 | + | local variables (2) | | ||
2926 | + +-------------------------------+ | ||
2927 | + | padding (2) | | ||
2928 | + +-------------------------------+ | ||
2929 | | dynamic allocation | | ||
2930 | +-------------------------------+ | ||
2931 | | padding | | ||
2932 | @@ -9954,6 +9983,9 @@ aarch64_epilogue_uses (int regno) | ||
2933 | +-------------------------------+ | ||
2934 | | | <-- stack_pointer_rtx (aligned) | ||
2935 | |||
2936 | + The regions marked (1) and (2) are mutually exclusive. (2) is used | ||
2937 | + when aarch64_save_regs_above_locals_p is true. | ||
2938 | + | ||
2939 | Dynamic stack allocations via alloca() decrease stack_pointer_rtx | ||
2940 | but leave frame_pointer_rtx and hard_frame_pointer_rtx | ||
2941 | unchanged. | ||
2942 | @@ -10149,6 +10181,8 @@ aarch64_expand_prologue (void) | ||
2943 | gcc_assert (known_eq (bytes_below_sp, final_adjust)); | ||
2944 | aarch64_allocate_and_probe_stack_space (tmp1_rtx, tmp0_rtx, final_adjust, | ||
2945 | !frame_pointer_needed, true); | ||
2946 | + if (emit_frame_chain && maybe_ne (final_adjust, 0)) | ||
2947 | + emit_insn (gen_stack_tie (stack_pointer_rtx, hard_frame_pointer_rtx)); | ||
2948 | } | ||
2949 | |||
2950 | /* Return TRUE if we can use a simple_return insn. | ||
2951 | diff --git a/gcc/testsuite/gcc.target/aarch64/stack-protector-8.c b/gcc/testsuite/gcc.target/aarch64/stack-protector-8.c | ||
2952 | new file mode 100644 | ||
2953 | index 00000000000..e71d820e365 | ||
2954 | --- /dev/null | ||
2955 | +++ b/gcc/testsuite/gcc.target/aarch64/stack-protector-8.c | ||
2956 | @@ -0,0 +1,95 @@ | ||
2957 | +/* { dg-options " -O -fstack-protector-strong -mstack-protector-guard=sysreg -mstack-protector-guard-reg=tpidr2_el0 -mstack-protector-guard-offset=16" } */ | ||
2958 | +/* { dg-final { check-function-bodies "**" "" } } */ | ||
2959 | + | ||
2960 | +void g(void *); | ||
2961 | +__SVBool_t *h(void *); | ||
2962 | + | ||
2963 | +/* | ||
2964 | +** test1: | ||
2965 | +** sub sp, sp, #288 | ||
2966 | +** stp x29, x30, \[sp, #?272\] | ||
2967 | +** add x29, sp, #?272 | ||
2968 | +** mrs (x[0-9]+), tpidr2_el0 | ||
2969 | +** ldr (x[0-9]+), \[\1, #?16\] | ||
2970 | +** str \2, \[sp, #?264\] | ||
2971 | +** mov \2, #?0 | ||
2972 | +** add x0, sp, #?8 | ||
2973 | +** bl g | ||
2974 | +** ... | ||
2975 | +** mrs .* | ||
2976 | +** ... | ||
2977 | +** bne .* | ||
2978 | +** ... | ||
2979 | +** ldp x29, x30, \[sp, #?272\] | ||
2980 | +** add sp, sp, #?288 | ||
2981 | +** ret | ||
2982 | +** bl __stack_chk_fail | ||
2983 | +*/ | ||
2984 | +int test1() { | ||
2985 | + int y[0x40]; | ||
2986 | + g(y); | ||
2987 | + return 1; | ||
2988 | +} | ||
2989 | + | ||
2990 | +/* | ||
2991 | +** test2: | ||
2992 | +** stp x29, x30, \[sp, #?-16\]! | ||
2993 | +** mov x29, sp | ||
2994 | +** sub sp, sp, #1040 | ||
2995 | +** mrs (x[0-9]+), tpidr2_el0 | ||
2996 | +** ldr (x[0-9]+), \[\1, #?16\] | ||
2997 | +** str \2, \[sp, #?1032\] | ||
2998 | +** mov \2, #?0 | ||
2999 | +** add x0, sp, #?8 | ||
3000 | +** bl g | ||
3001 | +** ... | ||
3002 | +** mrs .* | ||
3003 | +** ... | ||
3004 | +** bne .* | ||
3005 | +** ... | ||
3006 | +** add sp, sp, #?1040 | ||
3007 | +** ldp x29, x30, \[sp\], #?16 | ||
3008 | +** ret | ||
3009 | +** bl __stack_chk_fail | ||
3010 | +*/ | ||
3011 | +int test2() { | ||
3012 | + int y[0x100]; | ||
3013 | + g(y); | ||
3014 | + return 1; | ||
3015 | +} | ||
3016 | + | ||
3017 | +#pragma GCC target "+sve" | ||
3018 | + | ||
3019 | +/* | ||
3020 | +** test3: | ||
3021 | +** stp x29, x30, \[sp, #?-16\]! | ||
3022 | +** mov x29, sp | ||
3023 | +** addvl sp, sp, #-18 | ||
3024 | +** ... | ||
3025 | +** str p4, \[sp\] | ||
3026 | +** ... | ||
3027 | +** sub sp, sp, #272 | ||
3028 | +** mrs (x[0-9]+), tpidr2_el0 | ||
3029 | +** ldr (x[0-9]+), \[\1, #?16\] | ||
3030 | +** str \2, \[sp, #?264\] | ||
3031 | +** mov \2, #?0 | ||
3032 | +** add x0, sp, #?8 | ||
3033 | +** bl h | ||
3034 | +** ... | ||
3035 | +** mrs .* | ||
3036 | +** ... | ||
3037 | +** bne .* | ||
3038 | +** ... | ||
3039 | +** add sp, sp, #?272 | ||
3040 | +** ... | ||
3041 | +** ldr p4, \[sp\] | ||
3042 | +** ... | ||
3043 | +** addvl sp, sp, #18 | ||
3044 | +** ldp x29, x30, \[sp\], #?16 | ||
3045 | +** ret | ||
3046 | +** bl __stack_chk_fail | ||
3047 | +*/ | ||
3048 | +__SVBool_t test3() { | ||
3049 | + int y[0x40]; | ||
3050 | + return *h(y); | ||
3051 | +} | ||
3052 | diff --git a/gcc/testsuite/gcc.target/aarch64/stack-protector-9.c b/gcc/testsuite/gcc.target/aarch64/stack-protector-9.c | ||
3053 | new file mode 100644 | ||
3054 | index 00000000000..58f322aa480 | ||
3055 | --- /dev/null | ||
3056 | +++ b/gcc/testsuite/gcc.target/aarch64/stack-protector-9.c | ||
3057 | @@ -0,0 +1,33 @@ | ||
3058 | +/* { dg-options "-O2 -mcpu=neoverse-v1 -fstack-protector-all" } */ | ||
3059 | +/* { dg-final { check-function-bodies "**" "" } } */ | ||
3060 | + | ||
3061 | +/* | ||
3062 | +** main: | ||
3063 | +** ... | ||
3064 | +** stp x29, x30, \[sp, #?-[0-9]+\]! | ||
3065 | +** ... | ||
3066 | +** sub sp, sp, #[0-9]+ | ||
3067 | +** ... | ||
3068 | +** str x[0-9]+, \[x29, #?-8\] | ||
3069 | +** ... | ||
3070 | +*/ | ||
3071 | +int f(const char *); | ||
3072 | +void g(void *); | ||
3073 | +int main(int argc, char* argv[]) | ||
3074 | +{ | ||
3075 | + int a; | ||
3076 | + int b; | ||
3077 | + char c[2+f(argv[1])]; | ||
3078 | + int d[0x100]; | ||
3079 | + char y; | ||
3080 | + | ||
3081 | + y=42; a=4; b=10; | ||
3082 | + c[0] = 'h'; c[1] = '\0'; | ||
3083 | + | ||
3084 | + c[f(argv[2])] = '\0'; | ||
3085 | + | ||
3086 | + __builtin_printf("%d %d\n%s\n", a, b, c); | ||
3087 | + g(d); | ||
3088 | + | ||
3089 | + return 0; | ||
3090 | +} | ||
3091 | -- | ||
3092 | 2.34.1 | ||
3093 | |||
diff --git a/meta/recipes-devtools/gcc/gcc_13.2.bb b/meta/recipes-devtools/gcc/gcc_14.1.bb index 255fe552bd..255fe552bd 100644 --- a/meta/recipes-devtools/gcc/gcc_13.2.bb +++ b/meta/recipes-devtools/gcc/gcc_14.1.bb | |||
diff --git a/meta/recipes-devtools/gcc/libgcc-initial_13.2.bb b/meta/recipes-devtools/gcc/libgcc-initial_14.1.bb index a259082b47..a259082b47 100644 --- a/meta/recipes-devtools/gcc/libgcc-initial_13.2.bb +++ b/meta/recipes-devtools/gcc/libgcc-initial_14.1.bb | |||
diff --git a/meta/recipes-devtools/gcc/libgcc_13.2.bb b/meta/recipes-devtools/gcc/libgcc_14.1.bb index fdcd6cc0da..fdcd6cc0da 100644 --- a/meta/recipes-devtools/gcc/libgcc_13.2.bb +++ b/meta/recipes-devtools/gcc/libgcc_14.1.bb | |||
diff --git a/meta/recipes-devtools/gcc/libgfortran_13.2.bb b/meta/recipes-devtools/gcc/libgfortran_14.1.bb index 71dd8b4bdc..71dd8b4bdc 100644 --- a/meta/recipes-devtools/gcc/libgfortran_13.2.bb +++ b/meta/recipes-devtools/gcc/libgfortran_14.1.bb | |||
diff --git a/meta/recipes-devtools/gdb/gdb.inc b/meta/recipes-devtools/gdb/gdb.inc index 81ac441462..c2fbcb2ac6 100644 --- a/meta/recipes-devtools/gdb/gdb.inc +++ b/meta/recipes-devtools/gdb/gdb.inc | |||
@@ -10,7 +10,6 @@ SRC_URI = "${GNU_MIRROR}/gdb/gdb-${PV}.tar.xz \ | |||
10 | file://0003-Dont-disable-libreadline.a-when-using-disable-static.patch \ | 10 | file://0003-Dont-disable-libreadline.a-when-using-disable-static.patch \ |
11 | file://0004-use-asm-sgidefs.h.patch \ | 11 | file://0004-use-asm-sgidefs.h.patch \ |
12 | file://0005-Change-order-of-CFLAGS.patch \ | 12 | file://0005-Change-order-of-CFLAGS.patch \ |
13 | file://0006-resolve-restrict-keyword-conflict.patch \ | ||
14 | file://0007-Fix-invalid-sigprocmask-call.patch \ | 13 | file://0007-Fix-invalid-sigprocmask-call.patch \ |
15 | file://0008-Define-alignof-using-_Alignof-when-using-C11-or-newe.patch \ | 14 | file://0008-Define-alignof-using-_Alignof-when-using-C11-or-newe.patch \ |
16 | " | 15 | " |
diff --git a/meta/recipes-devtools/gdb/gdb/0006-resolve-restrict-keyword-conflict.patch b/meta/recipes-devtools/gdb/gdb/0006-resolve-restrict-keyword-conflict.patch deleted file mode 100644 index 45388c5ac5..0000000000 --- a/meta/recipes-devtools/gdb/gdb/0006-resolve-restrict-keyword-conflict.patch +++ /dev/null | |||
@@ -1,45 +0,0 @@ | |||
1 | From 477f1b2049c7f940b8e8fda4ac396cfe322b269f Mon Sep 17 00:00:00 2001 | ||
2 | From: Khem Raj <raj.khem@gmail.com> | ||
3 | Date: Tue, 10 May 2016 08:47:05 -0700 | ||
4 | Subject: [PATCH] resolve restrict keyword conflict | ||
5 | |||
6 | GCC detects that we call 'restrict' as param name in function | ||
7 | signatures and complains since both params are called 'restrict' | ||
8 | therefore we use __restrict to denote the C99 keywork | ||
9 | |||
10 | Upstream-Status: Pending | ||
11 | |||
12 | Signed-off-by: Khem Raj <raj.khem@gmail.com> | ||
13 | --- | ||
14 | gnulib/import/sys_time.in.h | 8 ++++---- | ||
15 | 1 file changed, 4 insertions(+), 4 deletions(-) | ||
16 | |||
17 | diff --git a/gnulib/import/sys_time.in.h b/gnulib/import/sys_time.in.h | ||
18 | index 87db1a88745..e6b98c7e467 100644 | ||
19 | --- a/gnulib/import/sys_time.in.h | ||
20 | +++ b/gnulib/import/sys_time.in.h | ||
21 | @@ -93,20 +93,20 @@ struct timeval | ||
22 | # define gettimeofday rpl_gettimeofday | ||
23 | # endif | ||
24 | _GL_FUNCDECL_RPL (gettimeofday, int, | ||
25 | - (struct timeval *restrict, void *restrict) | ||
26 | + (struct timeval *__restrict, void *__restrict) | ||
27 | _GL_ARG_NONNULL ((1))); | ||
28 | _GL_CXXALIAS_RPL (gettimeofday, int, | ||
29 | - (struct timeval *restrict, void *restrict)); | ||
30 | + (struct timeval *__restrict, void *__restrict)); | ||
31 | # else | ||
32 | # if !@HAVE_GETTIMEOFDAY@ | ||
33 | _GL_FUNCDECL_SYS (gettimeofday, int, | ||
34 | - (struct timeval *restrict, void *restrict) | ||
35 | + (struct timeval *__restrict, void *__restrict) | ||
36 | _GL_ARG_NONNULL ((1))); | ||
37 | # endif | ||
38 | /* Need to cast, because on glibc systems, by default, the second argument is | ||
39 | struct timezone *. */ | ||
40 | _GL_CXXALIAS_SYS_CAST (gettimeofday, int, | ||
41 | - (struct timeval *restrict, void *restrict)); | ||
42 | + (struct timeval *__restrict, void *__restrict)); | ||
43 | # endif | ||
44 | _GL_CXXALIASWARN (gettimeofday); | ||
45 | # if defined __cplusplus && defined GNULIB_NAMESPACE | ||
diff --git a/meta/recipes-devtools/git/git_2.44.0.bb b/meta/recipes-devtools/git/git_2.44.1.bb index 90e555eba7..53d67eb40a 100644 --- a/meta/recipes-devtools/git/git_2.44.0.bb +++ b/meta/recipes-devtools/git/git_2.44.1.bb | |||
@@ -40,6 +40,7 @@ EXTRA_OECONF = "--with-perl=${STAGING_BINDIR_NATIVE}/perl-native/perl \ | |||
40 | --without-iconv \ | 40 | --without-iconv \ |
41 | " | 41 | " |
42 | EXTRA_OECONF:append:class-nativesdk = " --with-gitconfig=/etc/gitconfig " | 42 | EXTRA_OECONF:append:class-nativesdk = " --with-gitconfig=/etc/gitconfig " |
43 | EXTRA_OECONF:append:class-native = " --with-gitconfig=/etc/gitconfig " | ||
43 | 44 | ||
44 | # Needs brokensep as this doesn't use automake | 45 | # Needs brokensep as this doesn't use automake |
45 | inherit autotools-brokensep perlnative bash-completion manpages | 46 | inherit autotools-brokensep perlnative bash-completion manpages |
@@ -163,4 +164,4 @@ EXTRA_OECONF += "ac_cv_snprintf_returns_bogus=no \ | |||
163 | " | 164 | " |
164 | EXTRA_OEMAKE += "NO_GETTEXT=1" | 165 | EXTRA_OEMAKE += "NO_GETTEXT=1" |
165 | 166 | ||
166 | SRC_URI[tarball.sha256sum] = "f9e36f085458fe9688fbbe7846b8c4770b13d161fcd8953655f36b2b85f06b76" | 167 | SRC_URI[tarball.sha256sum] = "118214bb8d7ba971a62741416e757562b8f5451cefc087a407e91857897c92cc" |
diff --git a/meta/recipes-devtools/go/go-1.22.2.inc b/meta/recipes-devtools/go/go-1.22.3.inc index b399207311..34703bc1fa 100644 --- a/meta/recipes-devtools/go/go-1.22.2.inc +++ b/meta/recipes-devtools/go/go-1.22.3.inc | |||
@@ -15,4 +15,4 @@ SRC_URI += "\ | |||
15 | file://0008-src-cmd-dist-buildgo.go-do-not-hardcode-host-compile.patch \ | 15 | file://0008-src-cmd-dist-buildgo.go-do-not-hardcode-host-compile.patch \ |
16 | file://0009-go-Filter-build-paths-on-staticly-linked-arches.patch \ | 16 | file://0009-go-Filter-build-paths-on-staticly-linked-arches.patch \ |
17 | " | 17 | " |
18 | SRC_URI[main.sha256sum] = "374ea82b289ec738e968267cac59c7d5ff180f9492250254784b2044e90df5a9" | 18 | SRC_URI[main.sha256sum] = "80648ef34f903193d72a59c0dff019f5f98ae0c9aa13ade0b0ecbff991a76f68" |
diff --git a/meta/recipes-devtools/go/go-binary-native_1.22.2.bb b/meta/recipes-devtools/go/go-binary-native_1.22.3.bb index 0f00509f03..b67d97608d 100644 --- a/meta/recipes-devtools/go/go-binary-native_1.22.2.bb +++ b/meta/recipes-devtools/go/go-binary-native_1.22.3.bb | |||
@@ -9,9 +9,9 @@ PROVIDES = "go-native" | |||
9 | 9 | ||
10 | # Checksums available at https://go.dev/dl/ | 10 | # Checksums available at https://go.dev/dl/ |
11 | SRC_URI = "https://dl.google.com/go/go${PV}.${BUILD_GOOS}-${BUILD_GOARCH}.tar.gz;name=go_${BUILD_GOTUPLE}" | 11 | SRC_URI = "https://dl.google.com/go/go${PV}.${BUILD_GOOS}-${BUILD_GOARCH}.tar.gz;name=go_${BUILD_GOTUPLE}" |
12 | SRC_URI[go_linux_amd64.sha256sum] = "5901c52b7a78002aeff14a21f93e0f064f74ce1360fce51c6ee68cd471216a17" | 12 | SRC_URI[go_linux_amd64.sha256sum] = "8920ea521bad8f6b7bc377b4824982e011c19af27df88a815e3586ea895f1b36" |
13 | SRC_URI[go_linux_arm64.sha256sum] = "36e720b2d564980c162a48c7e97da2e407dfcc4239e1e58d98082dfa2486a0c1" | 13 | SRC_URI[go_linux_arm64.sha256sum] = "6c33e52a5b26e7aa021b94475587fce80043a727a54ceb0eee2f9fc160646434" |
14 | SRC_URI[go_linux_ppc64le.sha256sum] = "251a8886c5113be6490bdbb955ddee98763b49c9b1bf4c8364c02d3b482dab00" | 14 | SRC_URI[go_linux_ppc64le.sha256sum] = "04b7b05283de30dd2da20bf3114b2e22cc727938aed3148babaf35cc951051ac" |
15 | 15 | ||
16 | UPSTREAM_CHECK_URI = "https://golang.org/dl/" | 16 | UPSTREAM_CHECK_URI = "https://golang.org/dl/" |
17 | UPSTREAM_CHECK_REGEX = "go(?P<pver>\d+(\.\d+)+)\.linux" | 17 | UPSTREAM_CHECK_REGEX = "go(?P<pver>\d+(\.\d+)+)\.linux" |
diff --git a/meta/recipes-devtools/go/go-cross-canadian_1.22.2.bb b/meta/recipes-devtools/go/go-cross-canadian_1.22.3.bb index 7ac9449e47..7ac9449e47 100644 --- a/meta/recipes-devtools/go/go-cross-canadian_1.22.2.bb +++ b/meta/recipes-devtools/go/go-cross-canadian_1.22.3.bb | |||
diff --git a/meta/recipes-devtools/go/go-cross_1.22.2.bb b/meta/recipes-devtools/go/go-cross_1.22.3.bb index 80b5a03f6c..80b5a03f6c 100644 --- a/meta/recipes-devtools/go/go-cross_1.22.2.bb +++ b/meta/recipes-devtools/go/go-cross_1.22.3.bb | |||
diff --git a/meta/recipes-devtools/go/go-crosssdk_1.22.2.bb b/meta/recipes-devtools/go/go-crosssdk_1.22.3.bb index 1857c8a577..1857c8a577 100644 --- a/meta/recipes-devtools/go/go-crosssdk_1.22.2.bb +++ b/meta/recipes-devtools/go/go-crosssdk_1.22.3.bb | |||
diff --git a/meta/recipes-devtools/go/go-native_1.22.2.bb b/meta/recipes-devtools/go/go-native_1.22.3.bb index ddf25b2c9b..ddf25b2c9b 100644 --- a/meta/recipes-devtools/go/go-native_1.22.2.bb +++ b/meta/recipes-devtools/go/go-native_1.22.3.bb | |||
diff --git a/meta/recipes-devtools/go/go-runtime_1.22.2.bb b/meta/recipes-devtools/go/go-runtime_1.22.3.bb index 63464a1501..63464a1501 100644 --- a/meta/recipes-devtools/go/go-runtime_1.22.2.bb +++ b/meta/recipes-devtools/go/go-runtime_1.22.3.bb | |||
diff --git a/meta/recipes-devtools/go/go_1.22.2.bb b/meta/recipes-devtools/go/go_1.22.3.bb index 46f5fbc6be..46f5fbc6be 100644 --- a/meta/recipes-devtools/go/go_1.22.2.bb +++ b/meta/recipes-devtools/go/go_1.22.3.bb | |||
diff --git a/meta/recipes-devtools/icecc-create-env/icecc-create-env_0.1.bb b/meta/recipes-devtools/icecc-create-env/icecc-create-env_0.1.bb index 56a9321fb9..dd1b257b10 100644 --- a/meta/recipes-devtools/icecc-create-env/icecc-create-env_0.1.bb +++ b/meta/recipes-devtools/icecc-create-env/icecc-create-env_0.1.bb | |||
@@ -26,7 +26,8 @@ ICECC_DISABLED = "1" | |||
26 | PATCHTOOL = "patch" | 26 | PATCHTOOL = "patch" |
27 | SRC_URI = "file://icecc-create-env" | 27 | SRC_URI = "file://icecc-create-env" |
28 | 28 | ||
29 | S = "${WORKDIR}" | 29 | S = "${WORKDIR}/sources" |
30 | UNPACKDIR = "${S}" | ||
30 | 31 | ||
31 | do_install() { | 32 | do_install() { |
32 | install -d ${D}/${bindir} | 33 | install -d ${D}/${bindir} |
diff --git a/meta/recipes-devtools/libedit/libedit_20230828-3.1.bb b/meta/recipes-devtools/libedit/libedit_20240517-3.1.bb index 1684b57d31..f8fa871ec9 100644 --- a/meta/recipes-devtools/libedit/libedit_20230828-3.1.bb +++ b/meta/recipes-devtools/libedit/libedit_20240517-3.1.bb | |||
@@ -13,7 +13,7 @@ inherit autotools | |||
13 | SRC_URI = "http://www.thrysoee.dk/editline/${BP}.tar.gz \ | 13 | SRC_URI = "http://www.thrysoee.dk/editline/${BP}.tar.gz \ |
14 | file://stdc-predef.patch \ | 14 | file://stdc-predef.patch \ |
15 | " | 15 | " |
16 | SRC_URI[sha256sum] = "4ee8182b6e569290e7d1f44f0f78dac8716b35f656b76528f699c69c98814dad" | 16 | SRC_URI[sha256sum] = "3a489097bb4115495f3bd85ae782852b7097c556d9500088d74b6fa38dbd12ff" |
17 | 17 | ||
18 | BBCLASSEXTEND = "native nativesdk" | 18 | BBCLASSEXTEND = "native nativesdk" |
19 | 19 | ||
diff --git a/meta/recipes-devtools/llvm/llvm_git.bb b/meta/recipes-devtools/llvm/llvm_18.1.6.bb index 6413b041a8..189f5b1146 100644 --- a/meta/recipes-devtools/llvm/llvm_git.bb +++ b/meta/recipes-devtools/llvm/llvm_18.1.6.bb | |||
@@ -13,27 +13,26 @@ DEPENDS = "libffi libxml2 zlib zstd libedit ninja-native llvm-native" | |||
13 | RDEPENDS:${PN}:append:class-target = " ncurses-terminfo" | 13 | RDEPENDS:${PN}:append:class-target = " ncurses-terminfo" |
14 | 14 | ||
15 | inherit cmake pkgconfig | 15 | inherit cmake pkgconfig |
16 | |||
17 | # could be 'rcX' or 'git' or empty ( for release ) | 16 | # could be 'rcX' or 'git' or empty ( for release ) |
18 | VER_SUFFIX = "" | 17 | VER_SUFFIX = "" |
19 | 18 | ||
20 | PV = "18.1.5${VER_SUFFIX}" | 19 | PV .= "${VER_SUFFIX}" |
21 | 20 | ||
22 | MAJOR_VERSION = "${@oe.utils.trim_version("${PV}", 1)}" | 21 | MAJOR_VERSION = "${@oe.utils.trim_version("${PV}", 1)}" |
23 | 22 | ||
24 | LLVM_RELEASE = "${PV}" | 23 | LLVM_RELEASE = "${PV}" |
25 | 24 | ||
26 | BRANCH = "release/${MAJOR_VERSION}.x" | 25 | SRC_URI = "https://github.com/llvm/llvm-project/releases/download/llvmorg-${PV}/llvm-project-${PV}.src.tar.xz \ |
27 | SRCREV = "617a15a9eac96088ae5e9134248d8236e34b91b1" | ||
28 | SRC_URI = "git://github.com/llvm/llvm-project.git;branch=${BRANCH};protocol=https \ | ||
29 | file://0007-llvm-allow-env-override-of-exe-path.patch;striplevel=2 \ | 26 | file://0007-llvm-allow-env-override-of-exe-path.patch;striplevel=2 \ |
30 | file://0001-AsmMatcherEmitter-sort-ClassInfo-lists-by-name-as-we.patch;striplevel=2 \ | 27 | file://0001-AsmMatcherEmitter-sort-ClassInfo-lists-by-name-as-we.patch;striplevel=2 \ |
31 | file://llvm-config \ | 28 | file://llvm-config \ |
32 | " | 29 | " |
30 | SRC_URI[sha256sum] = "bd4b4cb6374bcd5fc5a3ba60cb80425d29da34f316b8821abc12c0db225cf6b4" | ||
33 | 31 | ||
34 | UPSTREAM_CHECK_GITTAGREGEX = "llvmorg-(?P<pver>\d+(\.\d+)+)" | 32 | UPSTREAM_CHECK_URI = "https://github.com/llvm/llvm-project" |
33 | UPSTREAM_CHECK_REGEX = "llvmorg-(?P<pver>\d+(\.\d+)+)" | ||
35 | 34 | ||
36 | S = "${WORKDIR}/git/llvm" | 35 | S = "${WORKDIR}/llvm-project-${PV}.src/llvm" |
37 | 36 | ||
38 | LLVM_INSTALL_DIR = "${WORKDIR}/llvm-install" | 37 | LLVM_INSTALL_DIR = "${WORKDIR}/llvm-install" |
39 | 38 | ||
diff --git a/meta/recipes-devtools/lua/lua_5.4.6.bb b/meta/recipes-devtools/lua/lua_5.4.6.bb index 65f19ae247..17dc8fb17d 100644 --- a/meta/recipes-devtools/lua/lua_5.4.6.bb +++ b/meta/recipes-devtools/lua/lua_5.4.6.bb | |||
@@ -51,7 +51,7 @@ do_install () { | |||
51 | } | 51 | } |
52 | 52 | ||
53 | do_install_ptest () { | 53 | do_install_ptest () { |
54 | cp -R --no-dereference --preserve=mode,links -v ${WORKDIR}/lua-${PV_testsuites}-tests ${D}${PTEST_PATH}/test | 54 | cp -R --no-dereference --preserve=mode,links -v ${UNPACKDIR}/lua-${PV_testsuites}-tests ${D}${PTEST_PATH}/test |
55 | } | 55 | } |
56 | 56 | ||
57 | do_install_ptest:append:libc-musl () { | 57 | do_install_ptest:append:libc-musl () { |
diff --git a/meta/recipes-devtools/makedevs/makedevs_1.0.1.bb b/meta/recipes-devtools/makedevs/makedevs_1.0.1.bb index 0d6c7a01eb..7b11093364 100644 --- a/meta/recipes-devtools/makedevs/makedevs_1.0.1.bb +++ b/meta/recipes-devtools/makedevs/makedevs_1.0.1.bb | |||
@@ -5,7 +5,8 @@ LIC_FILES_CHKSUM = "file://makedevs.c;beginline=2;endline=2;md5=c3817b10013a3007 | |||
5 | SECTION = "base" | 5 | SECTION = "base" |
6 | SRC_URI = "file://makedevs.c" | 6 | SRC_URI = "file://makedevs.c" |
7 | 7 | ||
8 | S = "${WORKDIR}" | 8 | S = "${WORKDIR}/sources" |
9 | UNPACKDIR = "${S}" | ||
9 | 10 | ||
10 | FILES:${PN}:append:class-nativesdk = " ${datadir}" | 11 | FILES:${PN}:append:class-nativesdk = " ${datadir}" |
11 | 12 | ||
diff --git a/meta/recipes-devtools/meson/meson/0001-Make-CPU-family-warnings-fatal.patch b/meta/recipes-devtools/meson/meson/0001-Make-CPU-family-warnings-fatal.patch index 8ea7c35950..94129b2f25 100644 --- a/meta/recipes-devtools/meson/meson/0001-Make-CPU-family-warnings-fatal.patch +++ b/meta/recipes-devtools/meson/meson/0001-Make-CPU-family-warnings-fatal.patch | |||
@@ -1,21 +1,20 @@ | |||
1 | From b77cbe67df5fa0998946503f207c256ee740bb5f Mon Sep 17 00:00:00 2001 | 1 | From c61c93f43b70ba0670d41e841bff9f2a7186cc2f Mon Sep 17 00:00:00 2001 |
2 | From: Ross Burton <ross.burton@intel.com> | 2 | From: Ross Burton <ross.burton@intel.com> |
3 | Date: Tue, 3 Jul 2018 13:59:09 +0100 | 3 | Date: Tue, 3 Jul 2018 13:59:09 +0100 |
4 | Subject: [PATCH] Make CPU family warnings fatal | 4 | Subject: [PATCH] Make CPU family warnings fatal |
5 | 5 | ||
6 | Upstream-Status: Inappropriate [OE specific] | 6 | Upstream-Status: Inappropriate [OE specific] |
7 | Signed-off-by: Ross Burton <ross.burton@intel.com> | 7 | Signed-off-by: Ross Burton <ross.burton@intel.com> |
8 | |||
9 | --- | 8 | --- |
10 | mesonbuild/envconfig.py | 4 ++-- | 9 | mesonbuild/envconfig.py | 4 ++-- |
11 | mesonbuild/environment.py | 6 ++---- | 10 | mesonbuild/environment.py | 6 ++---- |
12 | 2 files changed, 4 insertions(+), 6 deletions(-) | 11 | 2 files changed, 4 insertions(+), 6 deletions(-) |
13 | 12 | ||
14 | diff --git a/mesonbuild/envconfig.py b/mesonbuild/envconfig.py | 13 | diff --git a/mesonbuild/envconfig.py b/mesonbuild/envconfig.py |
15 | index 07f1229..a35c356 100644 | 14 | index 0e9cd23..b44e60c 100644 |
16 | --- a/mesonbuild/envconfig.py | 15 | --- a/mesonbuild/envconfig.py |
17 | +++ b/mesonbuild/envconfig.py | 16 | +++ b/mesonbuild/envconfig.py |
18 | @@ -285,8 +285,8 @@ class MachineInfo(HoldableObject): | 17 | @@ -276,8 +276,8 @@ class MachineInfo(HoldableObject): |
19 | 'but is missing {}.'.format(minimum_literal - set(literal))) | 18 | 'but is missing {}.'.format(minimum_literal - set(literal))) |
20 | 19 | ||
21 | cpu_family = literal['cpu_family'] | 20 | cpu_family = literal['cpu_family'] |
@@ -27,10 +26,10 @@ index 07f1229..a35c356 100644 | |||
27 | endian = literal['endian'] | 26 | endian = literal['endian'] |
28 | if endian not in ('little', 'big'): | 27 | if endian not in ('little', 'big'): |
29 | diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py | 28 | diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py |
30 | index 2ba2054..d798e3b 100644 | 29 | index af69f64..248d6dd 100644 |
31 | --- a/mesonbuild/environment.py | 30 | --- a/mesonbuild/environment.py |
32 | +++ b/mesonbuild/environment.py | 31 | +++ b/mesonbuild/environment.py |
33 | @@ -359,10 +359,8 @@ def detect_cpu_family(compilers: CompilersDict) -> str: | 32 | @@ -379,10 +379,8 @@ def detect_cpu_family(compilers: CompilersDict) -> str: |
34 | if compilers and not any_compiler_has_define(compilers, '__mips64'): | 33 | if compilers and not any_compiler_has_define(compilers, '__mips64'): |
35 | trial = 'mips' | 34 | trial = 'mips' |
36 | 35 | ||
diff --git a/meta/recipes-devtools/meson/meson/0001-python-module-do-not-manipulate-the-environment-when.patch b/meta/recipes-devtools/meson/meson/0001-python-module-do-not-manipulate-the-environment-when.patch index 2e0a4b1bbe..9f3f516a5c 100644 --- a/meta/recipes-devtools/meson/meson/0001-python-module-do-not-manipulate-the-environment-when.patch +++ b/meta/recipes-devtools/meson/meson/0001-python-module-do-not-manipulate-the-environment-when.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From e85683698aa3556bf14fc6d35f2c067f16af520b Mon Sep 17 00:00:00 2001 | 1 | From b4c0602a56d3517ab66b98a7dbb69defe77d29a3 Mon Sep 17 00:00:00 2001 |
2 | From: Alexander Kanavin <alex.kanavin@gmail.com> | 2 | From: Alexander Kanavin <alex.kanavin@gmail.com> |
3 | Date: Mon, 19 Nov 2018 14:24:26 +0100 | 3 | Date: Mon, 19 Nov 2018 14:24:26 +0100 |
4 | Subject: [PATCH] python module: do not manipulate the environment when calling | 4 | Subject: [PATCH] python module: do not manipulate the environment when calling |
@@ -6,16 +6,15 @@ Subject: [PATCH] python module: do not manipulate the environment when calling | |||
6 | 6 | ||
7 | Upstream-Status: Inappropriate [oe-core specific] | 7 | Upstream-Status: Inappropriate [oe-core specific] |
8 | Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com> | 8 | Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com> |
9 | |||
10 | --- | 9 | --- |
11 | mesonbuild/dependencies/python.py | 6 +----- | 10 | mesonbuild/dependencies/python.py | 6 +----- |
12 | 1 file changed, 1 insertion(+), 5 deletions(-) | 11 | 1 file changed, 1 insertion(+), 5 deletions(-) |
13 | 12 | ||
14 | diff --git a/mesonbuild/dependencies/python.py b/mesonbuild/dependencies/python.py | 13 | diff --git a/mesonbuild/dependencies/python.py b/mesonbuild/dependencies/python.py |
15 | index 9aea6bd..8c13ede 100644 | 14 | index b9b17f8..a305afb 100644 |
16 | --- a/mesonbuild/dependencies/python.py | 15 | --- a/mesonbuild/dependencies/python.py |
17 | +++ b/mesonbuild/dependencies/python.py | 16 | +++ b/mesonbuild/dependencies/python.py |
18 | @@ -380,9 +380,6 @@ def python_factory(env: 'Environment', for_machine: 'MachineChoice', | 17 | @@ -381,9 +381,6 @@ def python_factory(env: 'Environment', for_machine: 'MachineChoice', |
19 | empty.name = 'python' | 18 | empty.name = 'python' |
20 | return empty | 19 | return empty |
21 | 20 | ||
@@ -25,7 +24,7 @@ index 9aea6bd..8c13ede 100644 | |||
25 | try: | 24 | try: |
26 | return PythonPkgConfigDependency(name, env, kwargs, installation, True) | 25 | return PythonPkgConfigDependency(name, env, kwargs, installation, True) |
27 | finally: | 26 | finally: |
28 | @@ -391,8 +388,7 @@ def python_factory(env: 'Environment', for_machine: 'MachineChoice', | 27 | @@ -392,8 +389,7 @@ def python_factory(env: 'Environment', for_machine: 'MachineChoice', |
29 | os.environ[name] = value | 28 | os.environ[name] = value |
30 | elif name in os.environ: | 29 | elif name in os.environ: |
31 | del os.environ[name] | 30 | del os.environ[name] |
diff --git a/meta/recipes-devtools/meson/meson/0002-Support-building-allarch-recipes-again.patch b/meta/recipes-devtools/meson/meson/0002-Support-building-allarch-recipes-again.patch index a8396f30bb..fa5ea57d5b 100644 --- a/meta/recipes-devtools/meson/meson/0002-Support-building-allarch-recipes-again.patch +++ b/meta/recipes-devtools/meson/meson/0002-Support-building-allarch-recipes-again.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 6fb8db54929b40e1fd7ac949ef44f0d37df0bae9 Mon Sep 17 00:00:00 2001 | 1 | From 7f69bfabb87d311d7409ea6699f7dee8e9b3a95b Mon Sep 17 00:00:00 2001 |
2 | From: Peter Kjellerstedt <pkj@axis.com> | 2 | From: Peter Kjellerstedt <pkj@axis.com> |
3 | Date: Thu, 26 Jul 2018 16:32:49 +0200 | 3 | Date: Thu, 26 Jul 2018 16:32:49 +0200 |
4 | Subject: [PATCH] Support building allarch recipes again | 4 | Subject: [PATCH] Support building allarch recipes again |
@@ -7,16 +7,15 @@ This registers "allarch" as a known CPU family. | |||
7 | 7 | ||
8 | Upstream-Status: Inappropriate [OE specific] | 8 | Upstream-Status: Inappropriate [OE specific] |
9 | Signed-off-by: Peter Kjellerstedt <peter.kjellerstedt@axis.com> | 9 | Signed-off-by: Peter Kjellerstedt <peter.kjellerstedt@axis.com> |
10 | |||
11 | --- | 10 | --- |
12 | mesonbuild/envconfig.py | 1 + | 11 | mesonbuild/envconfig.py | 1 + |
13 | 1 file changed, 1 insertion(+) | 12 | 1 file changed, 1 insertion(+) |
14 | 13 | ||
15 | diff --git a/mesonbuild/envconfig.py b/mesonbuild/envconfig.py | 14 | diff --git a/mesonbuild/envconfig.py b/mesonbuild/envconfig.py |
16 | index a35c356..436355f 100644 | 15 | index b44e60c..c70e8cb 100644 |
17 | --- a/mesonbuild/envconfig.py | 16 | --- a/mesonbuild/envconfig.py |
18 | +++ b/mesonbuild/envconfig.py | 17 | +++ b/mesonbuild/envconfig.py |
19 | @@ -38,6 +38,7 @@ from pathlib import Path | 18 | @@ -28,6 +28,7 @@ from pathlib import Path |
20 | 19 | ||
21 | 20 | ||
22 | known_cpu_families = ( | 21 | known_cpu_families = ( |
diff --git a/meta/recipes-devtools/meson/meson_1.3.1.bb b/meta/recipes-devtools/meson/meson_1.4.0.bb index 3d2eedca10..5db100ff1c 100644 --- a/meta/recipes-devtools/meson/meson_1.3.1.bb +++ b/meta/recipes-devtools/meson/meson_1.4.0.bb | |||
@@ -15,7 +15,7 @@ SRC_URI = "${GITHUB_BASE_URI}/download/${PV}/meson-${PV}.tar.gz \ | |||
15 | file://0001-Make-CPU-family-warnings-fatal.patch \ | 15 | file://0001-Make-CPU-family-warnings-fatal.patch \ |
16 | file://0002-Support-building-allarch-recipes-again.patch \ | 16 | file://0002-Support-building-allarch-recipes-again.patch \ |
17 | " | 17 | " |
18 | SRC_URI[sha256sum] = "6020568bdede1643d4fb41e28215be38eff5d52da28ac7d125457c59e0032ad7" | 18 | SRC_URI[sha256sum] = "8fd6630c25c27f1489a8a0392b311a60481a3c161aa699b330e25935b750138d" |
19 | UPSTREAM_CHECK_REGEX = "(?P<pver>\d+(\.\d+)+)$" | 19 | UPSTREAM_CHECK_REGEX = "(?P<pver>\d+(\.\d+)+)$" |
20 | 20 | ||
21 | inherit python_setuptools_build_meta github-releases | 21 | inherit python_setuptools_build_meta github-releases |
@@ -86,7 +86,7 @@ ar = ${@meson_array('BUILD_AR', d)} | |||
86 | nm = ${@meson_array('BUILD_NM', d)} | 86 | nm = ${@meson_array('BUILD_NM', d)} |
87 | strip = ${@meson_array('BUILD_STRIP', d)} | 87 | strip = ${@meson_array('BUILD_STRIP', d)} |
88 | readelf = ${@meson_array('BUILD_READELF', d)} | 88 | readelf = ${@meson_array('BUILD_READELF', d)} |
89 | pkgconfig = 'pkg-config-native' | 89 | pkg-config = 'pkg-config-native' |
90 | 90 | ||
91 | [built-in options] | 91 | [built-in options] |
92 | c_args = ['-isystem@{OECORE_NATIVE_SYSROOT}${includedir_native}' , ${@var_list2str('BUILD_OPTIMIZATION', d)}] | 92 | c_args = ['-isystem@{OECORE_NATIVE_SYSROOT}${includedir_native}' , ${@var_list2str('BUILD_OPTIMIZATION', d)}] |
@@ -104,7 +104,7 @@ cpp = @CXX | |||
104 | ar = @AR | 104 | ar = @AR |
105 | nm = @NM | 105 | nm = @NM |
106 | strip = @STRIP | 106 | strip = @STRIP |
107 | pkgconfig = 'pkg-config' | 107 | pkg-config = 'pkg-config' |
108 | 108 | ||
109 | [built-in options] | 109 | [built-in options] |
110 | c_args = @CFLAGS | 110 | c_args = @CFLAGS |
diff --git a/meta/recipes-devtools/mmc/mmc-utils_git.bb b/meta/recipes-devtools/mmc/mmc-utils_git.bb index 7c6be93a9c..0bfd5c1cc8 100644 --- a/meta/recipes-devtools/mmc/mmc-utils_git.bb +++ b/meta/recipes-devtools/mmc/mmc-utils_git.bb | |||
@@ -1,5 +1,5 @@ | |||
1 | SUMMARY = "Userspace tools for MMC/SD devices" | 1 | SUMMARY = "Userspace tools for MMC/SD devices" |
2 | HOMEPAGE = "http://git.kernel.org/cgit/linux/kernel/git/cjb/mmc-utils.git/" | 2 | HOMEPAGE = "https://git.kernel.org/pub/scm/utils/mmc/mmc-utils.git/" |
3 | DESCRIPTION = "${SUMMARY}" | 3 | DESCRIPTION = "${SUMMARY}" |
4 | LICENSE = "GPL-2.0-only" | 4 | LICENSE = "GPL-2.0-only" |
5 | LIC_FILES_CHKSUM = "file://mmc.c;beginline=1;endline=20;md5=fae32792e20f4d27ade1c5a762d16b7d" | 5 | LIC_FILES_CHKSUM = "file://mmc.c;beginline=1;endline=20;md5=fae32792e20f4d27ade1c5a762d16b7d" |
@@ -9,7 +9,7 @@ SRCREV = "f757f413dea4a143ad7c3b48b8264176f0499a82" | |||
9 | 9 | ||
10 | PV = "0.1+git" | 10 | PV = "0.1+git" |
11 | 11 | ||
12 | SRC_URI = "git://git.kernel.org/pub/scm/linux/kernel/git/cjb/mmc-utils.git;branch=${SRCBRANCH};protocol=https" | 12 | SRC_URI = "git://git.kernel.org/pub/scm/utils/mmc/mmc-utils.git;branch=${SRCBRANCH};protocol=https" |
13 | UPSTREAM_CHECK_COMMITS = "1" | 13 | UPSTREAM_CHECK_COMMITS = "1" |
14 | 14 | ||
15 | S = "${WORKDIR}/git" | 15 | S = "${WORKDIR}/git" |
diff --git a/meta/recipes-devtools/ninja/ninja_1.12.0.bb b/meta/recipes-devtools/ninja/ninja_1.12.1.bb index ef7f80d0ae..9f5c014b9b 100644 --- a/meta/recipes-devtools/ninja/ninja_1.12.0.bb +++ b/meta/recipes-devtools/ninja/ninja_1.12.1.bb | |||
@@ -6,7 +6,7 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=a81586a64ad4e476c791cda7e2f2c52e" | |||
6 | 6 | ||
7 | DEPENDS = "re2c-native ninja-native" | 7 | DEPENDS = "re2c-native ninja-native" |
8 | 8 | ||
9 | SRCREV = "65d0dfcbbea6b8ca7d8a3a0f673ecb522379e43c" | 9 | SRCREV = "2daa09ba270b0a43e1929d29b073348aa985dfaa" |
10 | 10 | ||
11 | SRC_URI = "git://github.com/ninja-build/ninja.git;branch=release;protocol=https" | 11 | SRC_URI = "git://github.com/ninja-build/ninja.git;branch=release;protocol=https" |
12 | UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>.*)" | 12 | UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>.*)" |
diff --git a/meta/recipes-devtools/opkg/opkg-arch-config_1.0.bb b/meta/recipes-devtools/opkg/opkg-arch-config_1.0.bb index 726a259a8c..4db35c1092 100644 --- a/meta/recipes-devtools/opkg/opkg-arch-config_1.0.bb +++ b/meta/recipes-devtools/opkg/opkg-arch-config_1.0.bb | |||
@@ -3,7 +3,8 @@ HOMEPAGE = "https://git.yoctoproject.org/opkg/" | |||
3 | LICENSE = "MIT" | 3 | LICENSE = "MIT" |
4 | PACKAGE_ARCH = "${MACHINE_ARCH}" | 4 | PACKAGE_ARCH = "${MACHINE_ARCH}" |
5 | 5 | ||
6 | S = "${WORKDIR}" | 6 | S = "${WORKDIR}/sources" |
7 | UNPACKDIR = "${S}" | ||
7 | 8 | ||
8 | do_compile() { | 9 | do_compile() { |
9 | mkdir -p ${S}/${sysconfdir}/opkg/ | 10 | mkdir -p ${S}/${sysconfdir}/opkg/ |
diff --git a/meta/recipes-devtools/perl-cross/perlcross_1.5.2.bb b/meta/recipes-devtools/perl-cross/perlcross_1.5.2.bb index b41c182fad..48bda7e4b6 100644 --- a/meta/recipes-devtools/perl-cross/perlcross_1.5.2.bb +++ b/meta/recipes-devtools/perl-cross/perlcross_1.5.2.bb | |||
@@ -6,7 +6,7 @@ SECTION = "devel" | |||
6 | LICENSE = "Artistic-1.0 | GPL-1.0-or-later" | 6 | LICENSE = "Artistic-1.0 | GPL-1.0-or-later" |
7 | # README.md is taken from https://github.com/arsv/perl-cross/blob/master/README.md | 7 | # README.md is taken from https://github.com/arsv/perl-cross/blob/master/README.md |
8 | # but is not provided inside the release tarballs | 8 | # but is not provided inside the release tarballs |
9 | LIC_FILES_CHKSUM = "file://${WORKDIR}/README.md;md5=252fcce2026b765fee1ad74d2fb07a3b" | 9 | LIC_FILES_CHKSUM = "file://${UNPACKDIR}/README.md;md5=252fcce2026b765fee1ad74d2fb07a3b" |
10 | 10 | ||
11 | inherit allarch github-releases | 11 | inherit allarch github-releases |
12 | 12 | ||
diff --git a/meta/recipes-devtools/perl/files/determinism.patch b/meta/recipes-devtools/perl/files/determinism.patch index aa85ccef10..f2b1111552 100644 --- a/meta/recipes-devtools/perl/files/determinism.patch +++ b/meta/recipes-devtools/perl/files/determinism.patch | |||
@@ -8,9 +8,9 @@ b) Sort the order of the module lists from configure_mods.sh since otherwise | |||
8 | the result isn't the same leading to makefile differences. | 8 | the result isn't the same leading to makefile differences. |
9 | Reported upstream: https://github.com/arsv/perl-cross/issues/88 | 9 | Reported upstream: https://github.com/arsv/perl-cross/issues/88 |
10 | 10 | ||
11 | c) Sort the Encode::Byte byte_t.fnm file output (and the makefile depends whilst | 11 | c) Sort the Encode::Byte byte_t.fnm file output (and the makefile depends whilst |
12 | there for good measure) | 12 | there for good measure) |
13 | This needs to go to upstream perl (not done) | 13 | Submitted to upstream perl: https://github.com/dankogai/p5-encode/pull/179 |
14 | 14 | ||
15 | d) Use bash for perl-cross configure since otherwise trnl gets set to "\n" with bash | 15 | d) Use bash for perl-cross configure since otherwise trnl gets set to "\n" with bash |
16 | and "" with dash | 16 | and "" with dash |
@@ -18,7 +18,7 @@ d) Use bash for perl-cross configure since otherwise trnl gets set to "\n" with | |||
18 | 18 | ||
19 | RP 2020/2/7 | 19 | RP 2020/2/7 |
20 | 20 | ||
21 | Upstream-Status: Pending [75% submitted] | 21 | Upstream-Status: Submitted [see links above] |
22 | Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org | 22 | Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org |
23 | 23 | ||
24 | Index: perl-5.30.1/cpan/Encode/Byte/Makefile.PL | 24 | Index: perl-5.30.1/cpan/Encode/Byte/Makefile.PL |
diff --git a/meta/recipes-devtools/perl/perl_5.38.2.bb b/meta/recipes-devtools/perl/perl_5.38.2.bb index b6c9cda7ae..63909c242d 100644 --- a/meta/recipes-devtools/perl/perl_5.38.2.bb +++ b/meta/recipes-devtools/perl/perl_5.38.2.bb | |||
@@ -331,7 +331,7 @@ python split_perl_packages () { | |||
331 | d.setVar(d.expand("RDEPENDS:${PN}-modules"), ' '.join(packages)) | 331 | d.setVar(d.expand("RDEPENDS:${PN}-modules"), ' '.join(packages)) |
332 | 332 | ||
333 | # Read the pre-generated dependency file, and use it to set module dependecies | 333 | # Read the pre-generated dependency file, and use it to set module dependecies |
334 | for line in open(d.expand("${WORKDIR}") + '/perl-rdepends.txt').readlines(): | 334 | for line in open(d.getVar("UNPACKDIR") + '/perl-rdepends.txt').readlines(): |
335 | splitline = line.split() | 335 | splitline = line.split() |
336 | # Filter empty lines and comments | 336 | # Filter empty lines and comments |
337 | if len(splitline) == 0 or splitline[0].startswith("#"): | 337 | if len(splitline) == 0 or splitline[0].startswith("#"): |
diff --git a/meta/recipes-devtools/pseudo/pseudo_git.bb b/meta/recipes-devtools/pseudo/pseudo_git.bb index c70b509233..5f32b3777a 100644 --- a/meta/recipes-devtools/pseudo/pseudo_git.bb +++ b/meta/recipes-devtools/pseudo/pseudo_git.bb | |||
@@ -14,7 +14,7 @@ SRC_URI:append:class-nativesdk = " \ | |||
14 | file://older-glibc-symbols.patch" | 14 | file://older-glibc-symbols.patch" |
15 | SRC_URI[prebuilt.sha256sum] = "ed9f456856e9d86359f169f46a70ad7be4190d6040282b84c8d97b99072485aa" | 15 | SRC_URI[prebuilt.sha256sum] = "ed9f456856e9d86359f169f46a70ad7be4190d6040282b84c8d97b99072485aa" |
16 | 16 | ||
17 | SRCREV = "0d292df61aeb886ae8ca33d9edc3b6d0ff5c0f0f" | 17 | SRCREV = "e11ae91da7d0711f5e33ea9dfbf1875dde3c1734" |
18 | S = "${WORKDIR}/git" | 18 | S = "${WORKDIR}/git" |
19 | PV = "1.9.0+git" | 19 | PV = "1.9.0+git" |
20 | 20 | ||
diff --git a/meta/recipes-devtools/python/python3-bcrypt-crates.inc b/meta/recipes-devtools/python/python3-bcrypt-crates.inc index 0e7479f0b5..7d9e1f3b59 100644 --- a/meta/recipes-devtools/python/python3-bcrypt-crates.inc +++ b/meta/recipes-devtools/python/python3-bcrypt-crates.inc | |||
@@ -2,113 +2,115 @@ | |||
2 | 2 | ||
3 | # from src/_bcrypt/Cargo.lock | 3 | # from src/_bcrypt/Cargo.lock |
4 | SRC_URI += " \ | 4 | SRC_URI += " \ |
5 | crate://crates.io/autocfg/1.1.0 \ | 5 | crate://crates.io/autocfg/1.3.0 \ |
6 | crate://crates.io/base64/0.21.5 \ | 6 | crate://crates.io/base64/0.22.1 \ |
7 | crate://crates.io/bcrypt/0.15.0 \ | 7 | crate://crates.io/bcrypt/0.15.1 \ |
8 | crate://crates.io/bcrypt-pbkdf/0.10.0 \ | 8 | crate://crates.io/bcrypt-pbkdf/0.10.0 \ |
9 | crate://crates.io/bitflags/1.3.2 \ | 9 | crate://crates.io/bitflags/2.5.0 \ |
10 | crate://crates.io/block-buffer/0.10.4 \ | 10 | crate://crates.io/block-buffer/0.10.4 \ |
11 | crate://crates.io/blowfish/0.9.1 \ | 11 | crate://crates.io/blowfish/0.9.1 \ |
12 | crate://crates.io/byteorder/1.5.0 \ | 12 | crate://crates.io/byteorder/1.5.0 \ |
13 | crate://crates.io/cfg-if/1.0.0 \ | 13 | crate://crates.io/cfg-if/1.0.0 \ |
14 | crate://crates.io/cipher/0.4.4 \ | 14 | crate://crates.io/cipher/0.4.4 \ |
15 | crate://crates.io/cpufeatures/0.2.11 \ | 15 | crate://crates.io/cpufeatures/0.2.12 \ |
16 | crate://crates.io/crypto-common/0.1.6 \ | 16 | crate://crates.io/crypto-common/0.1.6 \ |
17 | crate://crates.io/digest/0.10.7 \ | 17 | crate://crates.io/digest/0.10.7 \ |
18 | crate://crates.io/generic-array/0.14.7 \ | 18 | crate://crates.io/generic-array/0.14.7 \ |
19 | crate://crates.io/getrandom/0.2.11 \ | 19 | crate://crates.io/getrandom/0.2.14 \ |
20 | crate://crates.io/heck/0.4.1 \ | 20 | crate://crates.io/heck/0.4.1 \ |
21 | crate://crates.io/indoc/2.0.4 \ | 21 | crate://crates.io/indoc/2.0.5 \ |
22 | crate://crates.io/inout/0.1.3 \ | 22 | crate://crates.io/inout/0.1.3 \ |
23 | crate://crates.io/libc/0.2.151 \ | 23 | crate://crates.io/libc/0.2.154 \ |
24 | crate://crates.io/lock_api/0.4.11 \ | 24 | crate://crates.io/lock_api/0.4.12 \ |
25 | crate://crates.io/memoffset/0.9.0 \ | 25 | crate://crates.io/memoffset/0.9.1 \ |
26 | crate://crates.io/once_cell/1.19.0 \ | 26 | crate://crates.io/once_cell/1.19.0 \ |
27 | crate://crates.io/parking_lot/0.12.1 \ | 27 | crate://crates.io/parking_lot/0.12.2 \ |
28 | crate://crates.io/parking_lot_core/0.9.9 \ | 28 | crate://crates.io/parking_lot_core/0.9.10 \ |
29 | crate://crates.io/pbkdf2/0.12.2 \ | 29 | crate://crates.io/pbkdf2/0.12.2 \ |
30 | crate://crates.io/portable-atomic/1.6.0 \ | 30 | crate://crates.io/portable-atomic/1.6.0 \ |
31 | crate://crates.io/proc-macro2/1.0.70 \ | 31 | crate://crates.io/proc-macro2/1.0.81 \ |
32 | crate://crates.io/pyo3/0.20.3 \ | 32 | crate://crates.io/pyo3/0.21.2 \ |
33 | crate://crates.io/pyo3-build-config/0.20.3 \ | 33 | crate://crates.io/pyo3-build-config/0.21.2 \ |
34 | crate://crates.io/pyo3-ffi/0.20.3 \ | 34 | crate://crates.io/pyo3-ffi/0.21.2 \ |
35 | crate://crates.io/pyo3-macros/0.20.3 \ | 35 | crate://crates.io/pyo3-macros/0.21.2 \ |
36 | crate://crates.io/pyo3-macros-backend/0.20.3 \ | 36 | crate://crates.io/pyo3-macros-backend/0.21.2 \ |
37 | crate://crates.io/quote/1.0.33 \ | 37 | crate://crates.io/quote/1.0.36 \ |
38 | crate://crates.io/redox_syscall/0.4.1 \ | 38 | crate://crates.io/redox_syscall/0.5.1 \ |
39 | crate://crates.io/scopeguard/1.2.0 \ | 39 | crate://crates.io/scopeguard/1.2.0 \ |
40 | crate://crates.io/sha2/0.10.8 \ | 40 | crate://crates.io/sha2/0.10.8 \ |
41 | crate://crates.io/smallvec/1.11.2 \ | 41 | crate://crates.io/smallvec/1.13.2 \ |
42 | crate://crates.io/subtle/2.5.0 \ | 42 | crate://crates.io/subtle/2.5.0 \ |
43 | crate://crates.io/syn/2.0.41 \ | 43 | crate://crates.io/syn/2.0.60 \ |
44 | crate://crates.io/target-lexicon/0.12.12 \ | 44 | crate://crates.io/target-lexicon/0.12.14 \ |
45 | crate://crates.io/typenum/1.17.0 \ | 45 | crate://crates.io/typenum/1.17.0 \ |
46 | crate://crates.io/unicode-ident/1.0.12 \ | 46 | crate://crates.io/unicode-ident/1.0.12 \ |
47 | crate://crates.io/unindent/0.2.3 \ | 47 | crate://crates.io/unindent/0.2.3 \ |
48 | crate://crates.io/version_check/0.9.4 \ | 48 | crate://crates.io/version_check/0.9.4 \ |
49 | crate://crates.io/wasi/0.11.0+wasi-snapshot-preview1 \ | 49 | crate://crates.io/wasi/0.11.0+wasi-snapshot-preview1 \ |
50 | crate://crates.io/windows-targets/0.48.5 \ | 50 | crate://crates.io/windows-targets/0.52.5 \ |
51 | crate://crates.io/windows_aarch64_gnullvm/0.48.5 \ | 51 | crate://crates.io/windows_aarch64_gnullvm/0.52.5 \ |
52 | crate://crates.io/windows_aarch64_msvc/0.48.5 \ | 52 | crate://crates.io/windows_aarch64_msvc/0.52.5 \ |
53 | crate://crates.io/windows_i686_gnu/0.48.5 \ | 53 | crate://crates.io/windows_i686_gnu/0.52.5 \ |
54 | crate://crates.io/windows_i686_msvc/0.48.5 \ | 54 | crate://crates.io/windows_i686_gnullvm/0.52.5 \ |
55 | crate://crates.io/windows_x86_64_gnu/0.48.5 \ | 55 | crate://crates.io/windows_i686_msvc/0.52.5 \ |
56 | crate://crates.io/windows_x86_64_gnullvm/0.48.5 \ | 56 | crate://crates.io/windows_x86_64_gnu/0.52.5 \ |
57 | crate://crates.io/windows_x86_64_msvc/0.48.5 \ | 57 | crate://crates.io/windows_x86_64_gnullvm/0.52.5 \ |
58 | crate://crates.io/windows_x86_64_msvc/0.52.5 \ | ||
58 | crate://crates.io/zeroize/1.7.0 \ | 59 | crate://crates.io/zeroize/1.7.0 \ |
59 | " | 60 | " |
60 | 61 | ||
61 | SRC_URI[autocfg-1.1.0.sha256sum] = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" | 62 | SRC_URI[autocfg-1.3.0.sha256sum] = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" |
62 | SRC_URI[base64-0.21.5.sha256sum] = "35636a1494ede3b646cc98f74f8e62c773a38a659ebc777a2cf26b9b74171df9" | 63 | SRC_URI[base64-0.22.1.sha256sum] = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" |
63 | SRC_URI[bcrypt-0.15.0.sha256sum] = "28d1c9c15093eb224f0baa400f38fcd713fc1391a6f1c389d886beef146d60a3" | 64 | SRC_URI[bcrypt-0.15.1.sha256sum] = "e65938ed058ef47d92cf8b346cc76ef48984572ade631927e9937b5ffc7662c7" |
64 | SRC_URI[bcrypt-pbkdf-0.10.0.sha256sum] = "6aeac2e1fe888769f34f05ac343bbef98b14d1ffb292ab69d4608b3abc86f2a2" | 65 | SRC_URI[bcrypt-pbkdf-0.10.0.sha256sum] = "6aeac2e1fe888769f34f05ac343bbef98b14d1ffb292ab69d4608b3abc86f2a2" |
65 | SRC_URI[bitflags-1.3.2.sha256sum] = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" | 66 | SRC_URI[bitflags-2.5.0.sha256sum] = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" |
66 | SRC_URI[block-buffer-0.10.4.sha256sum] = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" | 67 | SRC_URI[block-buffer-0.10.4.sha256sum] = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" |
67 | SRC_URI[blowfish-0.9.1.sha256sum] = "e412e2cd0f2b2d93e02543ceae7917b3c70331573df19ee046bcbc35e45e87d7" | 68 | SRC_URI[blowfish-0.9.1.sha256sum] = "e412e2cd0f2b2d93e02543ceae7917b3c70331573df19ee046bcbc35e45e87d7" |
68 | SRC_URI[byteorder-1.5.0.sha256sum] = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" | 69 | SRC_URI[byteorder-1.5.0.sha256sum] = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" |
69 | SRC_URI[cfg-if-1.0.0.sha256sum] = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" | 70 | SRC_URI[cfg-if-1.0.0.sha256sum] = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" |
70 | SRC_URI[cipher-0.4.4.sha256sum] = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" | 71 | SRC_URI[cipher-0.4.4.sha256sum] = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" |
71 | SRC_URI[cpufeatures-0.2.11.sha256sum] = "ce420fe07aecd3e67c5f910618fe65e94158f6dcc0adf44e00d69ce2bdfe0fd0" | 72 | SRC_URI[cpufeatures-0.2.12.sha256sum] = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504" |
72 | SRC_URI[crypto-common-0.1.6.sha256sum] = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" | 73 | SRC_URI[crypto-common-0.1.6.sha256sum] = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" |
73 | SRC_URI[digest-0.10.7.sha256sum] = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" | 74 | SRC_URI[digest-0.10.7.sha256sum] = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" |
74 | SRC_URI[generic-array-0.14.7.sha256sum] = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" | 75 | SRC_URI[generic-array-0.14.7.sha256sum] = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" |
75 | SRC_URI[getrandom-0.2.11.sha256sum] = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f" | 76 | SRC_URI[getrandom-0.2.14.sha256sum] = "94b22e06ecb0110981051723910cbf0b5f5e09a2062dd7663334ee79a9d1286c" |
76 | SRC_URI[heck-0.4.1.sha256sum] = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" | 77 | SRC_URI[heck-0.4.1.sha256sum] = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" |
77 | SRC_URI[indoc-2.0.4.sha256sum] = "1e186cfbae8084e513daff4240b4797e342f988cecda4fb6c939150f96315fd8" | 78 | SRC_URI[indoc-2.0.5.sha256sum] = "b248f5224d1d606005e02c97f5aa4e88eeb230488bcc03bc9ca4d7991399f2b5" |
78 | SRC_URI[inout-0.1.3.sha256sum] = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" | 79 | SRC_URI[inout-0.1.3.sha256sum] = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" |
79 | SRC_URI[libc-0.2.151.sha256sum] = "302d7ab3130588088d277783b1e2d2e10c9e9e4a16dd9050e6ec93fb3e7048f4" | 80 | SRC_URI[libc-0.2.154.sha256sum] = "ae743338b92ff9146ce83992f766a31066a91a8c84a45e0e9f21e7cf6de6d346" |
80 | SRC_URI[lock_api-0.4.11.sha256sum] = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45" | 81 | SRC_URI[lock_api-0.4.12.sha256sum] = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" |
81 | SRC_URI[memoffset-0.9.0.sha256sum] = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" | 82 | SRC_URI[memoffset-0.9.1.sha256sum] = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a" |
82 | SRC_URI[once_cell-1.19.0.sha256sum] = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" | 83 | SRC_URI[once_cell-1.19.0.sha256sum] = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" |
83 | SRC_URI[parking_lot-0.12.1.sha256sum] = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" | 84 | SRC_URI[parking_lot-0.12.2.sha256sum] = "7e4af0ca4f6caed20e900d564c242b8e5d4903fdacf31d3daf527b66fe6f42fb" |
84 | SRC_URI[parking_lot_core-0.9.9.sha256sum] = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" | 85 | SRC_URI[parking_lot_core-0.9.10.sha256sum] = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" |
85 | SRC_URI[pbkdf2-0.12.2.sha256sum] = "f8ed6a7761f76e3b9f92dfb0a60a6a6477c61024b775147ff0973a02653abaf2" | 86 | SRC_URI[pbkdf2-0.12.2.sha256sum] = "f8ed6a7761f76e3b9f92dfb0a60a6a6477c61024b775147ff0973a02653abaf2" |
86 | SRC_URI[portable-atomic-1.6.0.sha256sum] = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0" | 87 | SRC_URI[portable-atomic-1.6.0.sha256sum] = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0" |
87 | SRC_URI[proc-macro2-1.0.70.sha256sum] = "39278fbbf5fb4f646ce651690877f89d1c5811a3d4acb27700c1cb3cdb78fd3b" | 88 | SRC_URI[proc-macro2-1.0.81.sha256sum] = "3d1597b0c024618f09a9c3b8655b7e430397a36d23fdafec26d6965e9eec3eba" |
88 | SRC_URI[pyo3-0.20.3.sha256sum] = "53bdbb96d49157e65d45cc287af5f32ffadd5f4761438b527b055fb0d4bb8233" | 89 | SRC_URI[pyo3-0.21.2.sha256sum] = "a5e00b96a521718e08e03b1a622f01c8a8deb50719335de3f60b3b3950f069d8" |
89 | SRC_URI[pyo3-build-config-0.20.3.sha256sum] = "deaa5745de3f5231ce10517a1f5dd97d53e5a2fd77aa6b5842292085831d48d7" | 90 | SRC_URI[pyo3-build-config-0.21.2.sha256sum] = "7883df5835fafdad87c0d888b266c8ec0f4c9ca48a5bed6bbb592e8dedee1b50" |
90 | SRC_URI[pyo3-ffi-0.20.3.sha256sum] = "62b42531d03e08d4ef1f6e85a2ed422eb678b8cd62b762e53891c05faf0d4afa" | 91 | SRC_URI[pyo3-ffi-0.21.2.sha256sum] = "01be5843dc60b916ab4dad1dca6d20b9b4e6ddc8e15f50c47fe6d85f1fb97403" |
91 | SRC_URI[pyo3-macros-0.20.3.sha256sum] = "7305c720fa01b8055ec95e484a6eca7a83c841267f0dd5280f0c8b8551d2c158" | 92 | SRC_URI[pyo3-macros-0.21.2.sha256sum] = "77b34069fc0682e11b31dbd10321cbf94808394c56fd996796ce45217dfac53c" |
92 | SRC_URI[pyo3-macros-backend-0.20.3.sha256sum] = "7c7e9b68bb9c3149c5b0cade5d07f953d6d125eb4337723c4ccdb665f1f96185" | 93 | SRC_URI[pyo3-macros-backend-0.21.2.sha256sum] = "08260721f32db5e1a5beae69a55553f56b99bd0e1c3e6e0a5e8851a9d0f5a85c" |
93 | SRC_URI[quote-1.0.33.sha256sum] = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" | 94 | SRC_URI[quote-1.0.36.sha256sum] = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" |
94 | SRC_URI[redox_syscall-0.4.1.sha256sum] = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" | 95 | SRC_URI[redox_syscall-0.5.1.sha256sum] = "469052894dcb553421e483e4209ee581a45100d31b4018de03e5a7ad86374a7e" |
95 | SRC_URI[scopeguard-1.2.0.sha256sum] = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" | 96 | SRC_URI[scopeguard-1.2.0.sha256sum] = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" |
96 | SRC_URI[sha2-0.10.8.sha256sum] = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" | 97 | SRC_URI[sha2-0.10.8.sha256sum] = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" |
97 | SRC_URI[smallvec-1.11.2.sha256sum] = "4dccd0940a2dcdf68d092b8cbab7dc0ad8fa938bf95787e1b916b0e3d0e8e970" | 98 | SRC_URI[smallvec-1.13.2.sha256sum] = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" |
98 | SRC_URI[subtle-2.5.0.sha256sum] = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" | 99 | SRC_URI[subtle-2.5.0.sha256sum] = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" |
99 | SRC_URI[syn-2.0.41.sha256sum] = "44c8b28c477cc3bf0e7966561e3460130e1255f7a1cf71931075f1c5e7a7e269" | 100 | SRC_URI[syn-2.0.60.sha256sum] = "909518bc7b1c9b779f1bbf07f2929d35af9f0f37e47c6e9ef7f9dddc1e1821f3" |
100 | SRC_URI[target-lexicon-0.12.12.sha256sum] = "14c39fd04924ca3a864207c66fc2cd7d22d7c016007f9ce846cbb9326331930a" | 101 | SRC_URI[target-lexicon-0.12.14.sha256sum] = "e1fc403891a21bcfb7c37834ba66a547a8f402146eba7265b5a6d88059c9ff2f" |
101 | SRC_URI[typenum-1.17.0.sha256sum] = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" | 102 | SRC_URI[typenum-1.17.0.sha256sum] = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" |
102 | SRC_URI[unicode-ident-1.0.12.sha256sum] = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" | 103 | SRC_URI[unicode-ident-1.0.12.sha256sum] = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" |
103 | SRC_URI[unindent-0.2.3.sha256sum] = "c7de7d73e1754487cb58364ee906a499937a0dfabd86bcb980fa99ec8c8fa2ce" | 104 | SRC_URI[unindent-0.2.3.sha256sum] = "c7de7d73e1754487cb58364ee906a499937a0dfabd86bcb980fa99ec8c8fa2ce" |
104 | SRC_URI[version_check-0.9.4.sha256sum] = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" | 105 | SRC_URI[version_check-0.9.4.sha256sum] = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" |
105 | SRC_URI[wasi-0.11.0+wasi-snapshot-preview1.sha256sum] = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" | 106 | SRC_URI[wasi-0.11.0+wasi-snapshot-preview1.sha256sum] = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" |
106 | SRC_URI[windows-targets-0.48.5.sha256sum] = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" | 107 | SRC_URI[windows-targets-0.52.5.sha256sum] = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb" |
107 | SRC_URI[windows_aarch64_gnullvm-0.48.5.sha256sum] = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" | 108 | SRC_URI[windows_aarch64_gnullvm-0.52.5.sha256sum] = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263" |
108 | SRC_URI[windows_aarch64_msvc-0.48.5.sha256sum] = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" | 109 | SRC_URI[windows_aarch64_msvc-0.52.5.sha256sum] = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6" |
109 | SRC_URI[windows_i686_gnu-0.48.5.sha256sum] = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" | 110 | SRC_URI[windows_i686_gnu-0.52.5.sha256sum] = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670" |
110 | SRC_URI[windows_i686_msvc-0.48.5.sha256sum] = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" | 111 | SRC_URI[windows_i686_gnullvm-0.52.5.sha256sum] = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9" |
111 | SRC_URI[windows_x86_64_gnu-0.48.5.sha256sum] = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" | 112 | SRC_URI[windows_i686_msvc-0.52.5.sha256sum] = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf" |
112 | SRC_URI[windows_x86_64_gnullvm-0.48.5.sha256sum] = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" | 113 | SRC_URI[windows_x86_64_gnu-0.52.5.sha256sum] = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9" |
113 | SRC_URI[windows_x86_64_msvc-0.48.5.sha256sum] = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" | 114 | SRC_URI[windows_x86_64_gnullvm-0.52.5.sha256sum] = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596" |
115 | SRC_URI[windows_x86_64_msvc-0.52.5.sha256sum] = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" | ||
114 | SRC_URI[zeroize-1.7.0.sha256sum] = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d" | 116 | SRC_URI[zeroize-1.7.0.sha256sum] = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d" |
diff --git a/meta/recipes-devtools/python/python3-bcrypt/0001-Bump-pyo3-from-0.20.0-to-0.20.3-in-src-_bcrypt-746.patch b/meta/recipes-devtools/python/python3-bcrypt/0001-Bump-pyo3-from-0.20.0-to-0.20.3-in-src-_bcrypt-746.patch deleted file mode 100644 index 3f671fcc98..0000000000 --- a/meta/recipes-devtools/python/python3-bcrypt/0001-Bump-pyo3-from-0.20.0-to-0.20.3-in-src-_bcrypt-746.patch +++ /dev/null | |||
@@ -1,111 +0,0 @@ | |||
1 | From cfdd98b3215cc12e66190a9c7f0a32c052e3c2e7 Mon Sep 17 00:00:00 2001 | ||
2 | From: Khem Raj <raj.khem@gmail.com> | ||
3 | Date: Mon, 26 Feb 2024 18:26:30 -0800 | ||
4 | Subject: [PATCH] Bump pyo3 from 0.20.0 to 0.20.3 in /src/_bcrypt (#746) | ||
5 | |||
6 | It fixes build on hosts without 64bit atomics | ||
7 | |||
8 | Upstream-Status: Backport [https://github.com/pyca/bcrypt/commit/c2ef9350798ab59b18e8a0e04a01389858578fe0] | ||
9 | Signed-off-by: Khem Raj <raj.khem@gmail.com> | ||
10 | --- | ||
11 | src/_bcrypt/Cargo.lock | 22 ++++++++++++++-------- | ||
12 | src/_bcrypt/Cargo.toml | 2 +- | ||
13 | 2 files changed, 15 insertions(+), 9 deletions(-) | ||
14 | |||
15 | --- a/src/_bcrypt/Cargo.lock | ||
16 | +++ b/src/_bcrypt/Cargo.lock | ||
17 | @@ -233,6 +233,12 @@ dependencies = [ | ||
18 | ] | ||
19 | |||
20 | [[package]] | ||
21 | +name = "portable-atomic" | ||
22 | +version = "1.6.0" | ||
23 | +source = "registry+https://github.com/rust-lang/crates.io-index" | ||
24 | +checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0" | ||
25 | + | ||
26 | +[[package]] | ||
27 | name = "proc-macro2" | ||
28 | version = "1.0.70" | ||
29 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
30 | @@ -243,15 +249,16 @@ dependencies = [ | ||
31 | |||
32 | [[package]] | ||
33 | name = "pyo3" | ||
34 | -version = "0.20.0" | ||
35 | +version = "0.20.3" | ||
36 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
37 | -checksum = "04e8453b658fe480c3e70c8ed4e3d3ec33eb74988bd186561b0cc66b85c3bc4b" | ||
38 | +checksum = "53bdbb96d49157e65d45cc287af5f32ffadd5f4761438b527b055fb0d4bb8233" | ||
39 | dependencies = [ | ||
40 | "cfg-if", | ||
41 | "indoc", | ||
42 | "libc", | ||
43 | "memoffset", | ||
44 | "parking_lot", | ||
45 | + "portable-atomic", | ||
46 | "pyo3-build-config", | ||
47 | "pyo3-ffi", | ||
48 | "pyo3-macros", | ||
49 | @@ -260,9 +267,9 @@ dependencies = [ | ||
50 | |||
51 | [[package]] | ||
52 | name = "pyo3-build-config" | ||
53 | -version = "0.20.0" | ||
54 | +version = "0.20.3" | ||
55 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
56 | -checksum = "a96fe70b176a89cff78f2fa7b3c930081e163d5379b4dcdf993e3ae29ca662e5" | ||
57 | +checksum = "deaa5745de3f5231ce10517a1f5dd97d53e5a2fd77aa6b5842292085831d48d7" | ||
58 | dependencies = [ | ||
59 | "once_cell", | ||
60 | "target-lexicon", | ||
61 | @@ -270,9 +277,9 @@ dependencies = [ | ||
62 | |||
63 | [[package]] | ||
64 | name = "pyo3-ffi" | ||
65 | -version = "0.20.0" | ||
66 | +version = "0.20.3" | ||
67 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
68 | -checksum = "214929900fd25e6604661ed9cf349727c8920d47deff196c4e28165a6ef2a96b" | ||
69 | +checksum = "62b42531d03e08d4ef1f6e85a2ed422eb678b8cd62b762e53891c05faf0d4afa" | ||
70 | dependencies = [ | ||
71 | "libc", | ||
72 | "pyo3-build-config", | ||
73 | @@ -280,9 +287,9 @@ dependencies = [ | ||
74 | |||
75 | [[package]] | ||
76 | name = "pyo3-macros" | ||
77 | -version = "0.20.0" | ||
78 | +version = "0.20.3" | ||
79 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
80 | -checksum = "dac53072f717aa1bfa4db832b39de8c875b7c7af4f4a6fe93cdbf9264cf8383b" | ||
81 | +checksum = "7305c720fa01b8055ec95e484a6eca7a83c841267f0dd5280f0c8b8551d2c158" | ||
82 | dependencies = [ | ||
83 | "proc-macro2", | ||
84 | "pyo3-macros-backend", | ||
85 | @@ -292,12 +299,13 @@ dependencies = [ | ||
86 | |||
87 | [[package]] | ||
88 | name = "pyo3-macros-backend" | ||
89 | -version = "0.20.0" | ||
90 | +version = "0.20.3" | ||
91 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
92 | -checksum = "7774b5a8282bd4f25f803b1f0d945120be959a36c72e08e7cd031c792fdfd424" | ||
93 | +checksum = "7c7e9b68bb9c3149c5b0cade5d07f953d6d125eb4337723c4ccdb665f1f96185" | ||
94 | dependencies = [ | ||
95 | "heck", | ||
96 | "proc-macro2", | ||
97 | + "pyo3-build-config", | ||
98 | "quote", | ||
99 | "syn", | ||
100 | ] | ||
101 | --- a/src/_bcrypt/Cargo.toml | ||
102 | +++ b/src/_bcrypt/Cargo.toml | ||
103 | @@ -6,7 +6,7 @@ edition = "2018" | ||
104 | publish = false | ||
105 | |||
106 | [dependencies] | ||
107 | -pyo3 = { version = "0.20.0", features = ["abi3"] } | ||
108 | +pyo3 = { version = "0.20.3", features = ["abi3"] } | ||
109 | bcrypt = "0.15" | ||
110 | bcrypt-pbkdf = "0.10.0" | ||
111 | base64 = "0.21.5" | ||
diff --git a/meta/recipes-devtools/python/python3-bcrypt_4.1.2.bb b/meta/recipes-devtools/python/python3-bcrypt_4.1.3.bb index 93fa645f33..deb5cbdee3 100644 --- a/meta/recipes-devtools/python/python3-bcrypt_4.1.2.bb +++ b/meta/recipes-devtools/python/python3-bcrypt_4.1.3.bb | |||
@@ -6,12 +6,11 @@ HOMEPAGE = "https://pypi.org/project/bcrypt/" | |||
6 | DEPENDS += "python3-cffi-native" | 6 | DEPENDS += "python3-cffi-native" |
7 | LDFLAGS:append = "${@bb.utils.contains('DISTRO_FEATURES', 'ptest', ' -fuse-ld=bfd', '', d)}" | 7 | LDFLAGS:append = "${@bb.utils.contains('DISTRO_FEATURES', 'ptest', ' -fuse-ld=bfd', '', d)}" |
8 | 8 | ||
9 | SRC_URI[sha256sum] = "33313a1200a3ae90b75587ceac502b048b840fc69e7f7a0905b5f87fac7a1258" | 9 | SRC_URI[sha256sum] = "2ee15dd749f5952fe3f0430d0ff6b74082e159c50332a1413d51b5689cf06623" |
10 | 10 | ||
11 | inherit pypi python_setuptools3_rust ptest-cargo cargo-update-recipe-crates | 11 | inherit pypi python_setuptools3_rust ptest-cargo cargo-update-recipe-crates |
12 | 12 | ||
13 | SRC_URI += " \ | 13 | SRC_URI += " \ |
14 | file://0001-Bump-pyo3-from-0.20.0-to-0.20.3-in-src-_bcrypt-746.patch \ | ||
15 | file://run-ptest \ | 14 | file://run-ptest \ |
16 | " | 15 | " |
17 | 16 | ||
@@ -33,5 +32,4 @@ RDEPENDS:${PN}:class-target += "\ | |||
33 | python3-cffi \ | 32 | python3-cffi \ |
34 | python3-ctypes \ | 33 | python3-ctypes \ |
35 | python3-shell \ | 34 | python3-shell \ |
36 | python3-six \ | ||
37 | " | 35 | " |
diff --git a/meta/recipes-devtools/python/python3-cryptography-crates.inc b/meta/recipes-devtools/python/python3-cryptography-crates.inc index b26e22b70c..dbeda05dc4 100644 --- a/meta/recipes-devtools/python/python3-cryptography-crates.inc +++ b/meta/recipes-devtools/python/python3-cryptography-crates.inc | |||
@@ -18,9 +18,9 @@ SRC_URI += " \ | |||
18 | crate://crates.io/lock_api/0.4.11 \ | 18 | crate://crates.io/lock_api/0.4.11 \ |
19 | crate://crates.io/memoffset/0.9.0 \ | 19 | crate://crates.io/memoffset/0.9.0 \ |
20 | crate://crates.io/once_cell/1.19.0 \ | 20 | crate://crates.io/once_cell/1.19.0 \ |
21 | crate://crates.io/openssl/0.10.63 \ | 21 | crate://crates.io/openssl/0.10.64 \ |
22 | crate://crates.io/openssl-macros/0.1.1 \ | 22 | crate://crates.io/openssl-macros/0.1.1 \ |
23 | crate://crates.io/openssl-sys/0.9.99 \ | 23 | crate://crates.io/openssl-sys/0.9.102 \ |
24 | crate://crates.io/parking_lot/0.12.1 \ | 24 | crate://crates.io/parking_lot/0.12.1 \ |
25 | crate://crates.io/parking_lot_core/0.9.9 \ | 25 | crate://crates.io/parking_lot_core/0.9.9 \ |
26 | crate://crates.io/pem/3.0.3 \ | 26 | crate://crates.io/pem/3.0.3 \ |
@@ -68,9 +68,9 @@ SRC_URI[libc-0.2.152.sha256sum] = "13e3bf6590cbc649f4d1a3eefc9d5d6eb746f5200ffb0 | |||
68 | SRC_URI[lock_api-0.4.11.sha256sum] = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45" | 68 | SRC_URI[lock_api-0.4.11.sha256sum] = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45" |
69 | SRC_URI[memoffset-0.9.0.sha256sum] = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" | 69 | SRC_URI[memoffset-0.9.0.sha256sum] = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" |
70 | SRC_URI[once_cell-1.19.0.sha256sum] = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" | 70 | SRC_URI[once_cell-1.19.0.sha256sum] = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" |
71 | SRC_URI[openssl-0.10.63.sha256sum] = "15c9d69dd87a29568d4d017cfe8ec518706046a05184e5aea92d0af890b803c8" | 71 | SRC_URI[openssl-0.10.64.sha256sum] = "95a0481286a310808298130d22dd1fef0fa571e05a8f44ec801801e84b216b1f" |
72 | SRC_URI[openssl-macros-0.1.1.sha256sum] = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" | 72 | SRC_URI[openssl-macros-0.1.1.sha256sum] = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" |
73 | SRC_URI[openssl-sys-0.9.99.sha256sum] = "22e1bf214306098e4832460f797824c05d25aacdf896f64a985fb0fd992454ae" | 73 | SRC_URI[openssl-sys-0.9.102.sha256sum] = "c597637d56fbc83893a35eb0dd04b2b8e7a50c91e64e9493e398b5df4fb45fa2" |
74 | SRC_URI[parking_lot-0.12.1.sha256sum] = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" | 74 | SRC_URI[parking_lot-0.12.1.sha256sum] = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" |
75 | SRC_URI[parking_lot_core-0.9.9.sha256sum] = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" | 75 | SRC_URI[parking_lot_core-0.9.9.sha256sum] = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" |
76 | SRC_URI[pem-3.0.3.sha256sum] = "1b8fcc794035347fb64beda2d3b462595dd2753e3f268d89c5aae77e8cf2c310" | 76 | SRC_URI[pem-3.0.3.sha256sum] = "1b8fcc794035347fb64beda2d3b462595dd2753e3f268d89c5aae77e8cf2c310" |
diff --git a/meta/recipes-devtools/python/python3-cryptography-vectors_42.0.5.bb b/meta/recipes-devtools/python/python3-cryptography-vectors_42.0.7.bb index ee522af08e..9a025a530c 100644 --- a/meta/recipes-devtools/python/python3-cryptography-vectors_42.0.5.bb +++ b/meta/recipes-devtools/python/python3-cryptography-vectors_42.0.7.bb | |||
@@ -9,7 +9,7 @@ LIC_FILES_CHKSUM = "file://LICENSE;md5=8c3617db4fb6fae01f1d253ab91511e4 \ | |||
9 | # NOTE: Make sure to keep this recipe at the same version as python3-cryptography | 9 | # NOTE: Make sure to keep this recipe at the same version as python3-cryptography |
10 | # Upgrade both recipes at the same time | 10 | # Upgrade both recipes at the same time |
11 | 11 | ||
12 | SRC_URI[sha256sum] = "505cd5e3b0cb32da1526f07042b7fc38a4b6c356710cb73d2b5f76b037a38ed1" | 12 | SRC_URI[sha256sum] = "8294c632dbe2cb14c7b7e24219560e674bc2224dfc4bed577ab077dbb82bfa3c" |
13 | 13 | ||
14 | PYPI_PACKAGE = "cryptography_vectors" | 14 | PYPI_PACKAGE = "cryptography_vectors" |
15 | 15 | ||
diff --git a/meta/recipes-devtools/python/python3-cryptography/0001-pyproject.toml-remove-benchmark-disable-option.patch b/meta/recipes-devtools/python/python3-cryptography/0001-pyproject.toml-remove-benchmark-disable-option.patch index f9c8d1393d..ec4bfcc98b 100644 --- a/meta/recipes-devtools/python/python3-cryptography/0001-pyproject.toml-remove-benchmark-disable-option.patch +++ b/meta/recipes-devtools/python/python3-cryptography/0001-pyproject.toml-remove-benchmark-disable-option.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From b7dd3ce1d75d1e6255e1aca82aa7f401d4246a75 Mon Sep 17 00:00:00 2001 | 1 | From 18d78736f5c39784d5151b08fdfdd21c61225686 Mon Sep 17 00:00:00 2001 |
2 | From: Mingli Yu <mingli.yu@windriver.com> | 2 | From: Mingli Yu <mingli.yu@windriver.com> |
3 | Date: Tue, 17 May 2022 17:22:48 +0800 | 3 | Date: Tue, 17 May 2022 17:22:48 +0800 |
4 | Subject: [PATCH] pyproject.toml: remove --benchmark-disable option | 4 | Subject: [PATCH] pyproject.toml: remove --benchmark-disable option |
@@ -26,7 +26,7 @@ Signed-off-by: Tim Orling <tim.orling@konsulko.com> | |||
26 | 1 file changed, 1 insertion(+), 1 deletion(-) | 26 | 1 file changed, 1 insertion(+), 1 deletion(-) |
27 | 27 | ||
28 | diff --git a/pyproject.toml b/pyproject.toml | 28 | diff --git a/pyproject.toml b/pyproject.toml |
29 | index c9a7979bd..dec4b7157 100644 | 29 | index 4f0fa36..0d54ea6 100644 |
30 | --- a/pyproject.toml | 30 | --- a/pyproject.toml |
31 | +++ b/pyproject.toml | 31 | +++ b/pyproject.toml |
32 | @@ -92,7 +92,7 @@ rust-version = ">=1.63.0" | 32 | @@ -92,7 +92,7 @@ rust-version = ">=1.63.0" |
@@ -38,6 +38,3 @@ index c9a7979bd..dec4b7157 100644 | |||
38 | console_output_style = "progress-even-when-capture-no" | 38 | console_output_style = "progress-even-when-capture-no" |
39 | markers = [ | 39 | markers = [ |
40 | "skip_fips: this test is not executed in FIPS mode", | 40 | "skip_fips: this test is not executed in FIPS mode", |
41 | -- | ||
42 | 2.34.1 | ||
43 | |||
diff --git a/meta/recipes-devtools/python/python3-cryptography_42.0.5.bb b/meta/recipes-devtools/python/python3-cryptography_42.0.7.bb index 732f925d92..5964400a04 100644 --- a/meta/recipes-devtools/python/python3-cryptography_42.0.5.bb +++ b/meta/recipes-devtools/python/python3-cryptography_42.0.7.bb | |||
@@ -8,7 +8,7 @@ LIC_FILES_CHKSUM = "file://LICENSE;md5=8c3617db4fb6fae01f1d253ab91511e4 \ | |||
8 | " | 8 | " |
9 | LDSHARED += "-pthread" | 9 | LDSHARED += "-pthread" |
10 | 10 | ||
11 | SRC_URI[sha256sum] = "6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1" | 11 | SRC_URI[sha256sum] = "ecbfbc00bf55888edda9868a4cf927205de8499e7fabe6c050322298382953f2" |
12 | 12 | ||
13 | SRC_URI += "file://0001-pyproject.toml-remove-benchmark-disable-option.patch \ | 13 | SRC_URI += "file://0001-pyproject.toml-remove-benchmark-disable-option.patch \ |
14 | file://check-memfree.py \ | 14 | file://check-memfree.py \ |
@@ -49,7 +49,7 @@ RDEPENDS:${PN}-ptest += " \ | |||
49 | inherit ptest | 49 | inherit ptest |
50 | 50 | ||
51 | do_install_ptest() { | 51 | do_install_ptest() { |
52 | install -D ${WORKDIR}/check-memfree.py ${D}${PTEST_PATH}/ | 52 | install -D ${UNPACKDIR}/check-memfree.py ${D}${PTEST_PATH}/ |
53 | install -d ${D}${PTEST_PATH}/tests | 53 | install -d ${D}${PTEST_PATH}/tests |
54 | cp -rf ${S}/tests/* ${D}${PTEST_PATH}/tests/ | 54 | cp -rf ${S}/tests/* ${D}${PTEST_PATH}/tests/ |
55 | # remove test_x509.py as it needs benchmark and we don't | 55 | # remove test_x509.py as it needs benchmark and we don't |
diff --git a/meta/recipes-devtools/python/python3-hatchling_1.24.1.bb b/meta/recipes-devtools/python/python3-hatchling_1.24.2.bb index fc8d953281..0ad545f448 100644 --- a/meta/recipes-devtools/python/python3-hatchling_1.24.1.bb +++ b/meta/recipes-devtools/python/python3-hatchling_1.24.2.bb | |||
@@ -8,7 +8,7 @@ inherit pypi python_hatchling | |||
8 | DEPENDS += "python3-pluggy-native python3-pathspec-native python3-packaging-native python3-editables-native python3-trove-classifiers-native" | 8 | DEPENDS += "python3-pluggy-native python3-pathspec-native python3-packaging-native python3-editables-native python3-trove-classifiers-native" |
9 | DEPENDS:remove:class-native = "python3-hatchling-native" | 9 | DEPENDS:remove:class-native = "python3-hatchling-native" |
10 | 10 | ||
11 | SRC_URI[sha256sum] = "51f861891e98c4044eb455163a737e5d2328d7aa74890b182db2d80fee22a497" | 11 | SRC_URI[sha256sum] = "41ddc27cdb25db9ef7b68bef075f829c84cb349aa1bff8240797d012510547b0" |
12 | 12 | ||
13 | do_compile:prepend() { | 13 | do_compile:prepend() { |
14 | export PYTHONPATH=src | 14 | export PYTHONPATH=src |
diff --git a/meta/recipes-devtools/python/python3-hypothesis_6.100.1.bb b/meta/recipes-devtools/python/python3-hypothesis_6.102.4.bb index af7facfe7e..ec43514d41 100644 --- a/meta/recipes-devtools/python/python3-hypothesis_6.100.1.bb +++ b/meta/recipes-devtools/python/python3-hypothesis_6.102.4.bb | |||
@@ -13,7 +13,7 @@ SRC_URI += " \ | |||
13 | file://test_rle.py \ | 13 | file://test_rle.py \ |
14 | " | 14 | " |
15 | 15 | ||
16 | SRC_URI[sha256sum] = "ebff09d7fa4f1fb6a855a812baf17e578b4481b7b70ec6d96496210d1a4c6c35" | 16 | SRC_URI[sha256sum] = "59b4d144346d5cffb482cc1bafbd21b13ff31608e8c4b3e4630339aee3e87763" |
17 | 17 | ||
18 | RDEPENDS:${PN} += " \ | 18 | RDEPENDS:${PN} += " \ |
19 | python3-attrs \ | 19 | python3-attrs \ |
@@ -32,8 +32,8 @@ RDEPENDS:${PN}-ptest += " \ | |||
32 | 32 | ||
33 | do_install_ptest() { | 33 | do_install_ptest() { |
34 | install -d ${D}${PTEST_PATH}/examples | 34 | install -d ${D}${PTEST_PATH}/examples |
35 | install -m 0755 ${WORKDIR}/test_binary_search.py ${D}${PTEST_PATH}/examples/ | 35 | install -m 0755 ${UNPACKDIR}/test_binary_search.py ${D}${PTEST_PATH}/examples/ |
36 | install -m 0755 ${WORKDIR}/test_rle.py ${D}${PTEST_PATH}/examples/ | 36 | install -m 0755 ${UNPACKDIR}/test_rle.py ${D}${PTEST_PATH}/examples/ |
37 | } | 37 | } |
38 | 38 | ||
39 | BBCLASSEXTEND = "native nativesdk" | 39 | BBCLASSEXTEND = "native nativesdk" |
diff --git a/meta/recipes-devtools/python/python3-jsonschema_4.21.1.bb b/meta/recipes-devtools/python/python3-jsonschema_4.22.0.bb index 381148f4bb..c3184396d2 100644 --- a/meta/recipes-devtools/python/python3-jsonschema_4.21.1.bb +++ b/meta/recipes-devtools/python/python3-jsonschema_4.22.0.bb | |||
@@ -4,7 +4,7 @@ LICENSE = "MIT" | |||
4 | LIC_FILES_CHKSUM = "file://COPYING;md5=7a60a81c146ec25599a3e1dabb8610a8 \ | 4 | LIC_FILES_CHKSUM = "file://COPYING;md5=7a60a81c146ec25599a3e1dabb8610a8 \ |
5 | file://json/LICENSE;md5=9d4de43111d33570c8fe49b4cb0e01af" | 5 | file://json/LICENSE;md5=9d4de43111d33570c8fe49b4cb0e01af" |
6 | 6 | ||
7 | SRC_URI[sha256sum] = "85727c00279f5fa6bedbe6238d2aa6403bedd8b4864ab11207d07df3cc1b2ee5" | 7 | SRC_URI[sha256sum] = "5b22d434a45935119af990552c862e5d6d564e8f6601206b305a61fdf661a2b7" |
8 | 8 | ||
9 | inherit pypi python_hatchling | 9 | inherit pypi python_hatchling |
10 | 10 | ||
diff --git a/meta/recipes-devtools/python/python3-lxml_5.2.1.bb b/meta/recipes-devtools/python/python3-lxml_5.2.2.bb index 44a10505f3..e5a22e7090 100644 --- a/meta/recipes-devtools/python/python3-lxml_5.2.1.bb +++ b/meta/recipes-devtools/python/python3-lxml_5.2.2.bb | |||
@@ -18,7 +18,7 @@ LIC_FILES_CHKSUM = "file://LICENSES.txt;md5=e4c045ebad958ead4b48008f70838403 \ | |||
18 | 18 | ||
19 | DEPENDS += "libxml2 libxslt" | 19 | DEPENDS += "libxml2 libxslt" |
20 | 20 | ||
21 | SRC_URI[sha256sum] = "3f7765e69bbce0906a7c74d5fe46d2c7a7596147318dbc08e4a2431f3060e306" | 21 | SRC_URI[sha256sum] = "bb2dc4898180bea79863d5487e5f9c7c34297414bad54bcd0f0852aee9cfdb87" |
22 | 22 | ||
23 | SRC_URI += "${PYPI_SRC_URI}" | 23 | SRC_URI += "${PYPI_SRC_URI}" |
24 | inherit pkgconfig pypi setuptools3 | 24 | inherit pkgconfig pypi setuptools3 |
diff --git a/meta/recipes-devtools/python/python3-mako_1.3.3.bb b/meta/recipes-devtools/python/python3-mako_1.3.5.bb index 7545cbef99..d27fb121a6 100644 --- a/meta/recipes-devtools/python/python3-mako_1.3.3.bb +++ b/meta/recipes-devtools/python/python3-mako_1.3.5.bb | |||
@@ -8,7 +8,7 @@ PYPI_PACKAGE = "Mako" | |||
8 | 8 | ||
9 | inherit pypi python_setuptools_build_meta | 9 | inherit pypi python_setuptools_build_meta |
10 | 10 | ||
11 | SRC_URI[sha256sum] = "e16c01d9ab9c11f7290eef1cfefc093fb5a45ee4a3da09e2fec2e4d1bae54e73" | 11 | SRC_URI[sha256sum] = "48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc" |
12 | 12 | ||
13 | RDEPENDS:${PN} = "python3-html \ | 13 | RDEPENDS:${PN} = "python3-html \ |
14 | python3-markupsafe \ | 14 | python3-markupsafe \ |
diff --git a/meta/recipes-devtools/python/python3-pycparser_2.22.bb b/meta/recipes-devtools/python/python3-pycparser_2.22.bb index 9a5abc42bd..0be39ea383 100644 --- a/meta/recipes-devtools/python/python3-pycparser_2.22.bb +++ b/meta/recipes-devtools/python/python3-pycparser_2.22.bb | |||
@@ -11,9 +11,7 @@ BBCLASSEXTEND = "native nativesdk" | |||
11 | 11 | ||
12 | RDEPENDS:${PN}:class-target += "\ | 12 | RDEPENDS:${PN}:class-target += "\ |
13 | python3-netclient \ | 13 | python3-netclient \ |
14 | python3-ply \ | 14 | " |
15 | python3-pprint \ | ||
16 | " | ||
17 | 15 | ||
18 | RSUGGESTS:${PN}:class-target += "\ | 16 | RSUGGESTS:${PN}:class-target += "\ |
19 | cpp \ | 17 | cpp \ |
diff --git a/meta/recipes-devtools/python/python3-pygments_2.17.2.bb b/meta/recipes-devtools/python/python3-pygments_2.18.0.bb index 8b98064b78..fd37365681 100644 --- a/meta/recipes-devtools/python/python3-pygments_2.17.2.bb +++ b/meta/recipes-devtools/python/python3-pygments_2.18.0.bb | |||
@@ -5,7 +5,7 @@ LICENSE = "BSD-2-Clause" | |||
5 | LIC_FILES_CHKSUM = "file://LICENSE;md5=36a13c90514e2899f1eba7f41c3ee592" | 5 | LIC_FILES_CHKSUM = "file://LICENSE;md5=36a13c90514e2899f1eba7f41c3ee592" |
6 | 6 | ||
7 | inherit python_hatchling | 7 | inherit python_hatchling |
8 | SRC_URI[sha256sum] = "da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367" | 8 | SRC_URI[sha256sum] = "786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199" |
9 | 9 | ||
10 | UPSTREAM_CHECK_PYPI_PACKAGE = "Pygments" | 10 | UPSTREAM_CHECK_PYPI_PACKAGE = "Pygments" |
11 | inherit pypi | 11 | inherit pypi |
diff --git a/meta/recipes-devtools/python/python3-pyopenssl_24.1.0.bb b/meta/recipes-devtools/python/python3-pyopenssl_24.1.0.bb index e714ad838e..9ef3145bf8 100644 --- a/meta/recipes-devtools/python/python3-pyopenssl_24.1.0.bb +++ b/meta/recipes-devtools/python/python3-pyopenssl_24.1.0.bb | |||
@@ -15,7 +15,6 @@ FILES:${PN}-tests = "${libdir}/${PYTHON_DIR}/site-packages/OpenSSL/test" | |||
15 | 15 | ||
16 | RDEPENDS:${PN}:class-target = " \ | 16 | RDEPENDS:${PN}:class-target = " \ |
17 | python3-cryptography \ | 17 | python3-cryptography \ |
18 | python3-six \ | ||
19 | python3-threading \ | 18 | python3-threading \ |
20 | " | 19 | " |
21 | RDEPENDS:${PN}-tests = "${PN}" | 20 | RDEPENDS:${PN}-tests = "${PN}" |
diff --git a/meta/recipes-devtools/python/python3-pyproject-metadata_0.7.1.bb b/meta/recipes-devtools/python/python3-pyproject-metadata_0.8.0.bb index 8b9549f3d0..8d00826f86 100644 --- a/meta/recipes-devtools/python/python3-pyproject-metadata_0.7.1.bb +++ b/meta/recipes-devtools/python/python3-pyproject-metadata_0.8.0.bb | |||
@@ -12,11 +12,11 @@ HOMEPAGE = "https://github.com/FFY00/python-pyproject-metadata" | |||
12 | LICENSE = "MIT" | 12 | LICENSE = "MIT" |
13 | LIC_FILES_CHKSUM = "file://LICENSE;md5=310439af287b0fb4780b2ad6907c256c" | 13 | LIC_FILES_CHKSUM = "file://LICENSE;md5=310439af287b0fb4780b2ad6907c256c" |
14 | 14 | ||
15 | PYPI_PACKAGE = "pyproject-metadata" | 15 | PYPI_PACKAGE = "pyproject_metadata" |
16 | 16 | ||
17 | inherit pypi python_setuptools_build_meta | 17 | inherit pypi python_setuptools_build_meta |
18 | 18 | ||
19 | SRC_URI[sha256sum] = "0a94f18b108b9b21f3a26a3d541f056c34edcb17dc872a144a15618fed7aef67" | 19 | SRC_URI[sha256sum] = "376d5a00764ac29440a54579f88e66b7d9cb7e629d35c35a1c7248bfebc9b455" |
20 | 20 | ||
21 | RDEPENDS:${PN} += " \ | 21 | RDEPENDS:${PN} += " \ |
22 | python3-logging \ | 22 | python3-logging \ |
diff --git a/meta/recipes-devtools/python/python3-pytest_8.2.0.bb b/meta/recipes-devtools/python/python3-pytest_8.2.1.bb index 1e6674079f..28dbe786f8 100644 --- a/meta/recipes-devtools/python/python3-pytest_8.2.0.bb +++ b/meta/recipes-devtools/python/python3-pytest_8.2.1.bb | |||
@@ -5,7 +5,7 @@ DESCRIPTION = "The pytest framework makes it easy to write small tests, yet scal | |||
5 | LICENSE = "MIT" | 5 | LICENSE = "MIT" |
6 | LIC_FILES_CHKSUM = "file://LICENSE;md5=bd27e41b6550fe0fc45356d1d81ee37c" | 6 | LIC_FILES_CHKSUM = "file://LICENSE;md5=bd27e41b6550fe0fc45356d1d81ee37c" |
7 | 7 | ||
8 | SRC_URI[sha256sum] = "d507d4482197eac0ba2bae2e9babf0672eb333017bcedaa5fb1a3d42c1174b3f" | 8 | SRC_URI[sha256sum] = "5046e5b46d8e4cac199c373041f26be56fdb81eb4e67dc11d4e10811fc3408fd" |
9 | 9 | ||
10 | DEPENDS += "python3-setuptools-scm-native" | 10 | DEPENDS += "python3-setuptools-scm-native" |
11 | 11 | ||
diff --git a/meta/recipes-devtools/python/python3-pyyaml_6.0.1.bb b/meta/recipes-devtools/python/python3-pyyaml_6.0.1.bb index 3388312557..6e2022719c 100644 --- a/meta/recipes-devtools/python/python3-pyyaml_6.0.1.bb +++ b/meta/recipes-devtools/python/python3-pyyaml_6.0.1.bb | |||
@@ -34,7 +34,7 @@ RDEPENDS:${PN}-ptest += " \ | |||
34 | 34 | ||
35 | do_install_ptest() { | 35 | do_install_ptest() { |
36 | install -d ${D}${PTEST_PATH}/tests | 36 | install -d ${D}${PTEST_PATH}/tests |
37 | cp -rf ${WORKDIR}/test_dump_load.py ${D}${PTEST_PATH}/tests/ | 37 | cp -rf ${UNPACKDIR}/test_dump_load.py ${D}${PTEST_PATH}/tests/ |
38 | } | 38 | } |
39 | 39 | ||
40 | BBCLASSEXTEND = "native nativesdk" | 40 | BBCLASSEXTEND = "native nativesdk" |
diff --git a/meta/recipes-devtools/python/python3-referencing_0.34.0.bb b/meta/recipes-devtools/python/python3-referencing_0.35.1.bb index 6fbd10d9cf..3e2a380607 100644 --- a/meta/recipes-devtools/python/python3-referencing_0.34.0.bb +++ b/meta/recipes-devtools/python/python3-referencing_0.35.1.bb | |||
@@ -3,7 +3,7 @@ HOMEPAGE = "https://github.com/python-jsonschema/referencing" | |||
3 | LICENSE = "MIT" | 3 | LICENSE = "MIT" |
4 | LIC_FILES_CHKSUM = "file://COPYING;md5=93eb9740964b59e9ba30281255b044e2" | 4 | LIC_FILES_CHKSUM = "file://COPYING;md5=93eb9740964b59e9ba30281255b044e2" |
5 | 5 | ||
6 | SRC_URI[sha256sum] = "5773bd84ef41799a5a8ca72dc34590c041eb01bf9aa02632b4a973fb0181a844" | 6 | SRC_URI[sha256sum] = "25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c" |
7 | 7 | ||
8 | inherit pypi python_hatchling | 8 | inherit pypi python_hatchling |
9 | 9 | ||
diff --git a/meta/recipes-devtools/python/python3-requests_2.31.0.bb b/meta/recipes-devtools/python/python3-requests_2.32.1.bb index df48cd54c3..a1144181bc 100644 --- a/meta/recipes-devtools/python/python3-requests_2.31.0.bb +++ b/meta/recipes-devtools/python/python3-requests_2.32.1.bb | |||
@@ -1,19 +1,17 @@ | |||
1 | SUMMARY = "Python HTTP for Humans." | 1 | SUMMARY = "Python HTTP for Humans." |
2 | HOMEPAGE = "http://python-requests.org" | 2 | HOMEPAGE = "https://requests.readthedocs.io" |
3 | LICENSE = "Apache-2.0" | 3 | LICENSE = "Apache-2.0" |
4 | LIC_FILES_CHKSUM = "file://LICENSE;md5=34400b68072d710fecd0a2940a0d1658" | 4 | LIC_FILES_CHKSUM = "file://LICENSE;md5=34400b68072d710fecd0a2940a0d1658" |
5 | 5 | ||
6 | SRC_URI[sha256sum] = "942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1" | 6 | SRC_URI[sha256sum] = "eb97e87e64c79e64e5b8ac75cee9dd1f97f49e289b083ee6be96268930725685" |
7 | 7 | ||
8 | inherit pypi setuptools3 | 8 | inherit pypi python_setuptools_build_meta |
9 | 9 | ||
10 | RDEPENDS:${PN} += " \ | 10 | RDEPENDS:${PN} += " \ |
11 | python3-certifi \ | ||
11 | python3-email \ | 12 | python3-email \ |
12 | python3-json \ | 13 | python3-json \ |
13 | python3-ndg-httpsclient \ | ||
14 | python3-netserver \ | 14 | python3-netserver \ |
15 | python3-pyasn1 \ | ||
16 | python3-pyopenssl \ | ||
17 | python3-pysocks \ | 15 | python3-pysocks \ |
18 | python3-urllib3 \ | 16 | python3-urllib3 \ |
19 | python3-chardet \ | 17 | python3-chardet \ |
diff --git a/meta/recipes-devtools/python/python3-rpds-py-crates.inc b/meta/recipes-devtools/python/python3-rpds-py-crates.inc index b9048bde51..3551bdb5f7 100644 --- a/meta/recipes-devtools/python/python3-rpds-py-crates.inc +++ b/meta/recipes-devtools/python/python3-rpds-py-crates.inc | |||
@@ -2,7 +2,7 @@ | |||
2 | 2 | ||
3 | # from Cargo.lock | 3 | # from Cargo.lock |
4 | SRC_URI += " \ | 4 | SRC_URI += " \ |
5 | crate://crates.io/archery/1.1.0 \ | 5 | crate://crates.io/archery/1.2.0 \ |
6 | crate://crates.io/autocfg/1.1.0 \ | 6 | crate://crates.io/autocfg/1.1.0 \ |
7 | crate://crates.io/bitflags/1.3.2 \ | 7 | crate://crates.io/bitflags/1.3.2 \ |
8 | crate://crates.io/cfg-if/1.0.0 \ | 8 | crate://crates.io/cfg-if/1.0.0 \ |
@@ -14,12 +14,13 @@ SRC_URI += " \ | |||
14 | crate://crates.io/once_cell/1.18.0 \ | 14 | crate://crates.io/once_cell/1.18.0 \ |
15 | crate://crates.io/parking_lot/0.12.1 \ | 15 | crate://crates.io/parking_lot/0.12.1 \ |
16 | crate://crates.io/parking_lot_core/0.9.8 \ | 16 | crate://crates.io/parking_lot_core/0.9.8 \ |
17 | crate://crates.io/portable-atomic/1.6.0 \ | ||
17 | crate://crates.io/proc-macro2/1.0.66 \ | 18 | crate://crates.io/proc-macro2/1.0.66 \ |
18 | crate://crates.io/pyo3/0.20.2 \ | 19 | crate://crates.io/pyo3/0.20.3 \ |
19 | crate://crates.io/pyo3-build-config/0.20.2 \ | 20 | crate://crates.io/pyo3-build-config/0.20.3 \ |
20 | crate://crates.io/pyo3-ffi/0.20.2 \ | 21 | crate://crates.io/pyo3-ffi/0.20.3 \ |
21 | crate://crates.io/pyo3-macros/0.20.2 \ | 22 | crate://crates.io/pyo3-macros/0.20.3 \ |
22 | crate://crates.io/pyo3-macros-backend/0.20.2 \ | 23 | crate://crates.io/pyo3-macros-backend/0.20.3 \ |
23 | crate://crates.io/quote/1.0.31 \ | 24 | crate://crates.io/quote/1.0.31 \ |
24 | crate://crates.io/redox_syscall/0.3.5 \ | 25 | crate://crates.io/redox_syscall/0.3.5 \ |
25 | crate://crates.io/rpds/1.1.0 \ | 26 | crate://crates.io/rpds/1.1.0 \ |
@@ -41,7 +42,7 @@ SRC_URI += " \ | |||
41 | crate://crates.io/windows_x86_64_msvc/0.48.0 \ | 42 | crate://crates.io/windows_x86_64_msvc/0.48.0 \ |
42 | " | 43 | " |
43 | 44 | ||
44 | SRC_URI[archery-1.1.0.sha256sum] = "487955f60962765486ce000015a3492ca45c34a2ebbf12bc0aa2b5110ca6e7d2" | 45 | SRC_URI[archery-1.2.0.sha256sum] = "8967cd1cc9e9e1954f644e14fbd6042fe9a37da96c52a67e44a2ac18261f8561" |
45 | SRC_URI[autocfg-1.1.0.sha256sum] = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" | 46 | SRC_URI[autocfg-1.1.0.sha256sum] = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" |
46 | SRC_URI[bitflags-1.3.2.sha256sum] = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" | 47 | SRC_URI[bitflags-1.3.2.sha256sum] = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" |
47 | SRC_URI[cfg-if-1.0.0.sha256sum] = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" | 48 | SRC_URI[cfg-if-1.0.0.sha256sum] = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" |
@@ -53,12 +54,13 @@ SRC_URI[memoffset-0.9.0.sha256sum] = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff | |||
53 | SRC_URI[once_cell-1.18.0.sha256sum] = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" | 54 | SRC_URI[once_cell-1.18.0.sha256sum] = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" |
54 | SRC_URI[parking_lot-0.12.1.sha256sum] = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" | 55 | SRC_URI[parking_lot-0.12.1.sha256sum] = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" |
55 | SRC_URI[parking_lot_core-0.9.8.sha256sum] = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447" | 56 | SRC_URI[parking_lot_core-0.9.8.sha256sum] = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447" |
57 | SRC_URI[portable-atomic-1.6.0.sha256sum] = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0" | ||
56 | SRC_URI[proc-macro2-1.0.66.sha256sum] = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9" | 58 | SRC_URI[proc-macro2-1.0.66.sha256sum] = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9" |
57 | SRC_URI[pyo3-0.20.2.sha256sum] = "9a89dc7a5850d0e983be1ec2a463a171d20990487c3cfcd68b5363f1ee3d6fe0" | 59 | SRC_URI[pyo3-0.20.3.sha256sum] = "53bdbb96d49157e65d45cc287af5f32ffadd5f4761438b527b055fb0d4bb8233" |
58 | SRC_URI[pyo3-build-config-0.20.2.sha256sum] = "07426f0d8fe5a601f26293f300afd1a7b1ed5e78b2a705870c5f30893c5163be" | 60 | SRC_URI[pyo3-build-config-0.20.3.sha256sum] = "deaa5745de3f5231ce10517a1f5dd97d53e5a2fd77aa6b5842292085831d48d7" |
59 | SRC_URI[pyo3-ffi-0.20.2.sha256sum] = "dbb7dec17e17766b46bca4f1a4215a85006b4c2ecde122076c562dd058da6cf1" | 61 | SRC_URI[pyo3-ffi-0.20.3.sha256sum] = "62b42531d03e08d4ef1f6e85a2ed422eb678b8cd62b762e53891c05faf0d4afa" |
60 | SRC_URI[pyo3-macros-0.20.2.sha256sum] = "05f738b4e40d50b5711957f142878cfa0f28e054aa0ebdfc3fd137a843f74ed3" | 62 | SRC_URI[pyo3-macros-0.20.3.sha256sum] = "7305c720fa01b8055ec95e484a6eca7a83c841267f0dd5280f0c8b8551d2c158" |
61 | SRC_URI[pyo3-macros-backend-0.20.2.sha256sum] = "0fc910d4851847827daf9d6cdd4a823fbdaab5b8818325c5e97a86da79e8881f" | 63 | SRC_URI[pyo3-macros-backend-0.20.3.sha256sum] = "7c7e9b68bb9c3149c5b0cade5d07f953d6d125eb4337723c4ccdb665f1f96185" |
62 | SRC_URI[quote-1.0.31.sha256sum] = "5fe8a65d69dd0808184ebb5f836ab526bb259db23c657efa38711b1072ee47f0" | 64 | SRC_URI[quote-1.0.31.sha256sum] = "5fe8a65d69dd0808184ebb5f836ab526bb259db23c657efa38711b1072ee47f0" |
63 | SRC_URI[redox_syscall-0.3.5.sha256sum] = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" | 65 | SRC_URI[redox_syscall-0.3.5.sha256sum] = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" |
64 | SRC_URI[rpds-1.1.0.sha256sum] = "a0e15515d3ce3313324d842629ea4905c25a13f81953eadb88f85516f59290a4" | 66 | SRC_URI[rpds-1.1.0.sha256sum] = "a0e15515d3ce3313324d842629ea4905c25a13f81953eadb88f85516f59290a4" |
diff --git a/meta/recipes-devtools/python/python3-rpds-py/run-ptest b/meta/recipes-devtools/python/python3-rpds-py/run-ptest new file mode 100644 index 0000000000..8d2017d39c --- /dev/null +++ b/meta/recipes-devtools/python/python3-rpds-py/run-ptest | |||
@@ -0,0 +1,3 @@ | |||
1 | #!/bin/sh | ||
2 | |||
3 | pytest --automake | ||
diff --git a/meta/recipes-devtools/python/python3-rpds-py_0.18.0.bb b/meta/recipes-devtools/python/python3-rpds-py_0.18.0.bb deleted file mode 100644 index cece2cb8cc..0000000000 --- a/meta/recipes-devtools/python/python3-rpds-py_0.18.0.bb +++ /dev/null | |||
@@ -1,15 +0,0 @@ | |||
1 | SUMMARY = "Python bindings to the Rust rpds crate for persistent data structures." | ||
2 | HOMEPAGE = "https://pypi.org/project/rpds-py/" | ||
3 | |||
4 | LICENSE = "MIT" | ||
5 | LIC_FILES_CHKSUM = "file://LICENSE;md5=7767fa537c4596c54141f32882c4a984" | ||
6 | |||
7 | SRC_URI[sha256sum] = "42821446ee7a76f5d9f71f9e33a4fb2ffd724bb3e7f93386150b61a43115788d" | ||
8 | |||
9 | require ${BPN}-crates.inc | ||
10 | |||
11 | inherit pypi cargo-update-recipe-crates python_maturin | ||
12 | |||
13 | PYPI_PACKAGE = "rpds_py" | ||
14 | |||
15 | BBCLASSEXTEND = "native nativesdk" | ||
diff --git a/meta/recipes-devtools/python/python3-rpds-py_0.18.1.bb b/meta/recipes-devtools/python/python3-rpds-py_0.18.1.bb new file mode 100644 index 0000000000..f46df1115c --- /dev/null +++ b/meta/recipes-devtools/python/python3-rpds-py_0.18.1.bb | |||
@@ -0,0 +1,30 @@ | |||
1 | SUMMARY = "Python bindings to the Rust rpds crate for persistent data structures." | ||
2 | HOMEPAGE = "https://pypi.org/project/rpds-py/" | ||
3 | |||
4 | LICENSE = "MIT" | ||
5 | LIC_FILES_CHKSUM = "file://LICENSE;md5=7767fa537c4596c54141f32882c4a984" | ||
6 | |||
7 | SRC_URI += "file://run-ptest" | ||
8 | |||
9 | SRC_URI[sha256sum] = "dc48b479d540770c811fbd1eb9ba2bb66951863e448efec2e2c102625328e92f" | ||
10 | |||
11 | require ${BPN}-crates.inc | ||
12 | |||
13 | inherit pypi cargo-update-recipe-crates python_maturin ptest | ||
14 | |||
15 | PYPI_PACKAGE = "rpds_py" | ||
16 | |||
17 | RDEPENDS:${PN}-ptest += " \ | ||
18 | python3-iniconfig \ | ||
19 | python3-packaging \ | ||
20 | python3-pluggy \ | ||
21 | python3-pytest \ | ||
22 | python3-unittest-automake-output \ | ||
23 | " | ||
24 | |||
25 | do_install_ptest() { | ||
26 | install -d ${D}${PTEST_PATH}/tests | ||
27 | cp -rf ${S}/tests/* ${D}${PTEST_PATH}/tests/ | ||
28 | } | ||
29 | |||
30 | BBCLASSEXTEND = "native nativesdk" | ||
diff --git a/meta/recipes-devtools/python/python3-setuptools-scm_8.0.4.bb b/meta/recipes-devtools/python/python3-setuptools-scm_8.1.0.bb index 64b5050c3b..ea65b476fc 100644 --- a/meta/recipes-devtools/python/python3-setuptools-scm_8.0.4.bb +++ b/meta/recipes-devtools/python/python3-setuptools-scm_8.1.0.bb | |||
@@ -6,7 +6,9 @@ argument or in a SCM managed file." | |||
6 | LICENSE = "MIT" | 6 | LICENSE = "MIT" |
7 | LIC_FILES_CHKSUM = "file://LICENSE;md5=838c366f69b72c5df05c96dff79b35f2" | 7 | LIC_FILES_CHKSUM = "file://LICENSE;md5=838c366f69b72c5df05c96dff79b35f2" |
8 | 8 | ||
9 | SRC_URI[sha256sum] = "b5f43ff6800669595193fd09891564ee9d1d7dcb196cab4b2506d53a2e1c95c7" | 9 | SRC_URI[sha256sum] = "42dea1b65771cba93b7a515d65a65d8246e560768a66b9106a592c8e7f26c8a7" |
10 | |||
11 | PYPI_PACKAGE = "setuptools_scm" | ||
10 | 12 | ||
11 | inherit pypi python_setuptools_build_meta | 13 | inherit pypi python_setuptools_build_meta |
12 | 14 | ||
diff --git a/meta/recipes-devtools/python/python3-sphinx_7.2.6.bb b/meta/recipes-devtools/python/python3-sphinx_7.3.7.bb index f4ed0720ff..a1cae2abc0 100644 --- a/meta/recipes-devtools/python/python3-sphinx_7.2.6.bb +++ b/meta/recipes-devtools/python/python3-sphinx_7.3.7.bb | |||
@@ -2,9 +2,9 @@ SUMMARY = "Python documentation generator" | |||
2 | HOMEPAGE = "http://sphinx-doc.org/" | 2 | HOMEPAGE = "http://sphinx-doc.org/" |
3 | SECTION = "devel/python" | 3 | SECTION = "devel/python" |
4 | LICENSE = "BSD-2-Clause & MIT" | 4 | LICENSE = "BSD-2-Clause & MIT" |
5 | LIC_FILES_CHKSUM = "file://LICENSE;md5=5eb6ac1b115a1ed24a12d9f15b633993" | 5 | LIC_FILES_CHKSUM = "file://LICENSE.rst;md5=b6c681698874edd2eb9eaff7cc9de642" |
6 | 6 | ||
7 | SRC_URI[sha256sum] = "9a5160e1ea90688d5963ba09a2dcd8bdd526620edbb65c328728f1b2228d5ab5" | 7 | SRC_URI[sha256sum] = "a4a7db75ed37531c05002d56ed6948d4c42f473a36f46e1382b0bd76ca9627bc" |
8 | 8 | ||
9 | inherit python_flit_core pypi | 9 | inherit python_flit_core pypi |
10 | UPSTREAM_CHECK_REGEX = "/Sphinx/(?P<pver>(\d+[\.\-_]*)+)/" | 10 | UPSTREAM_CHECK_REGEX = "/Sphinx/(?P<pver>(\d+[\.\-_]*)+)/" |
diff --git a/meta/recipes-devtools/python/python3-trove-classifiers_2024.4.10.bb b/meta/recipes-devtools/python/python3-trove-classifiers_2024.5.17.bb index 357b428983..94c48ed102 100644 --- a/meta/recipes-devtools/python/python3-trove-classifiers_2024.4.10.bb +++ b/meta/recipes-devtools/python/python3-trove-classifiers_2024.5.17.bb | |||
@@ -3,7 +3,9 @@ HOMEPAGE = "https://github.com/pypa/trove-classifiers" | |||
3 | LICENSE = "Apache-2.0" | 3 | LICENSE = "Apache-2.0" |
4 | LIC_FILES_CHKSUM = "file://LICENSE;md5=86d3f3a95c324c9479bd8986968f4327" | 4 | LIC_FILES_CHKSUM = "file://LICENSE;md5=86d3f3a95c324c9479bd8986968f4327" |
5 | 5 | ||
6 | SRC_URI[sha256sum] = "49f40bb6a746b72a1cba4f8d55ee8252169cda0f70802e3fd24f04b7fb25a492" | 6 | SRC_URI[sha256sum] = "d47a6f1c48803091c3fc81f535fecfeef65b558f2b9e4e83df7a79d17bce8bbf" |
7 | |||
8 | PYPI_PACKAGE = "trove_classifiers" | ||
7 | 9 | ||
8 | inherit pypi python_setuptools_build_meta ptest | 10 | inherit pypi python_setuptools_build_meta ptest |
9 | 11 | ||
diff --git a/meta/recipes-devtools/python/python3-zipp_3.18.1.bb b/meta/recipes-devtools/python/python3-zipp_3.18.2.bb index e43432469d..97aa1fed25 100644 --- a/meta/recipes-devtools/python/python3-zipp_3.18.1.bb +++ b/meta/recipes-devtools/python/python3-zipp_3.18.2.bb | |||
@@ -3,7 +3,7 @@ HOMEPAGE = "https://github.com/jaraco/zipp" | |||
3 | LICENSE = "MIT" | 3 | LICENSE = "MIT" |
4 | LIC_FILES_CHKSUM = "file://LICENSE;md5=141643e11c48898150daa83802dbc65f" | 4 | LIC_FILES_CHKSUM = "file://LICENSE;md5=141643e11c48898150daa83802dbc65f" |
5 | 5 | ||
6 | SRC_URI[sha256sum] = "2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715" | 6 | SRC_URI[sha256sum] = "6278d9ddbcfb1f1089a88fde84481528b07b0e10474e09dcfe53dad4069fa059" |
7 | 7 | ||
8 | DEPENDS += "python3-setuptools-scm-native" | 8 | DEPENDS += "python3-setuptools-scm-native" |
9 | 9 | ||
diff --git a/meta/recipes-devtools/python/python3/0001-gh-107811-tarfile-treat-overflow-in-UID-GID-as-failu.patch b/meta/recipes-devtools/python/python3/0001-gh-107811-tarfile-treat-overflow-in-UID-GID-as-failu.patch new file mode 100644 index 0000000000..88b84c6024 --- /dev/null +++ b/meta/recipes-devtools/python/python3/0001-gh-107811-tarfile-treat-overflow-in-UID-GID-as-failu.patch | |||
@@ -0,0 +1,40 @@ | |||
1 | From 999d4e74d34afa233ad8ad0c70b989d77a21957f Mon Sep 17 00:00:00 2001 | ||
2 | From: Petr Viktorin <encukou@gmail.com> | ||
3 | Date: Wed, 23 Aug 2023 20:00:07 +0200 | ||
4 | Subject: [PATCH] gh-107811: tarfile: treat overflow in UID/GID as failure to | ||
5 | set it (#108369) | ||
6 | |||
7 | Upstream-Status: Backport [https://github.com/python/cpython/pull/108369] | ||
8 | Signed-off-by: Khem Raj <raj.khem@gmail.com> | ||
9 | --- | ||
10 | Lib/tarfile.py | 3 ++- | ||
11 | .../Library/2023-08-23-17-34-39.gh-issue-107811.3Fng72.rst | 3 +++ | ||
12 | 2 files changed, 5 insertions(+), 1 deletion(-) | ||
13 | create mode 100644 Misc/NEWS.d/next/Library/2023-08-23-17-34-39.gh-issue-107811.3Fng72.rst | ||
14 | |||
15 | diff --git a/Lib/tarfile.py b/Lib/tarfile.py | ||
16 | index 3bbbcaa..473167d 100755 | ||
17 | --- a/Lib/tarfile.py | ||
18 | +++ b/Lib/tarfile.py | ||
19 | @@ -2557,7 +2557,8 @@ class TarFile(object): | ||
20 | os.lchown(targetpath, u, g) | ||
21 | else: | ||
22 | os.chown(targetpath, u, g) | ||
23 | - except OSError as e: | ||
24 | + except (OSError, OverflowError) as e: | ||
25 | + # OverflowError can be raised if an ID doesn't fit in `id_t` | ||
26 | raise ExtractError("could not change owner") from e | ||
27 | |||
28 | def chmod(self, tarinfo, targetpath): | ||
29 | diff --git a/Misc/NEWS.d/next/Library/2023-08-23-17-34-39.gh-issue-107811.3Fng72.rst b/Misc/NEWS.d/next/Library/2023-08-23-17-34-39.gh-issue-107811.3Fng72.rst | ||
30 | new file mode 100644 | ||
31 | index 0000000..ffca413 | ||
32 | --- /dev/null | ||
33 | +++ b/Misc/NEWS.d/next/Library/2023-08-23-17-34-39.gh-issue-107811.3Fng72.rst | ||
34 | @@ -0,0 +1,3 @@ | ||
35 | +:mod:`tarfile`: extraction of members with overly large UID or GID (e.g. on | ||
36 | +an OS with 32-bit :c:type:`!id_t`) now fails in the same way as failing to | ||
37 | +set the ID. | ||
38 | -- | ||
39 | 2.45.0 | ||
40 | |||
diff --git a/meta/recipes-devtools/python/python3_3.12.3.bb b/meta/recipes-devtools/python/python3_3.12.3.bb index 93709b207c..37afa442fa 100644 --- a/meta/recipes-devtools/python/python3_3.12.3.bb +++ b/meta/recipes-devtools/python/python3_3.12.3.bb | |||
@@ -31,6 +31,7 @@ SRC_URI = "http://www.python.org/ftp/python/${PV}/Python-${PV}.tar.xz \ | |||
31 | file://0001-test_storlines-skip-due-to-load-variability.patch \ | 31 | file://0001-test_storlines-skip-due-to-load-variability.patch \ |
32 | file://0001-gh-114492-Initialize-struct-termios-before-calling-t.patch \ | 32 | file://0001-gh-114492-Initialize-struct-termios-before-calling-t.patch \ |
33 | file://0001-test_shutdown-skip-problematic-test.patch \ | 33 | file://0001-test_shutdown-skip-problematic-test.patch \ |
34 | file://0001-gh-107811-tarfile-treat-overflow-in-UID-GID-as-failu.patch \ | ||
34 | " | 35 | " |
35 | 36 | ||
36 | SRC_URI:append:class-native = " \ | 37 | SRC_URI:append:class-native = " \ |
diff --git a/meta/recipes-devtools/qemu/nativesdk-qemu-helper_1.0.bb b/meta/recipes-devtools/qemu/nativesdk-qemu-helper_1.0.bb index 997f72e6e7..90bba84b03 100644 --- a/meta/recipes-devtools/qemu/nativesdk-qemu-helper_1.0.bb +++ b/meta/recipes-devtools/qemu/nativesdk-qemu-helper_1.0.bb | |||
@@ -18,7 +18,8 @@ SRC_URI = "file://${COREBASE}/scripts/runqemu \ | |||
18 | file://${COREBASE}/scripts/runqemu-export-rootfs \ | 18 | file://${COREBASE}/scripts/runqemu-export-rootfs \ |
19 | " | 19 | " |
20 | 20 | ||
21 | S = "${WORKDIR}" | 21 | S = "${WORKDIR}/sources" |
22 | UNPACKDIR = "${S}" | ||
22 | 23 | ||
23 | inherit nativesdk | 24 | inherit nativesdk |
24 | 25 | ||
diff --git a/meta/recipes-devtools/qemu/qemu-helper-native_1.0.bb b/meta/recipes-devtools/qemu/qemu-helper-native_1.0.bb index 8bb6ef0995..28a3b95c4e 100644 --- a/meta/recipes-devtools/qemu/qemu-helper-native_1.0.bb +++ b/meta/recipes-devtools/qemu/qemu-helper-native_1.0.bb | |||
@@ -6,7 +6,8 @@ LIC_FILES_CHKSUM = "file://${S}/qemu-oe-bridge-helper.c;endline=4;md5=ae00a3bab8 | |||
6 | 6 | ||
7 | SRC_URI = "file://qemu-oe-bridge-helper.c" | 7 | SRC_URI = "file://qemu-oe-bridge-helper.c" |
8 | 8 | ||
9 | S = "${WORKDIR}" | 9 | S = "${WORKDIR}/sources" |
10 | UNPACKDIR = "${S}" | ||
10 | 11 | ||
11 | inherit native | 12 | inherit native |
12 | 13 | ||
diff --git a/meta/recipes-devtools/qemu/qemuwrapper-cross_1.0.bb b/meta/recipes-devtools/qemu/qemuwrapper-cross_1.0.bb index 97b44ad2e5..6d0e7883ad 100644 --- a/meta/recipes-devtools/qemu/qemuwrapper-cross_1.0.bb +++ b/meta/recipes-devtools/qemu/qemuwrapper-cross_1.0.bb | |||
@@ -2,7 +2,8 @@ SUMMARY = "QEMU wrapper script" | |||
2 | HOMEPAGE = "http://qemu.org" | 2 | HOMEPAGE = "http://qemu.org" |
3 | LICENSE = "MIT" | 3 | LICENSE = "MIT" |
4 | 4 | ||
5 | S = "${WORKDIR}" | 5 | S = "${WORKDIR}/sources" |
6 | UNPACKDIR = "${S}" | ||
6 | 7 | ||
7 | DEPENDS += "qemu-native" | 8 | DEPENDS += "qemu-native" |
8 | 9 | ||
diff --git a/meta/recipes-devtools/quilt/quilt.inc b/meta/recipes-devtools/quilt/quilt.inc index 3374f4dfbf..1d90d3d84d 100644 --- a/meta/recipes-devtools/quilt/quilt.inc +++ b/meta/recipes-devtools/quilt/quilt.inc | |||
@@ -67,7 +67,7 @@ DEPENDS:append:class-target = "${@bb.utils.contains('PTEST_ENABLED', '1', ' ptes | |||
67 | PACKAGE_WRITE_DEPS += "ptest-runner" | 67 | PACKAGE_WRITE_DEPS += "ptest-runner" |
68 | 68 | ||
69 | do_install_ptest() { | 69 | do_install_ptest() { |
70 | install ${WORKDIR}/test.sh ${D}${PTEST_PATH} | 70 | install ${UNPACKDIR}/test.sh ${D}${PTEST_PATH} |
71 | mkdir ${D}${PTEST_PATH}/test | 71 | mkdir ${D}${PTEST_PATH}/test |
72 | install ${S}/test/* ${D}${PTEST_PATH}/test | 72 | install ${S}/test/* ${D}${PTEST_PATH}/test |
73 | # mail needs a MTA, and the patch-wrapper is disabled | 73 | # mail needs a MTA, and the patch-wrapper is disabled |
diff --git a/meta/recipes-devtools/run-postinsts/run-postinsts_1.0.bb b/meta/recipes-devtools/run-postinsts/run-postinsts_1.0.bb index e3d63df852..f571e61921 100644 --- a/meta/recipes-devtools/run-postinsts/run-postinsts_1.0.bb +++ b/meta/recipes-devtools/run-postinsts/run-postinsts_1.0.bb | |||
@@ -8,7 +8,8 @@ SRC_URI = "file://run-postinsts \ | |||
8 | file://run-postinsts.init \ | 8 | file://run-postinsts.init \ |
9 | file://run-postinsts.service" | 9 | file://run-postinsts.service" |
10 | 10 | ||
11 | S = "${WORKDIR}" | 11 | S = "${WORKDIR}/sources" |
12 | UNPACKDIR = "${S}" | ||
12 | 13 | ||
13 | inherit allarch systemd update-rc.d | 14 | inherit allarch systemd update-rc.d |
14 | 15 | ||
diff --git a/meta/recipes-devtools/rust/cargo_1.75.0.bb b/meta/recipes-devtools/rust/cargo_1.75.0.bb index 50b7e7c7b4..123032cdf7 100644 --- a/meta/recipes-devtools/rust/cargo_1.75.0.bb +++ b/meta/recipes-devtools/rust/cargo_1.75.0.bb | |||
@@ -22,7 +22,7 @@ inherit cargo pkgconfig | |||
22 | DEBUG_PREFIX_MAP += "-fdebug-prefix-map=${RUSTSRC}/vendor=${TARGET_DBGSRC_DIR}" | 22 | DEBUG_PREFIX_MAP += "-fdebug-prefix-map=${RUSTSRC}/vendor=${TARGET_DBGSRC_DIR}" |
23 | 23 | ||
24 | do_cargo_setup_snapshot () { | 24 | do_cargo_setup_snapshot () { |
25 | ${WORKDIR}/rust-snapshot-components/${CARGO_SNAPSHOT}/install.sh --prefix="${WORKDIR}/${CARGO_SNAPSHOT}" --disable-ldconfig | 25 | ${UNPACKDIR}/rust-snapshot-components/${CARGO_SNAPSHOT}/install.sh --prefix="${WORKDIR}/${CARGO_SNAPSHOT}" --disable-ldconfig |
26 | # Need to use uninative's loader if enabled/present since the library paths | 26 | # Need to use uninative's loader if enabled/present since the library paths |
27 | # are used internally by rust and result in symbol mismatches if we don't | 27 | # are used internally by rust and result in symbol mismatches if we don't |
28 | if [ ! -z "${UNINATIVE_LOADER}" -a -e "${UNINATIVE_LOADER}" ]; then | 28 | if [ ! -z "${UNINATIVE_LOADER}" -a -e "${UNINATIVE_LOADER}" ]; then |
diff --git a/meta/recipes-devtools/rust/files/0001-cargo-do-not-write-host-information-into-compilation.patch b/meta/recipes-devtools/rust/files/0001-cargo-do-not-write-host-information-into-compilation.patch new file mode 100644 index 0000000000..a6ee867605 --- /dev/null +++ b/meta/recipes-devtools/rust/files/0001-cargo-do-not-write-host-information-into-compilation.patch | |||
@@ -0,0 +1,51 @@ | |||
1 | From 065d7c263091118437465d714d8a29dbb6296921 Mon Sep 17 00:00:00 2001 | ||
2 | From: Alexander Kanavin <alex@linutronix.de> | ||
3 | Date: Mon, 13 May 2024 14:57:54 +0200 | ||
4 | Subject: [PATCH] cargo: do not write host information into compilation unit | ||
5 | hashes | ||
6 | |||
7 | This breaks reproducibility in cross-builds where the cross-target | ||
8 | can be the same, but build hosts are different, as seen with | ||
9 | "rustc --version -v": | ||
10 | ... | ||
11 | host: x86_64-unknown-linux-gnu | ||
12 | |||
13 | vs. | ||
14 | |||
15 | host: aarch64-unknown-linux-gnu | ||
16 | |||
17 | This can possibly be improved by only hashing host info if the build | ||
18 | is a native one (e.g. there's no --target option passed to cargo | ||
19 | invocation) but I'm not sure how. | ||
20 | |||
21 | Upstream-Status: Inappropriate [reported at https://github.com/rust-lang/cargo/issues/13922] | ||
22 | Signed-off-by: Alexander Kanavin <alex@linutronix.de> | ||
23 | --- | ||
24 | .../src/cargo/core/compiler/context/compilation_files.rs | 4 ++-- | ||
25 | 1 file changed, 2 insertions(+), 2 deletions(-) | ||
26 | |||
27 | diff --git a/src/tools/cargo/src/cargo/core/compiler/context/compilation_files.rs b/src/tools/cargo/src/cargo/core/compiler/context/compilation_files.rs | ||
28 | index d83dbf10c..b2ad8d9f3 100644 | ||
29 | --- a/src/tools/cargo/src/cargo/core/compiler/context/compilation_files.rs | ||
30 | +++ b/src/tools/cargo/src/cargo/core/compiler/context/compilation_files.rs | ||
31 | @@ -652,7 +652,7 @@ fn hash_rustc_version(bcx: &BuildContext<'_, '_>, hasher: &mut StableHasher) { | ||
32 | if vers.pre.is_empty() || bcx.config.cli_unstable().separate_nightlies { | ||
33 | // For stable, keep the artifacts separate. This helps if someone is | ||
34 | // testing multiple versions, to avoid recompiles. | ||
35 | - bcx.rustc().verbose_version.hash(hasher); | ||
36 | + //bcx.rustc().verbose_version.hash(hasher); | ||
37 | return; | ||
38 | } | ||
39 | // On "nightly"/"beta"/"dev"/etc, keep each "channel" separate. Don't hash | ||
40 | @@ -665,7 +665,7 @@ fn hash_rustc_version(bcx: &BuildContext<'_, '_>, hasher: &mut StableHasher) { | ||
41 | // Keep "host" since some people switch hosts to implicitly change | ||
42 | // targets, (like gnu vs musl or gnu vs msvc). In the future, we may want | ||
43 | // to consider hashing `unit.kind.short_name()` instead. | ||
44 | - bcx.rustc().host.hash(hasher); | ||
45 | + //bcx.rustc().host.hash(hasher); | ||
46 | // None of the other lines are important. Currently they are: | ||
47 | // binary: rustc <-- or "rustdoc" | ||
48 | // commit-hash: 38114ff16e7856f98b2b4be7ab4cd29b38bed59a | ||
49 | -- | ||
50 | 2.39.2 | ||
51 | |||
diff --git a/meta/recipes-devtools/rust/files/cargo-path.patch b/meta/recipes-devtools/rust/files/cargo-path.patch deleted file mode 100644 index 9a50c40220..0000000000 --- a/meta/recipes-devtools/rust/files/cargo-path.patch +++ /dev/null | |||
@@ -1,37 +0,0 @@ | |||
1 | Fix the cargo binary path error and ensure that it is fetched | ||
2 | during rustc bootstrap in rust oe-selftest. | ||
3 | |||
4 | ====================================================================== | ||
5 | ERROR: test_cargoflags (bootstrap_test.BuildBootstrap) | ||
6 | ---------------------------------------------------------------------- | ||
7 | Traceback (most recent call last): | ||
8 | File "/home/build-st/tmp/work/cortexa57-poky-linux/rust/1.74.1/rustc-1.74.1-src/src/bootstrap/bootstrap_test.py", line 157, in test_cargoflags | ||
9 | args, _ = self.build_args(env={"CARGOFLAGS": "--timings"}) | ||
10 | File "/home/build-st/tmp/work/cortexa57-poky-linux/rust/1.74.1/rustc-1.74.1-src/src/bootstrap/bootstrap_test.py", line 154, in build_args | ||
11 | return build.build_bootstrap_cmd(env), env | ||
12 | File "/home/build-st/tmp/work/cortexa57-poky-linux/rust/1.74.1/rustc-1.74.1-src/src/bootstrap/bootstrap.py", line 960, in build_bootstrap_cmd | ||
13 | raise Exception("no cargo executable found at `{}`".format( | ||
14 | Exception: no cargo executable found at `/home/build-st/tmp/work/cortexa57-poky-linux/rust/1.74.1/rustc-1.74.1-src/build/x86_64-unknown-linux-gnu/stage0/bin/cargo` | ||
15 | |||
16 | Upstream-Status: Submitted [https://github.com/rust-lang/rust/pull/120125] | ||
17 | |||
18 | Signed-off-by: Yash Shinde <Yash.Shinde@windriver.com> | ||
19 | --- | ||
20 | diff --git a/src/bootstrap/bootstrap.py b/src/bootstrap/bootstrap.py | ||
21 | --- a/src/bootstrap/bootstrap.py | ||
22 | +++ b/src/bootstrap/bootstrap.py | ||
23 | @@ -954,9 +954,11 @@ | ||
24 | if "RUSTFLAGS_BOOTSTRAP" in env: | ||
25 | env["RUSTFLAGS"] += " " + env["RUSTFLAGS_BOOTSTRAP"] | ||
26 | |||
27 | - env["PATH"] = os.path.join(self.bin_root(), "bin") + \ | ||
28 | - os.pathsep + env["PATH"] | ||
29 | - if not os.path.isfile(self.cargo()): | ||
30 | + cargo_bin_path = os.path.join(self.bin_root(), "bin", "cargo") | ||
31 | + if not os.path.isfile(cargo_bin_path): | ||
32 | + cargo_bin_path = os.getenv("RUST_TARGET_PATH") + "rust-snapshot/bin/cargo" | ||
33 | + env["PATH"] = os.path.dirname(cargo_bin_path) + os.pathsep + env["PATH"] | ||
34 | + else: | ||
35 | raise Exception("no cargo executable found at `{}`".format( | ||
36 | self.cargo())) | ||
37 | args = [self.cargo(), "build", "--manifest-path", | ||
diff --git a/meta/recipes-devtools/rust/rust-cross-canadian.inc b/meta/recipes-devtools/rust/rust-cross-canadian.inc index f59fee7cdd..f962437d6b 100644 --- a/meta/recipes-devtools/rust/rust-cross-canadian.inc +++ b/meta/recipes-devtools/rust/rust-cross-canadian.inc | |||
@@ -12,7 +12,8 @@ DEPENDS += "virtual/${SDK_PREFIX}gcc virtual/nativesdk-libc virtual/nativesdk-${ | |||
12 | 12 | ||
13 | SRC_URI += "file://target-rust-ccld.c" | 13 | SRC_URI += "file://target-rust-ccld.c" |
14 | LIC_FILES_CHKSUM = "file://target-rust-ccld.c;md5=af4e0e29f81a34cffe05aa07c89e93e9;endline=7" | 14 | LIC_FILES_CHKSUM = "file://target-rust-ccld.c;md5=af4e0e29f81a34cffe05aa07c89e93e9;endline=7" |
15 | S = "${WORKDIR}" | 15 | S = "${WORKDIR}/sources" |
16 | UNPACKDIR = "${S}" | ||
16 | 17 | ||
17 | # Need to use our SDK's sh here, see #14878 | 18 | # Need to use our SDK's sh here, see #14878 |
18 | create_sdk_wrapper () { | 19 | create_sdk_wrapper () { |
diff --git a/meta/recipes-devtools/rust/rust-source.inc b/meta/recipes-devtools/rust/rust-source.inc index b14221b6cb..20ef5e82bc 100644 --- a/meta/recipes-devtools/rust/rust-source.inc +++ b/meta/recipes-devtools/rust/rust-source.inc | |||
@@ -7,12 +7,12 @@ SRC_URI += "https://static.rust-lang.org/dist/rustc-${RUST_VERSION}-src.tar.xz;n | |||
7 | file://rv32-missing-syscalls.patch;patchdir=${RUSTSRC} \ | 7 | file://rv32-missing-syscalls.patch;patchdir=${RUSTSRC} \ |
8 | file://rv32-rustix-libc-backend.patch;patchdir=${RUSTSRC} \ | 8 | file://rv32-rustix-libc-backend.patch;patchdir=${RUSTSRC} \ |
9 | file://rv32-cargo-rustix-0.38.19-fix.patch;patchdir=${RUSTSRC} \ | 9 | file://rv32-cargo-rustix-0.38.19-fix.patch;patchdir=${RUSTSRC} \ |
10 | file://cargo-path.patch;patchdir=${RUSTSRC} \ | ||
11 | file://custom-target-cfg.patch;patchdir=${RUSTSRC} \ | 10 | file://custom-target-cfg.patch;patchdir=${RUSTSRC} \ |
12 | file://rustc-bootstrap.patch;patchdir=${RUSTSRC} \ | 11 | file://rustc-bootstrap.patch;patchdir=${RUSTSRC} \ |
13 | file://target-build-value.patch;patchdir=${RUSTSRC} \ | 12 | file://target-build-value.patch;patchdir=${RUSTSRC} \ |
14 | file://0001-Handle-vendored-sources-when-remapping-paths.patch;patchdir=${RUSTSRC} \ | 13 | file://0001-Handle-vendored-sources-when-remapping-paths.patch;patchdir=${RUSTSRC} \ |
15 | file://repro-issue-fix-with-v175.patch;patchdir=${RUSTSRC} \ | 14 | file://repro-issue-fix-with-v175.patch;patchdir=${RUSTSRC} \ |
15 | file://0001-cargo-do-not-write-host-information-into-compilation.patch;patchdir=${RUSTSRC} \ | ||
16 | " | 16 | " |
17 | SRC_URI[rust.sha256sum] = "4526f786d673e4859ff2afa0bab2ba13c918b796519a25c1acce06dba9542340" | 17 | SRC_URI[rust.sha256sum] = "4526f786d673e4859ff2afa0bab2ba13c918b796519a25c1acce06dba9542340" |
18 | 18 | ||
diff --git a/meta/recipes-devtools/rust/rust_1.75.0.bb b/meta/recipes-devtools/rust/rust_1.75.0.bb index 76e1fe2d84..c66c14cc5a 100644 --- a/meta/recipes-devtools/rust/rust_1.75.0.bb +++ b/meta/recipes-devtools/rust/rust_1.75.0.bb | |||
@@ -11,6 +11,11 @@ DEPENDS += "file-native python3-native" | |||
11 | DEPENDS:append:class-native = " rust-llvm-native" | 11 | DEPENDS:append:class-native = " rust-llvm-native" |
12 | DEPENDS:append:class-nativesdk = " nativesdk-rust-llvm" | 12 | DEPENDS:append:class-nativesdk = " nativesdk-rust-llvm" |
13 | 13 | ||
14 | # native rust uses cargo/rustc from binary snapshots to bootstrap | ||
15 | # but everything else should use our native builds | ||
16 | DEPENDS:append:class-target = " cargo-native rust-native" | ||
17 | DEPENDS:append:class-nativesdk = " cargo-native rust-native" | ||
18 | |||
14 | DEPENDS += "rust-llvm (=${PV})" | 19 | DEPENDS += "rust-llvm (=${PV})" |
15 | 20 | ||
16 | RDEPENDS:${PN}:append:class-target = " gcc g++ binutils" | 21 | RDEPENDS:${PN}:append:class-target = " gcc g++ binutils" |
@@ -35,8 +40,6 @@ RUST_ALTERNATE_EXE_PATH_NATIVE = "${STAGING_LIBDIR_NATIVE}/llvm-rust/bin/llvm-co | |||
35 | # own vendoring. | 40 | # own vendoring. |
36 | CARGO_DISABLE_BITBAKE_VENDORING = "1" | 41 | CARGO_DISABLE_BITBAKE_VENDORING = "1" |
37 | 42 | ||
38 | # We can't use RUST_BUILD_SYS here because that may be "musl" if | ||
39 | # TCLIBC="musl". Snapshots are always -unknown-linux-gnu | ||
40 | setup_cargo_environment () { | 43 | setup_cargo_environment () { |
41 | # The first step is to build bootstrap and some early stage tools, | 44 | # The first step is to build bootstrap and some early stage tools, |
42 | # these are build for the same target as the snapshot, e.g. | 45 | # these are build for the same target as the snapshot, e.g. |
@@ -48,14 +51,14 @@ setup_cargo_environment () { | |||
48 | inherit rust-target-config | 51 | inherit rust-target-config |
49 | 52 | ||
50 | do_rust_setup_snapshot () { | 53 | do_rust_setup_snapshot () { |
51 | for installer in "${WORKDIR}/rust-snapshot-components/"*"/install.sh"; do | 54 | for installer in "${UNPACKDIR}/rust-snapshot-components/"*"/install.sh"; do |
52 | "${installer}" --prefix="${WORKDIR}/rust-snapshot" --disable-ldconfig | 55 | "${installer}" --prefix="${WORKDIR}/rust-snapshot" --disable-ldconfig |
53 | done | 56 | done |
54 | 57 | ||
55 | # Some versions of rust (e.g. 1.18.0) tries to find cargo in stage0/bin/cargo | 58 | # Some versions of rust (e.g. 1.18.0) tries to find cargo in stage0/bin/cargo |
56 | # and fail without it there. | 59 | # and fail without it there. |
57 | mkdir -p ${RUSTSRC}/build/${BUILD_SYS} | 60 | mkdir -p ${RUSTSRC}/build/${RUST_BUILD_SYS} |
58 | ln -sf ${WORKDIR}/rust-snapshot/ ${RUSTSRC}/build/${BUILD_SYS}/stage0 | 61 | ln -sf ${WORKDIR}/rust-snapshot/ ${RUSTSRC}/build/${RUST_BUILD_SYS}/stage0 |
59 | 62 | ||
60 | # Need to use uninative's loader if enabled/present since the library paths | 63 | # Need to use uninative's loader if enabled/present since the library paths |
61 | # are used internally by rust and result in symbol mismatches if we don't | 64 | # are used internally by rust and result in symbol mismatches if we don't |
@@ -70,6 +73,11 @@ addtask do_test_compile after do_configure do_rust_gen_targets | |||
70 | do_rust_setup_snapshot[dirs] += "${WORKDIR}/rust-snapshot" | 73 | do_rust_setup_snapshot[dirs] += "${WORKDIR}/rust-snapshot" |
71 | do_rust_setup_snapshot[vardepsexclude] += "UNINATIVE_LOADER" | 74 | do_rust_setup_snapshot[vardepsexclude] += "UNINATIVE_LOADER" |
72 | 75 | ||
76 | RUSTC_BOOTSTRAP = "${STAGING_BINDIR_NATIVE}/rustc" | ||
77 | CARGO_BOOTSTRAP = "${STAGING_BINDIR_NATIVE}/cargo" | ||
78 | RUSTC_BOOTSTRAP:class-native = "${WORKDIR}/rust-snapshot/bin/rustc" | ||
79 | CARGO_BOOTSTRAP:class-native = "${WORKDIR}/rust-snapshot/bin/cargo" | ||
80 | |||
73 | python do_configure() { | 81 | python do_configure() { |
74 | import json | 82 | import json |
75 | import configparser | 83 | import configparser |
@@ -141,12 +149,11 @@ python do_configure() { | |||
141 | config.add_section("build") | 149 | config.add_section("build") |
142 | config.set("build", "submodules", e(False)) | 150 | config.set("build", "submodules", e(False)) |
143 | config.set("build", "docs", e(False)) | 151 | config.set("build", "docs", e(False)) |
144 | config.set("build", "tools", ["rust-demangler",]) | ||
145 | 152 | ||
146 | rustc = d.expand("${WORKDIR}/rust-snapshot/bin/rustc") | 153 | rustc = d.getVar('RUSTC_BOOTSTRAP') |
147 | config.set("build", "rustc", e(rustc)) | 154 | config.set("build", "rustc", e(rustc)) |
148 | 155 | ||
149 | cargo = d.expand("${WORKDIR}/rust-snapshot/bin/cargo") | 156 | cargo = d.getVar('CARGO_BOOTSTRAP') |
150 | config.set("build", "cargo", e(cargo)) | 157 | config.set("build", "cargo", e(cargo)) |
151 | 158 | ||
152 | config.set("build", "vendor", e(True)) | 159 | config.set("build", "vendor", e(True)) |
diff --git a/meta/recipes-devtools/strace/strace/0001-configure-Use-autoconf-macro-to-detect-largefile-sup.patch b/meta/recipes-devtools/strace/strace/0001-configure-Use-autoconf-macro-to-detect-largefile-sup.patch index 76ca7a76a8..86dcd97713 100644 --- a/meta/recipes-devtools/strace/strace/0001-configure-Use-autoconf-macro-to-detect-largefile-sup.patch +++ b/meta/recipes-devtools/strace/strace/0001-configure-Use-autoconf-macro-to-detect-largefile-sup.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 470399f3636c412b74f9daf6ae430b13c3126f02 Mon Sep 17 00:00:00 2001 | 1 | From ec543417b8e1df7c71dfc4728313849d9ab669a6 Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Thu, 15 Dec 2022 15:54:27 -0800 | 3 | Date: Thu, 15 Dec 2022 15:54:27 -0800 |
4 | Subject: [PATCH] configure: Use autoconf macro to detect largefile support | 4 | Subject: [PATCH] configure: Use autoconf macro to detect largefile support |
@@ -13,10 +13,10 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com> | |||
13 | 1 file changed, 2 insertions(+) | 13 | 1 file changed, 2 insertions(+) |
14 | 14 | ||
15 | diff --git a/configure.ac b/configure.ac | 15 | diff --git a/configure.ac b/configure.ac |
16 | index 4797b42dd..7d57fb254 100644 | 16 | index 36fff90..9e40ab5 100644 |
17 | --- a/configure.ac | 17 | --- a/configure.ac |
18 | +++ b/configure.ac | 18 | +++ b/configure.ac |
19 | @@ -43,6 +43,8 @@ AC_PROG_INSTALL | 19 | @@ -42,6 +42,8 @@ AC_PROG_INSTALL |
20 | AC_PROG_RANLIB | 20 | AC_PROG_RANLIB |
21 | AC_PROG_LN_S | 21 | AC_PROG_LN_S |
22 | 22 | ||
diff --git a/meta/recipes-devtools/strace/strace/0001-strace-fix-reproducibilty-issues.patch b/meta/recipes-devtools/strace/strace/0001-strace-fix-reproducibilty-issues.patch index c4c176e6bc..9cdb7c6134 100644 --- a/meta/recipes-devtools/strace/strace/0001-strace-fix-reproducibilty-issues.patch +++ b/meta/recipes-devtools/strace/strace/0001-strace-fix-reproducibilty-issues.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 6309792c49ca900cec6a7f1dc5b51bf75b629e11 Mon Sep 17 00:00:00 2001 | 1 | From d5f681c2de834110de260f4d071c7bc1fb0d7564 Mon Sep 17 00:00:00 2001 |
2 | From: Jeremy Puhlman <jpuhlman@mvista.com> | 2 | From: Jeremy Puhlman <jpuhlman@mvista.com> |
3 | Date: Wed, 11 Mar 2020 19:56:55 +0000 | 3 | Date: Wed, 11 Mar 2020 19:56:55 +0000 |
4 | Subject: [PATCH] strace: fix reproducibilty issues | 4 | Subject: [PATCH] strace: fix reproducibilty issues |
@@ -13,7 +13,7 @@ Signed-off-by: Jeremy Puhlman <jpuhlman@mvista.com> | |||
13 | 1 file changed, 2 insertions(+), 2 deletions(-) | 13 | 1 file changed, 2 insertions(+), 2 deletions(-) |
14 | 14 | ||
15 | diff --git a/tests/gen_tests.sh b/tests/gen_tests.sh | 15 | diff --git a/tests/gen_tests.sh b/tests/gen_tests.sh |
16 | index 5e1e7c9..1e65eac 100755 | 16 | index 8f47651..65267db 100755 |
17 | --- a/tests/gen_tests.sh | 17 | --- a/tests/gen_tests.sh |
18 | +++ b/tests/gen_tests.sh | 18 | +++ b/tests/gen_tests.sh |
19 | @@ -46,7 +46,7 @@ while read -r name arg0 args; do { | 19 | @@ -46,7 +46,7 @@ while read -r name arg0 args; do { |
@@ -25,7 +25,7 @@ index 5e1e7c9..1e65eac 100755 | |||
25 | 25 | ||
26 | case "$arg0" in | 26 | case "$arg0" in |
27 | +*) | 27 | +*) |
28 | @@ -80,7 +80,7 @@ while read -r name arg0 args; do { | 28 | @@ -91,7 +91,7 @@ while read -r name arg0 args; do { |
29 | 29 | ||
30 | if [ -n "$names" ]; then | 30 | if [ -n "$names" ]; then |
31 | { | 31 | { |
@@ -34,6 +34,3 @@ index 5e1e7c9..1e65eac 100755 | |||
34 | printf 'GEN_TESTS =' | 34 | printf 'GEN_TESTS =' |
35 | printf ' %s.gen.test' $names | 35 | printf ' %s.gen.test' $names |
36 | echo | 36 | echo |
37 | -- | ||
38 | 2.24.1 | ||
39 | |||
diff --git a/meta/recipes-devtools/strace/strace/0002-tests-Replace-off64_t-with-off_t.patch b/meta/recipes-devtools/strace/strace/0002-tests-Replace-off64_t-with-off_t.patch index 0cabdfe99f..fc6ed7ac4d 100644 --- a/meta/recipes-devtools/strace/strace/0002-tests-Replace-off64_t-with-off_t.patch +++ b/meta/recipes-devtools/strace/strace/0002-tests-Replace-off64_t-with-off_t.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 579b2ebe52d4b97f954e6188df2d07e137820075 Mon Sep 17 00:00:00 2001 | 1 | From 1891db87da3b204c512f47155aaa4e692008f1bf Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Thu, 15 Dec 2022 15:56:13 -0800 | 3 | Date: Thu, 15 Dec 2022 15:56:13 -0800 |
4 | Subject: [PATCH] tests: Replace off64_t with off_t | 4 | Subject: [PATCH] tests: Replace off64_t with off_t |
@@ -10,9 +10,12 @@ Upstream-Status: Submitted [https://github.com/strace/strace/pull/230] | |||
10 | Signed-off-by: Khem Raj <raj.khem@gmail.com> | 10 | Signed-off-by: Khem Raj <raj.khem@gmail.com> |
11 | --- | 11 | --- |
12 | tests/readahead.c | 2 +- | 12 | tests/readahead.c | 2 +- |
13 | tests/sync_file_range.c | 4 ++-- | ||
13 | tests/sync_file_range2.c | 4 ++-- | 14 | tests/sync_file_range2.c | 4 ++-- |
14 | 2 files changed, 3 insertions(+), 3 deletions(-) | 15 | 3 files changed, 5 insertions(+), 5 deletions(-) |
15 | 16 | ||
17 | diff --git a/tests/readahead.c b/tests/readahead.c | ||
18 | index 1072378..b9597e6 100644 | ||
16 | --- a/tests/readahead.c | 19 | --- a/tests/readahead.c |
17 | +++ b/tests/readahead.c | 20 | +++ b/tests/readahead.c |
18 | @@ -42,7 +42,7 @@ static const int fds[] = { | 21 | @@ -42,7 +42,7 @@ static const int fds[] = { |
@@ -24,29 +27,33 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com> | |||
24 | -0x8000000000000000LL, | 27 | -0x8000000000000000LL, |
25 | -0x5060708090a0b0c0LL, | 28 | -0x5060708090a0b0c0LL, |
26 | -1LL, | 29 | -1LL, |
27 | --- a/tests/sync_file_range2.c | 30 | diff --git a/tests/sync_file_range.c b/tests/sync_file_range.c |
28 | +++ b/tests/sync_file_range2.c | 31 | index e93ab6c..dc76865 100644 |
32 | --- a/tests/sync_file_range.c | ||
33 | +++ b/tests/sync_file_range.c | ||
29 | @@ -20,8 +20,8 @@ int | 34 | @@ -20,8 +20,8 @@ int |
30 | main(void) | 35 | main(void) |
31 | { | 36 | { |
32 | const int fd = -1; | 37 | const int fd = -1; |
33 | - const off64_t offset = 0xdeadbeefbadc0ded; | 38 | - const off64_t offset = 0xdeadbeefbadc0dedULL; |
34 | - const off64_t nbytes = 0xfacefeedcafef00d; | 39 | - const off64_t nbytes = 0xfacefeedcafef00dULL; |
35 | + const off_t offset = 0xdeadbeefbadc0ded; | 40 | + const off_t offset = 0xdeadbeefbadc0dedULL; |
36 | + const off_t nbytes = 0xfacefeedcafef00d; | 41 | + const off_t nbytes = 0xfacefeedcafef00dULL; |
37 | const unsigned int flags = -1; | 42 | const unsigned int flags = -1; |
38 | 43 | ||
39 | int rc = sync_file_range(fd, offset, nbytes, flags); | 44 | int rc = sync_file_range(fd, offset, nbytes, flags); |
40 | --- a/tests/sync_file_range.c | 45 | diff --git a/tests/sync_file_range2.c b/tests/sync_file_range2.c |
41 | +++ b/tests/sync_file_range.c | 46 | index b8dc712..e4b003c 100644 |
47 | --- a/tests/sync_file_range2.c | ||
48 | +++ b/tests/sync_file_range2.c | ||
42 | @@ -20,8 +20,8 @@ int | 49 | @@ -20,8 +20,8 @@ int |
43 | main(void) | 50 | main(void) |
44 | { | 51 | { |
45 | const int fd = -1; | 52 | const int fd = -1; |
46 | - const off64_t offset = 0xdeadbeefbadc0dedULL; | 53 | - const off64_t offset = 0xdeadbeefbadc0ded; |
47 | - const off64_t nbytes = 0xfacefeedcafef00dULL; | 54 | - const off64_t nbytes = 0xfacefeedcafef00d; |
48 | + const off_t offset = 0xdeadbeefbadc0dedULL; | 55 | + const off_t offset = 0xdeadbeefbadc0ded; |
49 | + const off_t nbytes = 0xfacefeedcafef00dULL; | 56 | + const off_t nbytes = 0xfacefeedcafef00d; |
50 | const unsigned int flags = -1; | 57 | const unsigned int flags = -1; |
51 | 58 | ||
52 | int rc = sync_file_range(fd, offset, nbytes, flags); | 59 | int rc = sync_file_range(fd, offset, nbytes, flags); |
diff --git a/meta/recipes-devtools/strace/strace/Makefile-ptest.patch b/meta/recipes-devtools/strace/strace/Makefile-ptest.patch index 9af7737662..3e50bf40f0 100644 --- a/meta/recipes-devtools/strace/strace/Makefile-ptest.patch +++ b/meta/recipes-devtools/strace/strace/Makefile-ptest.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 1f50e0a09d828be8f5b1f28db9af0b12492a1553 Mon Sep 17 00:00:00 2001 | 1 | From 245f6322cf64e562d3227dfac26ce93c905b0380 Mon Sep 17 00:00:00 2001 |
2 | From: Gabriel Barbu <gabriel.barbu@enea.com> | 2 | From: Gabriel Barbu <gabriel.barbu@enea.com> |
3 | Date: Thu, 25 Jul 2013 15:28:33 +0200 | 3 | Date: Thu, 25 Jul 2013 15:28:33 +0200 |
4 | Subject: [PATCH] strace: Add ptest | 4 | Subject: [PATCH] strace: Add ptest |
@@ -8,16 +8,15 @@ Upstream-Status: Inappropriate | |||
8 | Signed-off-by: Gabriel Barbu <gabriel.barbu@enea.com> | 8 | Signed-off-by: Gabriel Barbu <gabriel.barbu@enea.com> |
9 | Signed-off-by: Chong Lu <Chong.Lu@windriver.com> | 9 | Signed-off-by: Chong Lu <Chong.Lu@windriver.com> |
10 | Signed-off-by: Anuj Mittal <anuj.mittal@intel.com> | 10 | Signed-off-by: Anuj Mittal <anuj.mittal@intel.com> |
11 | |||
12 | --- | 11 | --- |
13 | tests/Makefile.am | 20 ++++++++++++++++++++ | 12 | tests/Makefile.am | 20 ++++++++++++++++++++ |
14 | 1 file changed, 20 insertions(+) | 13 | 1 file changed, 20 insertions(+) |
15 | 14 | ||
16 | diff --git a/tests/Makefile.am b/tests/Makefile.am | 15 | diff --git a/tests/Makefile.am b/tests/Makefile.am |
17 | index 55566ee..a7ae6f9 100644 | 16 | index ef68898..735b73d 100644 |
18 | --- a/tests/Makefile.am | 17 | --- a/tests/Makefile.am |
19 | +++ b/tests/Makefile.am | 18 | +++ b/tests/Makefile.am |
20 | @@ -16,6 +16,7 @@ SIZEOF_LONG = @SIZEOF_LONG@ | 19 | @@ -18,6 +18,7 @@ SIZEOF_LONG = @SIZEOF_LONG@ |
21 | MPERS_NAME = | 20 | MPERS_NAME = |
22 | MPERS_CC_FLAGS = | 21 | MPERS_CC_FLAGS = |
23 | ARCH_MFLAGS = | 22 | ARCH_MFLAGS = |
@@ -25,7 +24,7 @@ index 55566ee..a7ae6f9 100644 | |||
25 | AM_CFLAGS = $(WARN_CFLAGS) | 24 | AM_CFLAGS = $(WARN_CFLAGS) |
26 | bundled_CPPFLAGS = | 25 | bundled_CPPFLAGS = |
27 | if USE_BUNDLED_HEADERS | 26 | if USE_BUNDLED_HEADERS |
28 | @@ -703,3 +704,22 @@ BUILT_SOURCES = ksysent.h | 27 | @@ -889,3 +890,22 @@ BUILT_SOURCES = ksysent.h |
29 | CLEANFILES = ksysent.h | 28 | CLEANFILES = ksysent.h |
30 | 29 | ||
31 | include ../src/scno.am | 30 | include ../src/scno.am |
diff --git a/meta/recipes-devtools/strace/strace/ptest-spacesave.patch b/meta/recipes-devtools/strace/strace/ptest-spacesave.patch index 4e86ccadc5..14ea469c59 100644 --- a/meta/recipes-devtools/strace/strace/ptest-spacesave.patch +++ b/meta/recipes-devtools/strace/strace/ptest-spacesave.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 3fad4821d90cd264d1b94253b9cf4fdf5d4034b8 Mon Sep 17 00:00:00 2001 | 1 | From cf77d301faf96cc892c6df0e19fccbf5853f249d Mon Sep 17 00:00:00 2001 |
2 | From: Richard Purdie <richard.purdie@linuxfoundation.org> | 2 | From: Richard Purdie <richard.purdie@linuxfoundation.org> |
3 | Date: Wed, 29 May 2019 00:10:32 +0100 | 3 | Date: Wed, 29 May 2019 00:10:32 +0100 |
4 | Subject: [PATCH] strace: Tweak ptest disk space management | 4 | Subject: [PATCH] strace: Tweak ptest disk space management |
@@ -9,16 +9,15 @@ using around 600MB of disk space and running our ptest images out of space. | |||
9 | RP 2019/5/29 | 9 | RP 2019/5/29 |
10 | 10 | ||
11 | Upstream-Status: Inappropriate [specific to OE image space issues] | 11 | Upstream-Status: Inappropriate [specific to OE image space issues] |
12 | |||
13 | --- | 12 | --- |
14 | tests/gen_tests.sh | 1 + | 13 | tests/gen_tests.sh | 1 + |
15 | 1 file changed, 1 insertion(+) | 14 | 1 file changed, 1 insertion(+) |
16 | 15 | ||
17 | diff --git a/tests/gen_tests.sh b/tests/gen_tests.sh | 16 | diff --git a/tests/gen_tests.sh b/tests/gen_tests.sh |
18 | index 3540204..5e1e7c9 100755 | 17 | index ce1467e..8f47651 100755 |
19 | --- a/tests/gen_tests.sh | 18 | --- a/tests/gen_tests.sh |
20 | +++ b/tests/gen_tests.sh | 19 | +++ b/tests/gen_tests.sh |
21 | @@ -62,6 +62,7 @@ while read -r name arg0 args; do { | 20 | @@ -73,6 +73,7 @@ while read -r name arg0 args; do { |
22 | $hdr | 21 | $hdr |
23 | . "\${srcdir=.}/init.sh" | 22 | . "\${srcdir=.}/init.sh" |
24 | run_strace_match_diff $arg0 $args | 23 | run_strace_match_diff $arg0 $args |
diff --git a/meta/recipes-devtools/strace/strace/skip-load.patch b/meta/recipes-devtools/strace/strace/skip-load.patch index b1acfda5d8..fe737d8f9a 100644 --- a/meta/recipes-devtools/strace/strace/skip-load.patch +++ b/meta/recipes-devtools/strace/strace/skip-load.patch | |||
@@ -1,9 +1,13 @@ | |||
1 | From 3af7272a3435fc79e5da729155480a346ccf2d44 Mon Sep 17 00:00:00 2001 | ||
2 | From: Ross Burton <ross.burton@arm.com> | ||
3 | Date: Mon, 31 Jan 2022 17:40:13 +0000 | ||
4 | Subject: [PATCH] strace: skip a number of load-sensitive tests | ||
5 | |||
1 | Skip tests which are known to be unreliable under load, typically because they | 6 | Skip tests which are known to be unreliable under load, typically because they |
2 | care about timing. | 7 | care about timing. |
3 | 8 | ||
4 | Upstream-Status: Inappropriate | 9 | Upstream-Status: Inappropriate |
5 | Signed-off-by: Ross Burton <ross.burton@arm.com> | 10 | Signed-off-by: Ross Burton <ross.burton@arm.com> |
6 | |||
7 | --- | 11 | --- |
8 | tests/clock_nanosleep.gen.test | 1 + | 12 | tests/clock_nanosleep.gen.test | 1 + |
9 | tests/delay.test | 1 + | 13 | tests/delay.test | 1 + |
@@ -44,6 +48,3 @@ index 8299737..d89c7df 100755 | |||
44 | 48 | ||
45 | r_opt="${1:--r}" | 49 | r_opt="${1:--r}" |
46 | 50 | ||
47 | -- | ||
48 | 2.25.1 | ||
49 | |||
diff --git a/meta/recipes-devtools/strace/strace/update-gawk-paths.patch b/meta/recipes-devtools/strace/strace/update-gawk-paths.patch index a16ede95c2..429588d218 100644 --- a/meta/recipes-devtools/strace/strace/update-gawk-paths.patch +++ b/meta/recipes-devtools/strace/strace/update-gawk-paths.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 4cd26cfaec255ec87f22abe886e0be89312a9671 Mon Sep 17 00:00:00 2001 | 1 | From 920ff2e0e838fae4fe49bf8e8fa093d5b2485677 Mon Sep 17 00:00:00 2001 |
2 | From: Andre McCurdy <armccurdy@gmail.com> | 2 | From: Andre McCurdy <armccurdy@gmail.com> |
3 | Date: Mon, 18 Jan 2016 11:01:00 -0800 | 3 | Date: Mon, 18 Jan 2016 11:01:00 -0800 |
4 | Subject: [PATCH] update gawk paths, /bin/gawk -> /usr/bin/gawk | 4 | Subject: [PATCH] update gawk paths, /bin/gawk -> /usr/bin/gawk |
@@ -11,22 +11,24 @@ from #!/bin/gawk to #!/usr/bin/gawk. Fixes missing RDPENDS QA tests: | |||
11 | Upstream-Status: Inappropriate [configuration] | 11 | Upstream-Status: Inappropriate [configuration] |
12 | 12 | ||
13 | Signed-off-by: Andre McCurdy <armccurdy@gmail.com> | 13 | Signed-off-by: Andre McCurdy <armccurdy@gmail.com> |
14 | |||
15 | --- | 14 | --- |
16 | src/mpers.awk | 2 +- | 15 | src/mpers.awk | 2 +- |
16 | tests-m32/caps-abbrev.awk | 2 +- | ||
17 | tests-m32/caps.awk | 2 +- | 17 | tests-m32/caps.awk | 2 +- |
18 | tests-m32/match.awk | 2 +- | 18 | tests-m32/match.awk | 2 +- |
19 | tests-m32/rt_sigaction.awk | 2 +- | 19 | tests-m32/rt_sigaction.awk | 2 +- |
20 | tests-mx32/caps-abbrev.awk | 2 +- | ||
20 | tests-mx32/caps.awk | 2 +- | 21 | tests-mx32/caps.awk | 2 +- |
21 | tests-mx32/match.awk | 2 +- | 22 | tests-mx32/match.awk | 2 +- |
22 | tests-mx32/rt_sigaction.awk | 2 +- | 23 | tests-mx32/rt_sigaction.awk | 2 +- |
24 | tests/caps-abbrev.awk | 2 +- | ||
23 | tests/caps.awk | 2 +- | 25 | tests/caps.awk | 2 +- |
24 | tests/match.awk | 2 +- | 26 | tests/match.awk | 2 +- |
25 | tests/rt_sigaction.awk | 2 +- | 27 | tests/rt_sigaction.awk | 2 +- |
26 | 10 files changed, 10 insertions(+), 10 deletions(-) | 28 | 13 files changed, 13 insertions(+), 13 deletions(-) |
27 | 29 | ||
28 | diff --git a/src/mpers.awk b/src/mpers.awk | 30 | diff --git a/src/mpers.awk b/src/mpers.awk |
29 | index 25a212f..b2ff53f 100644 | 31 | index 425ab65..33ece5c 100644 |
30 | --- a/src/mpers.awk | 32 | --- a/src/mpers.awk |
31 | +++ b/src/mpers.awk | 33 | +++ b/src/mpers.awk |
32 | @@ -1,4 +1,4 @@ | 34 | @@ -1,4 +1,4 @@ |
@@ -35,6 +37,16 @@ index 25a212f..b2ff53f 100644 | |||
35 | # | 37 | # |
36 | # Copyright (c) 2015 Elvira Khabirova <lineprinter0@gmail.com> | 38 | # Copyright (c) 2015 Elvira Khabirova <lineprinter0@gmail.com> |
37 | # Copyright (c) 2015-2016 Dmitry V. Levin <ldv@strace.io> | 39 | # Copyright (c) 2015-2016 Dmitry V. Levin <ldv@strace.io> |
40 | diff --git a/tests-m32/caps-abbrev.awk b/tests-m32/caps-abbrev.awk | ||
41 | index e8393fd..76c4e51 100644 | ||
42 | --- a/tests-m32/caps-abbrev.awk | ||
43 | +++ b/tests-m32/caps-abbrev.awk | ||
44 | @@ -1,4 +1,4 @@ | ||
45 | -#!/bin/gawk | ||
46 | +#!/usr/bin/gawk | ||
47 | # | ||
48 | # This file is part of caps strace test. | ||
49 | # | ||
38 | diff --git a/tests-m32/caps.awk b/tests-m32/caps.awk | 50 | diff --git a/tests-m32/caps.awk b/tests-m32/caps.awk |
39 | index 69500ec..e5dfd87 100644 | 51 | index 69500ec..e5dfd87 100644 |
40 | --- a/tests-m32/caps.awk | 52 | --- a/tests-m32/caps.awk |
@@ -65,6 +77,16 @@ index dce78f5..573d9ea 100644 | |||
65 | # | 77 | # |
66 | # Copyright (c) 2014-2015 Dmitry V. Levin <ldv@strace.io> | 78 | # Copyright (c) 2014-2015 Dmitry V. Levin <ldv@strace.io> |
67 | # Copyright (c) 2016 Elvira Khabirova <lineprinter0@gmail.com> | 79 | # Copyright (c) 2016 Elvira Khabirova <lineprinter0@gmail.com> |
80 | diff --git a/tests-mx32/caps-abbrev.awk b/tests-mx32/caps-abbrev.awk | ||
81 | index e8393fd..76c4e51 100644 | ||
82 | --- a/tests-mx32/caps-abbrev.awk | ||
83 | +++ b/tests-mx32/caps-abbrev.awk | ||
84 | @@ -1,4 +1,4 @@ | ||
85 | -#!/bin/gawk | ||
86 | +#!/usr/bin/gawk | ||
87 | # | ||
88 | # This file is part of caps strace test. | ||
89 | # | ||
68 | diff --git a/tests-mx32/caps.awk b/tests-mx32/caps.awk | 90 | diff --git a/tests-mx32/caps.awk b/tests-mx32/caps.awk |
69 | index 69500ec..e5dfd87 100644 | 91 | index 69500ec..e5dfd87 100644 |
70 | --- a/tests-mx32/caps.awk | 92 | --- a/tests-mx32/caps.awk |
@@ -95,6 +117,16 @@ index dce78f5..573d9ea 100644 | |||
95 | # | 117 | # |
96 | # Copyright (c) 2014-2015 Dmitry V. Levin <ldv@strace.io> | 118 | # Copyright (c) 2014-2015 Dmitry V. Levin <ldv@strace.io> |
97 | # Copyright (c) 2016 Elvira Khabirova <lineprinter0@gmail.com> | 119 | # Copyright (c) 2016 Elvira Khabirova <lineprinter0@gmail.com> |
120 | diff --git a/tests/caps-abbrev.awk b/tests/caps-abbrev.awk | ||
121 | index e8393fd..76c4e51 100644 | ||
122 | --- a/tests/caps-abbrev.awk | ||
123 | +++ b/tests/caps-abbrev.awk | ||
124 | @@ -1,4 +1,4 @@ | ||
125 | -#!/bin/gawk | ||
126 | +#!/usr/bin/gawk | ||
127 | # | ||
128 | # This file is part of caps strace test. | ||
129 | # | ||
98 | diff --git a/tests/caps.awk b/tests/caps.awk | 130 | diff --git a/tests/caps.awk b/tests/caps.awk |
99 | index 69500ec..e5dfd87 100644 | 131 | index 69500ec..e5dfd87 100644 |
100 | --- a/tests/caps.awk | 132 | --- a/tests/caps.awk |
@@ -125,33 +157,3 @@ index dce78f5..573d9ea 100644 | |||
125 | # | 157 | # |
126 | # Copyright (c) 2014-2015 Dmitry V. Levin <ldv@strace.io> | 158 | # Copyright (c) 2014-2015 Dmitry V. Levin <ldv@strace.io> |
127 | # Copyright (c) 2016 Elvira Khabirova <lineprinter0@gmail.com> | 159 | # Copyright (c) 2016 Elvira Khabirova <lineprinter0@gmail.com> |
128 | diff --git a/tests-m32/caps-abbrev.awk b/tests-m32/caps-abbrev.awk | ||
129 | index c00023b..a56cd56 100644 | ||
130 | --- a/tests-m32/caps-abbrev.awk | ||
131 | +++ b/tests-m32/caps-abbrev.awk | ||
132 | @@ -1,4 +1,4 @@ | ||
133 | -#!/bin/gawk | ||
134 | +#!/usr/bin/gawk | ||
135 | # | ||
136 | # This file is part of caps strace test. | ||
137 | # | ||
138 | diff --git a/tests-mx32/caps-abbrev.awk b/tests-mx32/caps-abbrev.awk | ||
139 | index c00023b..a56cd56 100644 | ||
140 | --- a/tests-mx32/caps-abbrev.awk | ||
141 | +++ b/tests-mx32/caps-abbrev.awk | ||
142 | @@ -1,4 +1,4 @@ | ||
143 | -#!/bin/gawk | ||
144 | +#!/usr/bin/gawk | ||
145 | # | ||
146 | # This file is part of caps strace test. | ||
147 | # | ||
148 | diff --git a/tests/caps-abbrev.awk b/tests/caps-abbrev.awk | ||
149 | index c00023b..a56cd56 100644 | ||
150 | --- a/tests/caps-abbrev.awk | ||
151 | +++ b/tests/caps-abbrev.awk | ||
152 | @@ -1,4 +1,4 @@ | ||
153 | -#!/bin/gawk | ||
154 | +#!/usr/bin/gawk | ||
155 | # | ||
156 | # This file is part of caps strace test. | ||
157 | # | ||
diff --git a/meta/recipes-devtools/strace/strace_6.8.bb b/meta/recipes-devtools/strace/strace_6.9.bb index b22c57b45d..bb9f8e42ba 100644 --- a/meta/recipes-devtools/strace/strace_6.8.bb +++ b/meta/recipes-devtools/strace/strace_6.9.bb | |||
@@ -15,7 +15,7 @@ SRC_URI = "https://strace.io/files/${PV}/strace-${PV}.tar.xz \ | |||
15 | file://0001-configure-Use-autoconf-macro-to-detect-largefile-sup.patch \ | 15 | file://0001-configure-Use-autoconf-macro-to-detect-largefile-sup.patch \ |
16 | file://0002-tests-Replace-off64_t-with-off_t.patch \ | 16 | file://0002-tests-Replace-off64_t-with-off_t.patch \ |
17 | " | 17 | " |
18 | SRC_URI[sha256sum] = "ba6950a96824cdf93a584fa04f0a733896d2a6bc5f0ad9ffe505d9b41e970149" | 18 | SRC_URI[sha256sum] = "da189e990a82e3ca3a5a4631012f7ecfd489dab459854d82d8caf6a865c1356a" |
19 | 19 | ||
20 | inherit autotools ptest | 20 | inherit autotools ptest |
21 | 21 | ||
@@ -47,10 +47,6 @@ do_install_ptest() { | |||
47 | sed -i -e '/^src/s/strace.*[0-9]/ptest/' ${D}/${PTEST_PATH}/${TESTDIR}/Makefile | 47 | sed -i -e '/^src/s/strace.*[0-9]/ptest/' ${D}/${PTEST_PATH}/${TESTDIR}/Makefile |
48 | } | 48 | } |
49 | 49 | ||
50 | RDEPENDS:${PN}-ptest += "make coreutils grep gawk sed" | 50 | RDEPENDS:${PN}-ptest += "make coreutils grep gawk sed locale-base-en-us" |
51 | |||
52 | RDEPENDS:${PN}-ptest:append:libc-glibc = "\ | ||
53 | locale-base-en-us.iso-8859-1 \ | ||
54 | " | ||
55 | 51 | ||
56 | BBCLASSEXTEND = "native" | 52 | BBCLASSEXTEND = "native" |
diff --git a/meta/recipes-devtools/systemd-bootchart/systemd-bootchart/0001-Define-portable-basename-function.patch b/meta/recipes-devtools/systemd-bootchart/systemd-bootchart/0001-Define-portable-basename-function.patch new file mode 100644 index 0000000000..dc4c44c6af --- /dev/null +++ b/meta/recipes-devtools/systemd-bootchart/systemd-bootchart/0001-Define-portable-basename-function.patch | |||
@@ -0,0 +1,59 @@ | |||
1 | From 4b19c32791fb8a8663b3335f8a3675a2bbabe688 Mon Sep 17 00:00:00 2001 | ||
2 | From: Khem Raj <raj.khem@gmail.com> | ||
3 | Date: Mon, 20 May 2024 18:40:36 -0700 | ||
4 | Subject: [PATCH] Define portable basename function | ||
5 | |||
6 | Newer version of musl have removed prototype for basename in string.h [1] | ||
7 | which now makes it fail to compile with GCC14+ compiler therefore | ||
8 | define local basename utility function. | ||
9 | |||
10 | [1] https://git.musl-libc.org/cgit/musl/commit/?id=725e17ed6dff4d0cd22487bb64470881e86a92e7 | ||
11 | |||
12 | Upstream-Status: Submitted [https://github.com/systemd/systemd-bootchart/pull/53] | ||
13 | Signed-off-by: Khem Raj <raj.khem@gmail.com> | ||
14 | --- | ||
15 | src/conf-files.c | 14 ++++++++++++-- | ||
16 | 1 file changed, 12 insertions(+), 2 deletions(-) | ||
17 | |||
18 | diff --git a/src/conf-files.c b/src/conf-files.c | ||
19 | index 5dd2d7d..b932bb2 100644 | ||
20 | --- a/src/conf-files.c | ||
21 | +++ b/src/conf-files.c | ||
22 | @@ -35,6 +35,16 @@ | ||
23 | #include "strv.h" | ||
24 | #include "util.h" | ||
25 | |||
26 | +/*** | ||
27 | + * basename is implemented differently across different C libraries. This | ||
28 | + * implementation matches the one provided by the GNU libc, and does not | ||
29 | + * modify its input parameter. | ||
30 | +***/ | ||
31 | +static const char *sbc_basename(const char *path) { | ||
32 | + const char *base = strrchr(path, '/'); | ||
33 | + return base ? base + 1 : path; | ||
34 | +} | ||
35 | + | ||
36 | static int files_add(Hashmap *h, const char *root, const char *path, const char *suffix) { | ||
37 | _cleanup_closedir_ DIR *dir = NULL; | ||
38 | const char *dirpath; | ||
39 | @@ -63,7 +73,7 @@ static int files_add(Hashmap *h, const char *root, const char *path, const char | ||
40 | if (!p) | ||
41 | return -ENOMEM; | ||
42 | |||
43 | - r = hashmap_put(h, basename(p), p); | ||
44 | + r = hashmap_put(h, sbc_basename(p), p); | ||
45 | if (r == -EEXIST) { | ||
46 | log_debug("Skipping overridden file: %s.", p); | ||
47 | free(p); | ||
48 | @@ -84,7 +94,7 @@ static int base_cmp(const void *a, const void *b) { | ||
49 | |||
50 | s1 = *(char * const *)a; | ||
51 | s2 = *(char * const *)b; | ||
52 | - return strcmp(basename(s1), basename(s2)); | ||
53 | + return strcmp(sbc_basename(s1), sbc_basename(s2)); | ||
54 | } | ||
55 | |||
56 | static int conf_files_list_strv_internal(char ***strv, const char *suffix, const char *root, char **dirs) { | ||
57 | -- | ||
58 | 2.45.1 | ||
59 | |||
diff --git a/meta/recipes-devtools/systemd-bootchart/systemd-bootchart_235.bb b/meta/recipes-devtools/systemd-bootchart/systemd-bootchart_235.bb index 25544029d5..3c3c84ff4a 100644 --- a/meta/recipes-devtools/systemd-bootchart/systemd-bootchart_235.bb +++ b/meta/recipes-devtools/systemd-bootchart/systemd-bootchart_235.bb | |||
@@ -17,6 +17,7 @@ SRC_URI:append:libc-musl = " \ | |||
17 | file://0001-comparison_fn_t-is-glibc-specific-use-raw-signature-.patch \ | 17 | file://0001-comparison_fn_t-is-glibc-specific-use-raw-signature-.patch \ |
18 | file://0002-musl-does-not-provide-printf-h.patch \ | 18 | file://0002-musl-does-not-provide-printf-h.patch \ |
19 | file://0003-musl-does-not-provide-canonicalize_file_name.patch \ | 19 | file://0003-musl-does-not-provide-canonicalize_file_name.patch \ |
20 | file://0001-Define-portable-basename-function.patch \ | ||
20 | " | 21 | " |
21 | 22 | ||
22 | 23 | ||
diff --git a/meta/recipes-devtools/valgrind/valgrind/0001-Return-a-valid-exit_code-from-vg_regtest.patch b/meta/recipes-devtools/valgrind/valgrind/0001-Return-a-valid-exit_code-from-vg_regtest.patch index e7af5efecc..832a1c3d6e 100644 --- a/meta/recipes-devtools/valgrind/valgrind/0001-Return-a-valid-exit_code-from-vg_regtest.patch +++ b/meta/recipes-devtools/valgrind/valgrind/0001-Return-a-valid-exit_code-from-vg_regtest.patch | |||
@@ -10,11 +10,9 @@ Signed-off-by: Randy MacLeod <Randy.MacLeod@windriver.com> | |||
10 | tests/vg_regtest.in | 1 + | 10 | tests/vg_regtest.in | 1 + |
11 | 1 file changed, 1 insertion(+) | 11 | 1 file changed, 1 insertion(+) |
12 | 12 | ||
13 | diff --git a/tests/vg_regtest.in b/tests/vg_regtest.in | ||
14 | index 909af3be8..f2799c4e3 100755 | ||
15 | --- a/tests/vg_regtest.in | 13 | --- a/tests/vg_regtest.in |
16 | +++ b/tests/vg_regtest.in | 14 | +++ b/tests/vg_regtest.in |
17 | @@ -403,6 +403,7 @@ sub mysystem($) | 15 | @@ -401,6 +401,7 @@ sub mysystem($) |
18 | exit 1; | 16 | exit 1; |
19 | } | 17 | } |
20 | } | 18 | } |
@@ -22,6 +20,3 @@ index 909af3be8..f2799c4e3 100755 | |||
22 | } | 20 | } |
23 | # if $keepunfiltered, copies $1 to $1.unfiltered.out | 21 | # if $keepunfiltered, copies $1 to $1.unfiltered.out |
24 | # renames $0 tp $1 | 22 | # renames $0 tp $1 |
25 | -- | ||
26 | 2.17.0 | ||
27 | |||
diff --git a/meta/recipes-devtools/valgrind/valgrind/0001-configure-Drop-setting-mcpu-cortex-a8-on-arm.patch b/meta/recipes-devtools/valgrind/valgrind/0001-configure-Drop-setting-mcpu-cortex-a8-on-arm.patch new file mode 100644 index 0000000000..63a1a58c04 --- /dev/null +++ b/meta/recipes-devtools/valgrind/valgrind/0001-configure-Drop-setting-mcpu-cortex-a8-on-arm.patch | |||
@@ -0,0 +1,40 @@ | |||
1 | From b17b6d7d3a65c14f91f090f3f4f9898d6fa3a9e4 Mon Sep 17 00:00:00 2001 | ||
2 | From: Khem Raj <raj.khem@gmail.com> | ||
3 | Date: Fri, 10 May 2024 16:27:34 -0700 | ||
4 | Subject: [PATCH] configure: Drop setting mcpu=cortex-a8 on arm | ||
5 | |||
6 | The -march settings from environment expresses the flags | ||
7 | appropriately, moreover, this conflicts when using armhf | ||
8 | without neon [1] | ||
9 | |||
10 | [1] https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=928224 | ||
11 | |||
12 | Upstream-Status: Submitted [https://bugs.kde.org/show_bug.cgi?id=454346] | ||
13 | Signed-off-by: Khem Raj <raj.khem@gmail.com> | ||
14 | --- | ||
15 | configure.ac | 4 ++-- | ||
16 | 1 file changed, 2 insertions(+), 2 deletions(-) | ||
17 | |||
18 | diff --git a/configure.ac b/configure.ac | ||
19 | index 39d8b6d..d61b3a6 100755 | ||
20 | --- a/configure.ac | ||
21 | +++ b/configure.ac | ||
22 | @@ -275,13 +275,13 @@ case "${host_cpu}" in | ||
23 | |||
24 | armv8*) | ||
25 | AC_MSG_RESULT([ok (${host_cpu})]) | ||
26 | - VGCONF_PLATFORM_ARM_ARCH="-marm -mcpu=cortex-a8" | ||
27 | + VGCONF_PLATFORM_ARM_ARCH="-marm" | ||
28 | ARCH_MAX="arm" | ||
29 | ;; | ||
30 | |||
31 | armv7*) | ||
32 | AC_MSG_RESULT([ok (${host_cpu})]) | ||
33 | - VGCONF_PLATFORM_ARM_ARCH="-marm -mcpu=cortex-a8" | ||
34 | + VGCONF_PLATFORM_ARM_ARCH="-marm" | ||
35 | ARCH_MAX="arm" | ||
36 | ;; | ||
37 | |||
38 | -- | ||
39 | 2.45.0 | ||
40 | |||
diff --git a/meta/recipes-devtools/valgrind/valgrind/0001-docs-Disable-manual-validation.patch b/meta/recipes-devtools/valgrind/valgrind/0001-docs-Disable-manual-validation.patch index 256ddb0c34..eba569c884 100644 --- a/meta/recipes-devtools/valgrind/valgrind/0001-docs-Disable-manual-validation.patch +++ b/meta/recipes-devtools/valgrind/valgrind/0001-docs-Disable-manual-validation.patch | |||
@@ -18,11 +18,9 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com> | |||
18 | docs/Makefile.am | 2 +- | 18 | docs/Makefile.am | 2 +- |
19 | 1 file changed, 1 insertion(+), 1 deletion(-) | 19 | 1 file changed, 1 insertion(+), 1 deletion(-) |
20 | 20 | ||
21 | diff --git a/docs/Makefile.am b/docs/Makefile.am | ||
22 | index d4250e8..c500306 100644 | ||
23 | --- a/docs/Makefile.am | 21 | --- a/docs/Makefile.am |
24 | +++ b/docs/Makefile.am | 22 | +++ b/docs/Makefile.am |
25 | @@ -192,7 +192,7 @@ valid-manpages: $(MANPAGES_XML_FILES) | 23 | @@ -194,7 +194,7 @@ valid-manpages: $(MANPAGES_XML_FILES) |
26 | touch $@; \ | 24 | touch $@; \ |
27 | fi | 25 | fi |
28 | 26 | ||
@@ -31,6 +29,3 @@ index d4250e8..c500306 100644 | |||
31 | 29 | ||
32 | # The text version of the FAQ. | 30 | # The text version of the FAQ. |
33 | FAQ.txt: | 31 | FAQ.txt: |
34 | -- | ||
35 | 2.34.1 | ||
36 | |||
diff --git a/meta/recipes-devtools/valgrind/valgrind/0001-makefiles-Drop-setting-mcpu-to-cortex-a8-on-arm-arch.patch b/meta/recipes-devtools/valgrind/valgrind/0001-makefiles-Drop-setting-mcpu-to-cortex-a8-on-arm-arch.patch index 073713c139..7996fa6e8f 100644 --- a/meta/recipes-devtools/valgrind/valgrind/0001-makefiles-Drop-setting-mcpu-to-cortex-a8-on-arm-arch.patch +++ b/meta/recipes-devtools/valgrind/valgrind/0001-makefiles-Drop-setting-mcpu-to-cortex-a8-on-arm-arch.patch | |||
@@ -19,30 +19,9 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com> | |||
19 | none/tests/arm/Makefile.am | 18 +++++++++--------- | 19 | none/tests/arm/Makefile.am | 18 +++++++++--------- |
20 | 3 files changed, 15 insertions(+), 15 deletions(-) | 20 | 3 files changed, 15 insertions(+), 15 deletions(-) |
21 | 21 | ||
22 | diff --git a/Makefile.all.am b/Makefile.all.am | ||
23 | index 02059a3..c7c4700 100644 | ||
24 | --- a/Makefile.all.am | ||
25 | +++ b/Makefile.all.am | ||
26 | @@ -197,11 +197,11 @@ AM_CCASFLAGS_PPC64LE_LINUX = @FLAG_M64@ -g | ||
27 | |||
28 | AM_FLAG_M3264_ARM_LINUX = @FLAG_M32@ | ||
29 | AM_CFLAGS_ARM_LINUX = @FLAG_M32@ \ | ||
30 | - $(AM_CFLAGS_BASE) -marm -mcpu=cortex-a8 | ||
31 | + $(AM_CFLAGS_BASE) -marm | ||
32 | AM_CFLAGS_PSO_ARM_LINUX = @FLAG_M32@ $(AM_CFLAGS_BASE) \ | ||
33 | - -marm -mcpu=cortex-a8 $(AM_CFLAGS_PSO_BASE) | ||
34 | + -marm $(AM_CFLAGS_PSO_BASE) | ||
35 | AM_CCASFLAGS_ARM_LINUX = @FLAG_M32@ \ | ||
36 | - -marm -mcpu=cortex-a8 -g | ||
37 | + -marm -g | ||
38 | |||
39 | AM_FLAG_M3264_ARM64_LINUX = @FLAG_M64@ | ||
40 | AM_CFLAGS_ARM64_LINUX = @FLAG_M64@ $(AM_CFLAGS_BASE) | ||
41 | diff --git a/helgrind/tests/Makefile.am b/helgrind/tests/Makefile.am | ||
42 | index df82169..07eb66a 100644 | ||
43 | --- a/helgrind/tests/Makefile.am | 22 | --- a/helgrind/tests/Makefile.am |
44 | +++ b/helgrind/tests/Makefile.am | 23 | +++ b/helgrind/tests/Makefile.am |
45 | @@ -189,9 +189,9 @@ if ! VGCONF_PLATFORMS_INCLUDE_X86_DARWIN | 24 | @@ -223,9 +223,9 @@ if ! VGCONF_PLATFORMS_INCLUDE_X86_DARWIN |
46 | endif | 25 | endif |
47 | 26 | ||
48 | if VGCONF_PLATFORMS_INCLUDE_ARM_LINUX | 27 | if VGCONF_PLATFORMS_INCLUDE_ARM_LINUX |
@@ -55,11 +34,9 @@ index df82169..07eb66a 100644 | |||
55 | else | 34 | else |
56 | annotate_hbefore_CFLAGS = $(AM_CFLAGS) | 35 | annotate_hbefore_CFLAGS = $(AM_CFLAGS) |
57 | tc07_hbl1_CFLAGS = $(AM_CFLAGS) | 36 | tc07_hbl1_CFLAGS = $(AM_CFLAGS) |
58 | diff --git a/none/tests/arm/Makefile.am b/none/tests/arm/Makefile.am | ||
59 | index 024eb6d..ccecb90 100644 | ||
60 | --- a/none/tests/arm/Makefile.am | 37 | --- a/none/tests/arm/Makefile.am |
61 | +++ b/none/tests/arm/Makefile.am | 38 | +++ b/none/tests/arm/Makefile.am |
62 | @@ -52,10 +52,10 @@ allexec_CFLAGS = $(AM_CFLAGS) @FLAG_W_NO_NONNULL@ | 39 | @@ -56,10 +56,10 @@ allexec_CFLAGS = $(AM_CFLAGS) @FLAG_W_N |
63 | # need special helping w.r.t -mfpu and -mfloat-abi, though. | 40 | # need special helping w.r.t -mfpu and -mfloat-abi, though. |
64 | # Also force -O0 since -O takes hundreds of MB of memory | 41 | # Also force -O0 since -O takes hundreds of MB of memory |
65 | # for v6intThumb.c. | 42 | # for v6intThumb.c. |
@@ -71,11 +48,11 @@ index 024eb6d..ccecb90 100644 | |||
71 | -v6media_CFLAGS = $(AM_CFLAGS) -g -O0 -mcpu=cortex-a8 -mthumb | 48 | -v6media_CFLAGS = $(AM_CFLAGS) -g -O0 -mcpu=cortex-a8 -mthumb |
72 | +v6media_CFLAGS = $(AM_CFLAGS) -g -O0 -mthumb | 49 | +v6media_CFLAGS = $(AM_CFLAGS) -g -O0 -mthumb |
73 | 50 | ||
74 | v8crypto_a_CFLAGS = $(AM_CFLAGS) -g -O0 -mfpu=crypto-neon-fp-armv8 -marm | 51 | v8crypto_a_CFLAGS = $(AM_CFLAGS) -g -O0 -mfpu=crypto-neon-fp-armv8 -marm -march=armv8-a |
75 | v8crypto_t_CFLAGS = $(AM_CFLAGS) -g -O0 -mfpu=crypto-neon-fp-armv8 -mthumb | 52 | v8crypto_t_CFLAGS = $(AM_CFLAGS) -g -O0 -mfpu=crypto-neon-fp-armv8 -mthumb -march=armv8-a |
76 | @@ -65,23 +65,23 @@ v8memory_a_CFLAGS = $(AM_CFLAGS) -g -O0 \ | 53 | @@ -74,23 +74,23 @@ v8memory_t_CFLAGS = $(AM_CFLAGS) -g -O0 |
77 | v8memory_t_CFLAGS = $(AM_CFLAGS) -g -O0 \ | 54 | |
78 | -march=armv8-a -mfpu=crypto-neon-fp-armv8 -mthumb | 55 | vcvt_fixed_float_VFP_CFLAGS = $(AM_CFLAGS) -g -O0 -mcpu=cortex-a8 -mfpu=neon |
79 | 56 | ||
80 | -vfp_CFLAGS = $(AM_CFLAGS) -g -O0 -mcpu=cortex-a8 \ | 57 | -vfp_CFLAGS = $(AM_CFLAGS) -g -O0 -mcpu=cortex-a8 \ |
81 | +vfp_CFLAGS = $(AM_CFLAGS) -g -O0 \ | 58 | +vfp_CFLAGS = $(AM_CFLAGS) -g -O0 \ |
@@ -103,6 +80,3 @@ index 024eb6d..ccecb90 100644 | |||
103 | +vcvt_fixed_float_VFP_CFLAGS = $(AM_CFLAGS) -g -mfpu=vfpv3 | 80 | +vcvt_fixed_float_VFP_CFLAGS = $(AM_CFLAGS) -g -mfpu=vfpv3 |
104 | 81 | ||
105 | vfpv4_fma_CFLAGS = $(AM_CFLAGS) -g -O0 -march=armv7ve -mcpu=cortex-a15 -mfpu=vfpv4 -marm | 82 | vfpv4_fma_CFLAGS = $(AM_CFLAGS) -g -O0 -march=armv7ve -mcpu=cortex-a15 -mfpu=vfpv4 -marm |
106 | -- | ||
107 | 2.12.2 | ||
108 | |||
diff --git a/meta/recipes-devtools/valgrind/valgrind/0001-memcheck-tests-Fix-timerfd-syscall-test.patch b/meta/recipes-devtools/valgrind/valgrind/0001-memcheck-tests-Fix-timerfd-syscall-test.patch index 15fbbe954f..29a9e95a90 100644 --- a/meta/recipes-devtools/valgrind/valgrind/0001-memcheck-tests-Fix-timerfd-syscall-test.patch +++ b/meta/recipes-devtools/valgrind/valgrind/0001-memcheck-tests-Fix-timerfd-syscall-test.patch | |||
@@ -17,7 +17,7 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com> | |||
17 | 17 | ||
18 | --- a/config.h.in | 18 | --- a/config.h.in |
19 | +++ b/config.h.in | 19 | +++ b/config.h.in |
20 | @@ -301,6 +301,9 @@ | 20 | @@ -424,6 +424,9 @@ |
21 | /* Define to 1 if you have the <sys/sysnvl.h> header file. */ | 21 | /* Define to 1 if you have the <sys/sysnvl.h> header file. */ |
22 | #undef HAVE_SYS_SYSNVL_H | 22 | #undef HAVE_SYS_SYSNVL_H |
23 | 23 | ||
@@ -29,7 +29,7 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com> | |||
29 | 29 | ||
30 | --- a/configure.ac | 30 | --- a/configure.ac |
31 | +++ b/configure.ac | 31 | +++ b/configure.ac |
32 | @@ -4098,6 +4098,7 @@ AC_CHECK_HEADERS([ \ | 32 | @@ -4881,6 +4881,7 @@ AC_CHECK_HEADERS([ \ |
33 | sys/syscall.h \ | 33 | sys/syscall.h \ |
34 | sys/sysnvl.h \ | 34 | sys/sysnvl.h \ |
35 | sys/time.h \ | 35 | sys/time.h \ |
@@ -39,7 +39,7 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com> | |||
39 | 39 | ||
40 | --- a/memcheck/tests/linux/timerfd-syscall.c | 40 | --- a/memcheck/tests/linux/timerfd-syscall.c |
41 | +++ b/memcheck/tests/linux/timerfd-syscall.c | 41 | +++ b/memcheck/tests/linux/timerfd-syscall.c |
42 | @@ -45,6 +45,9 @@ | 42 | @@ -42,6 +42,9 @@ |
43 | #if defined(HAVE_SYS_TIME_H) | 43 | #if defined(HAVE_SYS_TIME_H) |
44 | #include <sys/time.h> | 44 | #include <sys/time.h> |
45 | #endif | 45 | #endif |
@@ -49,7 +49,7 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com> | |||
49 | #if defined(HAVE_SYS_TYPES_H) | 49 | #if defined(HAVE_SYS_TYPES_H) |
50 | #include <sys/types.h> | 50 | #include <sys/types.h> |
51 | #endif | 51 | #endif |
52 | @@ -54,7 +57,8 @@ | 52 | @@ -51,7 +54,8 @@ |
53 | * timerfd_* system call numbers introduced in 2.6.23. These constants are | 53 | * timerfd_* system call numbers introduced in 2.6.23. These constants are |
54 | * not yet in the glibc 2.7 headers, that is why they are defined here. | 54 | * not yet in the glibc 2.7 headers, that is why they are defined here. |
55 | */ | 55 | */ |
@@ -59,7 +59,7 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com> | |||
59 | #if defined(__x86_64__) | 59 | #if defined(__x86_64__) |
60 | #define __NR_timerfd_create 283 | 60 | #define __NR_timerfd_create 283 |
61 | #elif defined(__i386__) | 61 | #elif defined(__i386__) |
62 | @@ -67,8 +71,10 @@ | 62 | @@ -64,8 +68,10 @@ |
63 | #error Cannot detect your architecture! | 63 | #error Cannot detect your architecture! |
64 | #endif | 64 | #endif |
65 | #endif | 65 | #endif |
@@ -71,7 +71,7 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com> | |||
71 | #if defined(__x86_64__) | 71 | #if defined(__x86_64__) |
72 | #define __NR_timerfd_settime 286 | 72 | #define __NR_timerfd_settime 286 |
73 | #define __NR_timerfd_gettime 287 | 73 | #define __NR_timerfd_gettime 287 |
74 | @@ -85,7 +91,7 @@ | 74 | @@ -82,7 +88,7 @@ |
75 | #error Cannot detect your architecture! | 75 | #error Cannot detect your architecture! |
76 | #endif | 76 | #endif |
77 | #endif | 77 | #endif |
@@ -80,7 +80,7 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com> | |||
80 | 80 | ||
81 | 81 | ||
82 | /* Definitions from include/linux/timerfd.h */ | 82 | /* Definitions from include/linux/timerfd.h */ |
83 | @@ -127,6 +133,7 @@ void set_timespec(struct timespec *tmr, | 83 | @@ -124,6 +130,7 @@ void set_timespec(struct timespec *tmr, |
84 | tmr->tv_nsec = (long) (1000ULL * (ustime % 1000000ULL)); | 84 | tmr->tv_nsec = (long) (1000ULL * (ustime % 1000000ULL)); |
85 | } | 85 | } |
86 | 86 | ||
@@ -88,7 +88,7 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com> | |||
88 | int timerfd_create(int clockid, int flags) | 88 | int timerfd_create(int clockid, int flags) |
89 | { | 89 | { |
90 | return syscall(__NR_timerfd_create, clockid, flags); | 90 | return syscall(__NR_timerfd_create, clockid, flags); |
91 | @@ -142,6 +149,7 @@ int timerfd_gettime(int ufc, struct itim | 91 | @@ -139,6 +146,7 @@ int timerfd_gettime(int ufc, struct itim |
92 | { | 92 | { |
93 | return syscall(__NR_timerfd_gettime, ufc, otmr); | 93 | return syscall(__NR_timerfd_gettime, ufc, otmr); |
94 | } | 94 | } |
diff --git a/meta/recipes-devtools/valgrind/valgrind/0001-tests-arm-Use-O-instead-of-O0.patch b/meta/recipes-devtools/valgrind/valgrind/0001-tests-arm-Use-O-instead-of-O0.patch new file mode 100644 index 0000000000..2d28bcb6c4 --- /dev/null +++ b/meta/recipes-devtools/valgrind/valgrind/0001-tests-arm-Use-O-instead-of-O0.patch | |||
@@ -0,0 +1,34 @@ | |||
1 | From 8e907753625faba3284925983ef8e83f8491aa76 Mon Sep 17 00:00:00 2001 | ||
2 | From: Khem Raj <raj.khem@gmail.com> | ||
3 | Date: Fri, 10 May 2024 17:44:47 -0700 | ||
4 | Subject: [PATCH] tests/arm: Use -O instead of -O0 | ||
5 | |||
6 | GCC-14 fails to compile the testcase with -O2 [1] | ||
7 | therefore workaround it by using -O1, which works | ||
8 | and for OE/cross-compiling it is fine, -O0 is used | ||
9 | to make these tests compile natively on an arm target | ||
10 | and gcc would take less memory compiling with -O0 | ||
11 | -O would take a bit more memory but in cross compiling | ||
12 | we have enough of it on build host. | ||
13 | |||
14 | [1] https://gcc.gnu.org/bugzilla/show_bug.cgi?id=115042 | ||
15 | Upstream-Status: Inappropriate [WORKAROUND] | ||
16 | Signed-off-by: Khem Raj <raj.khem@gmail.com> | ||
17 | --- | ||
18 | none/tests/arm/Makefile.am | 2 +- | ||
19 | 1 file changed, 1 insertion(+), 1 deletion(-) | ||
20 | |||
21 | --- a/none/tests/arm/Makefile.am | ||
22 | +++ b/none/tests/arm/Makefile.am | ||
23 | @@ -57,9 +57,9 @@ allexec_CFLAGS = $(AM_CFLAGS) @FLAG_W_N | ||
24 | # Also force -O0 since -O takes hundreds of MB of memory | ||
25 | # for v6intThumb.c. | ||
26 | v6intARM_CFLAGS = $(AM_CFLAGS) -g -O0 -marm | ||
27 | -v6intThumb_CFLAGS = $(AM_CFLAGS) -g -O0 -mthumb | ||
28 | +v6intThumb_CFLAGS = $(AM_CFLAGS) -g -O -mthumb | ||
29 | |||
30 | -v6media_CFLAGS = $(AM_CFLAGS) -g -O0 -mthumb | ||
31 | +v6media_CFLAGS = $(AM_CFLAGS) -g -O -mthumb | ||
32 | |||
33 | v8crypto_a_CFLAGS = $(AM_CFLAGS) -g -O0 -mfpu=crypto-neon-fp-armv8 -marm -march=armv8-a | ||
34 | v8crypto_t_CFLAGS = $(AM_CFLAGS) -g -O0 -mfpu=crypto-neon-fp-armv8 -mthumb -march=armv8-a | ||
diff --git a/meta/recipes-devtools/valgrind/valgrind/0001-valgrind-monitor.py-regular-expressions-should-use-r.patch b/meta/recipes-devtools/valgrind/valgrind/0001-valgrind-monitor.py-regular-expressions-should-use-r.patch deleted file mode 100644 index 80a8e3a10b..0000000000 --- a/meta/recipes-devtools/valgrind/valgrind/0001-valgrind-monitor.py-regular-expressions-should-use-r.patch +++ /dev/null | |||
@@ -1,64 +0,0 @@ | |||
1 | From 027b649fdb831868e71be01cafdacc49a5f419ab Mon Sep 17 00:00:00 2001 | ||
2 | From: Mark Wielaard <mark@klomp.org> | ||
3 | Date: Fri, 17 Nov 2023 14:01:21 +0100 | ||
4 | Subject: [PATCH 1/4] valgrind-monitor.py regular expressions should use raw | ||
5 | strings | ||
6 | |||
7 | With python 3.12 gdb will produce the following SyntaxWarning when | ||
8 | loading valgrind-monitor-def.py: | ||
9 | |||
10 | /usr/share/gdb/auto-load/valgrind-monitor-def.py:214: | ||
11 | SyntaxWarning: invalid escape sequence '\[' | ||
12 | if re.fullmatch("^0x[0123456789ABCDEFabcdef]+\[[^\[\]]+\]$", arg_str): | ||
13 | |||
14 | In a future python version this will become an SyntaxError. | ||
15 | |||
16 | Use a raw strings for the regular expression. | ||
17 | |||
18 | https://bugs.kde.org/show_bug.cgi?id=476708 | ||
19 | (cherry picked from commit 0fbfbe05028ad18efda786a256a2738d2c231ed4) | ||
20 | |||
21 | Upstream-Status: Backport [https://sourceware.org/git/?p=valgrind.git;a=commit;h=027b649fdb831868e71be01cafdacc49a5f419ab] | ||
22 | Signed-off-by: Khem Raj <raj.khem@gmail.com> | ||
23 | --- | ||
24 | NEWS | 13 +++++++++++++ | ||
25 | coregrind/m_gdbserver/valgrind-monitor-def.py | 2 +- | ||
26 | 2 files changed, 14 insertions(+), 1 deletion(-) | ||
27 | |||
28 | diff --git a/NEWS b/NEWS | ||
29 | index f11da4be8..ee5b4ff11 100644 | ||
30 | --- a/NEWS | ||
31 | +++ b/NEWS | ||
32 | @@ -1,3 +1,16 @@ | ||
33 | +Branch 3.22 | ||
34 | +~~~~~~~~~~~ | ||
35 | + | ||
36 | +* ==================== FIXED BUGS ==================== | ||
37 | + | ||
38 | +The following bugs have been fixed or resolved on this branch. | ||
39 | + | ||
40 | +476708 valgrind-monitor.py regular expressions should use raw strings | ||
41 | + | ||
42 | +To see details of a given bug, visit | ||
43 | + https://bugs.kde.org/show_bug.cgi?id=XXXXXX | ||
44 | +where XXXXXX is the bug number as listed above. | ||
45 | + | ||
46 | Release 3.22.0 (31 Oct 2023) | ||
47 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | ||
48 | |||
49 | diff --git a/coregrind/m_gdbserver/valgrind-monitor-def.py b/coregrind/m_gdbserver/valgrind-monitor-def.py | ||
50 | index b4e7b992d..d74b1590c 100644 | ||
51 | --- a/coregrind/m_gdbserver/valgrind-monitor-def.py | ||
52 | +++ b/coregrind/m_gdbserver/valgrind-monitor-def.py | ||
53 | @@ -211,7 +211,7 @@ class Valgrind_ADDR_LEN_opt(Valgrind_Command): | ||
54 | For compatibility reason with the Valgrind gdbserver monitor command, | ||
55 | we detect and accept usages such as 0x1234ABCD[10].""" | ||
56 | def invoke(self, arg_str : str, from_tty : bool) -> None: | ||
57 | - if re.fullmatch("^0x[0123456789ABCDEFabcdef]+\[[^\[\]]+\]$", arg_str): | ||
58 | + if re.fullmatch(r"^0x[0123456789ABCDEFabcdef]+\[[^\[\]]+\]$", arg_str): | ||
59 | arg_str = arg_str.replace("[", " ") | ||
60 | arg_str = arg_str.replace("]", " ") | ||
61 | eval_execute_2(self, arg_str, | ||
62 | -- | ||
63 | 2.44.0 | ||
64 | |||
diff --git a/meta/recipes-devtools/valgrind/valgrind/0002-Bug-476548-valgrind-3.22.0-fails-on-assertion-when-l.patch b/meta/recipes-devtools/valgrind/valgrind/0002-Bug-476548-valgrind-3.22.0-fails-on-assertion-when-l.patch deleted file mode 100644 index 5759fa039a..0000000000 --- a/meta/recipes-devtools/valgrind/valgrind/0002-Bug-476548-valgrind-3.22.0-fails-on-assertion-when-l.patch +++ /dev/null | |||
@@ -1,147 +0,0 @@ | |||
1 | From 1d00e5ce0fb069911c4b525ec38289fb5d9021b0 Mon Sep 17 00:00:00 2001 | ||
2 | From: Paul Floyd <pjfloyd@wanadoo.fr> | ||
3 | Date: Sat, 18 Nov 2023 08:49:34 +0100 | ||
4 | Subject: [PATCH 2/4] Bug 476548 - valgrind 3.22.0 fails on assertion when | ||
5 | loading debuginfo file produced by mold | ||
6 | |||
7 | (cherry picked from commit 9ea4ae66707a4dcc6f4328e11911652e4418c585) | ||
8 | |||
9 | Upstream-Status: Backport [https://sourceware.org/git/?p=valgrind.git;a=commit;h=1d00e5ce0fb069911c4b525ec38289fb5d9021b0] | ||
10 | Signed-off-by: Khem Raj <raj.khem@gmail.com> | ||
11 | --- | ||
12 | NEWS | 2 ++ | ||
13 | coregrind/m_debuginfo/image.c | 14 +++++++++ | ||
14 | coregrind/m_debuginfo/priv_image.h | 4 +++ | ||
15 | coregrind/m_debuginfo/readelf.c | 49 ++++++++++++++++++++++++++++-- | ||
16 | 4 files changed, 66 insertions(+), 3 deletions(-) | ||
17 | |||
18 | diff --git a/NEWS b/NEWS | ||
19 | index ee5b4ff11..6cd13429a 100644 | ||
20 | --- a/NEWS | ||
21 | +++ b/NEWS | ||
22 | @@ -5,6 +5,8 @@ Branch 3.22 | ||
23 | |||
24 | The following bugs have been fixed or resolved on this branch. | ||
25 | |||
26 | +476548 valgrind 3.22.0 fails on assertion when loading debuginfo | ||
27 | + file produced by mold | ||
28 | 476708 valgrind-monitor.py regular expressions should use raw strings | ||
29 | |||
30 | To see details of a given bug, visit | ||
31 | diff --git a/coregrind/m_debuginfo/image.c b/coregrind/m_debuginfo/image.c | ||
32 | index 02e509071..445f95555 100644 | ||
33 | --- a/coregrind/m_debuginfo/image.c | ||
34 | +++ b/coregrind/m_debuginfo/image.c | ||
35 | @@ -1221,6 +1221,20 @@ Int ML_(img_strcmp_c)(DiImage* img, DiOffT off1, const HChar* str2) | ||
36 | } | ||
37 | } | ||
38 | |||
39 | +Int ML_(img_strcmp_n)(DiImage* img, DiOffT off1, const HChar* str2, Word n) | ||
40 | +{ | ||
41 | + ensure_valid(img, off1, 1, "ML_(img_strcmp_c)"); | ||
42 | + while (n) { | ||
43 | + UChar c1 = get(img, off1); | ||
44 | + UChar c2 = *(const UChar*)str2; | ||
45 | + if (c1 < c2) return -1; | ||
46 | + if (c1 > c2) return 1; | ||
47 | + if (c1 == 0) return 0; | ||
48 | + off1++; str2++; --n; | ||
49 | + } | ||
50 | + return 0; | ||
51 | +} | ||
52 | + | ||
53 | UChar ML_(img_get_UChar)(DiImage* img, DiOffT offset) | ||
54 | { | ||
55 | ensure_valid(img, offset, 1, "ML_(img_get_UChar)"); | ||
56 | diff --git a/coregrind/m_debuginfo/priv_image.h b/coregrind/m_debuginfo/priv_image.h | ||
57 | index a49846f14..c91e49f01 100644 | ||
58 | --- a/coregrind/m_debuginfo/priv_image.h | ||
59 | +++ b/coregrind/m_debuginfo/priv_image.h | ||
60 | @@ -115,6 +115,10 @@ Int ML_(img_strcmp)(DiImage* img, DiOffT off1, DiOffT off2); | ||
61 | cast to HChar before comparison. */ | ||
62 | Int ML_(img_strcmp_c)(DiImage* img, DiOffT off1, const HChar* str2); | ||
63 | |||
64 | +/* Do strncmp of a C string in the image vs a normal one. Chars are | ||
65 | + cast to HChar before comparison. */ | ||
66 | +Int ML_(img_strcmp_n)(DiImage* img, DiOffT off1, const HChar* str2, Word n); | ||
67 | + | ||
68 | /* Do strlen of a C string in the image. */ | ||
69 | SizeT ML_(img_strlen)(DiImage* img, DiOffT off); | ||
70 | |||
71 | diff --git a/coregrind/m_debuginfo/readelf.c b/coregrind/m_debuginfo/readelf.c | ||
72 | index fb64ed976..46f8c8343 100644 | ||
73 | --- a/coregrind/m_debuginfo/readelf.c | ||
74 | +++ b/coregrind/m_debuginfo/readelf.c | ||
75 | @@ -2501,8 +2501,7 @@ Bool ML_(read_elf_object) ( struct _DebugInfo* di ) | ||
76 | di->rodata_avma += inrw1->bias; | ||
77 | di->rodata_bias = inrw1->bias; | ||
78 | di->rodata_debug_bias = inrw1->bias; | ||
79 | - } | ||
80 | - else { | ||
81 | + } else { | ||
82 | BAD(".rodata"); /* should not happen? */ | ||
83 | } | ||
84 | di->rodata_present = True; | ||
85 | @@ -2977,6 +2976,46 @@ Bool ML_(read_elf_object) ( struct _DebugInfo* di ) | ||
86 | return retval; | ||
87 | } | ||
88 | |||
89 | +static void find_rodata(Word i, Word shnum, DiImage* dimg, struct _DebugInfo* di, DiOffT shdr_dioff, | ||
90 | + UWord shdr_dent_szB, DiOffT shdr_strtab_dioff, PtrdiffT rw_dbias) | ||
91 | +{ | ||
92 | + ElfXX_Shdr a_shdr; | ||
93 | + ElfXX_Shdr a_extra_shdr; | ||
94 | + ML_(img_get)(&a_shdr, dimg, | ||
95 | + INDEX_BIS(shdr_dioff, i, shdr_dent_szB), | ||
96 | + sizeof(a_shdr)); | ||
97 | + if (di->rodata_present && | ||
98 | + 0 == ML_(img_strcmp_c)(dimg, shdr_strtab_dioff | ||
99 | + + a_shdr.sh_name, ".rodata")) { | ||
100 | + Word sh_size = a_shdr.sh_size; | ||
101 | + Word j; | ||
102 | + Word next_addr = a_shdr.sh_addr + a_shdr.sh_size; | ||
103 | + for (j = i + 1; j < shnum; ++j) { | ||
104 | + ML_(img_get)(&a_extra_shdr, dimg, | ||
105 | + INDEX_BIS(shdr_dioff, j, shdr_dent_szB), | ||
106 | + sizeof(a_shdr)); | ||
107 | + if (0 == ML_(img_strcmp_n)(dimg, shdr_strtab_dioff | ||
108 | + + a_extra_shdr.sh_name, ".rodata", 7)) { | ||
109 | + if (a_extra_shdr.sh_addr == | ||
110 | + VG_ROUNDUP(next_addr, a_extra_shdr.sh_addralign)) { | ||
111 | + sh_size = VG_ROUNDUP(sh_size, a_extra_shdr.sh_addralign) + a_extra_shdr.sh_size; | ||
112 | + } | ||
113 | + next_addr = a_extra_shdr.sh_addr + a_extra_shdr.sh_size; | ||
114 | + } else { | ||
115 | + break; | ||
116 | + } | ||
117 | + } | ||
118 | + vg_assert(di->rodata_size == sh_size); | ||
119 | + vg_assert(di->rodata_avma + a_shdr.sh_addr + rw_dbias); | ||
120 | + di->rodata_debug_svma = a_shdr.sh_addr; | ||
121 | + di->rodata_debug_bias = di->rodata_bias + | ||
122 | + di->rodata_svma - di->rodata_debug_svma; | ||
123 | + TRACE_SYMTAB("acquiring .rodata debug svma = %#lx .. %#lx\n", | ||
124 | + di->rodata_debug_svma, | ||
125 | + di->rodata_debug_svma + di->rodata_size - 1); | ||
126 | + TRACE_SYMTAB("acquiring .rodata debug bias = %#lx\n", (UWord)di->rodata_debug_bias); | ||
127 | + } | ||
128 | +} | ||
129 | Bool ML_(read_elf_debug) ( struct _DebugInfo* di ) | ||
130 | { | ||
131 | Word i, j; | ||
132 | @@ -3391,7 +3430,11 @@ Bool ML_(read_elf_debug) ( struct _DebugInfo* di ) | ||
133 | FIND(text, rx) | ||
134 | FIND(data, rw) | ||
135 | FIND(sdata, rw) | ||
136 | - FIND(rodata, rw) | ||
137 | + // https://bugs.kde.org/show_bug.cgi?id=476548 | ||
138 | + // special handling for rodata as adjacent | ||
139 | + // rodata sections may have been merged in ML_(read_elf_object) | ||
140 | + //FIND(rodata, rw) | ||
141 | + find_rodata(i, ehdr_dimg.e_shnum, dimg, di, shdr_dioff, shdr_dent_szB, shdr_strtab_dioff, rw_dbias); | ||
142 | FIND(bss, rw) | ||
143 | FIND(sbss, rw) | ||
144 | |||
145 | -- | ||
146 | 2.44.0 | ||
147 | |||
diff --git a/meta/recipes-devtools/valgrind/valgrind/0003-Add-fchmodat2-syscall-on-linux.patch b/meta/recipes-devtools/valgrind/valgrind/0003-Add-fchmodat2-syscall-on-linux.patch deleted file mode 100644 index 2a09ca52b7..0000000000 --- a/meta/recipes-devtools/valgrind/valgrind/0003-Add-fchmodat2-syscall-on-linux.patch +++ /dev/null | |||
@@ -1,221 +0,0 @@ | |||
1 | From a43e62dddcf51ec6578a90c5988a41e856b44b05 Mon Sep 17 00:00:00 2001 | ||
2 | From: Mark Wielaard <mark@klomp.org> | ||
3 | Date: Sat, 18 Nov 2023 21:17:02 +0100 | ||
4 | Subject: [PATCH 3/4] Add fchmodat2 syscall on linux | ||
5 | |||
6 | fchmodat2 is a new syscall on linux 6.6. It is a variant of fchmodat | ||
7 | that takes an extra flags argument. | ||
8 | |||
9 | https://bugs.kde.org/show_bug.cgi?id=477198 | ||
10 | |||
11 | (cherry picked from commit 372d09fd9a8d76847c81092ebff71c80fd6c145d) | ||
12 | |||
13 | Upstream-Status: Backport [https://sourceware.org/git/?p=valgrind.git;a=commit;h=a43e62dddcf51ec6578a90c5988a41e856b44b05] | ||
14 | Signed-off-by: Khem Raj <raj.khem@gmail.com> | ||
15 | --- | ||
16 | NEWS | 1 + | ||
17 | coregrind/m_syswrap/priv_syswrap-linux.h | 3 +++ | ||
18 | coregrind/m_syswrap/syswrap-amd64-linux.c | 2 ++ | ||
19 | coregrind/m_syswrap/syswrap-arm-linux.c | 2 ++ | ||
20 | coregrind/m_syswrap/syswrap-arm64-linux.c | 2 ++ | ||
21 | coregrind/m_syswrap/syswrap-linux.c | 11 +++++++++++ | ||
22 | coregrind/m_syswrap/syswrap-mips32-linux.c | 2 ++ | ||
23 | coregrind/m_syswrap/syswrap-mips64-linux.c | 1 + | ||
24 | coregrind/m_syswrap/syswrap-nanomips-linux.c | 1 + | ||
25 | coregrind/m_syswrap/syswrap-ppc32-linux.c | 2 ++ | ||
26 | coregrind/m_syswrap/syswrap-ppc64-linux.c | 2 ++ | ||
27 | coregrind/m_syswrap/syswrap-s390x-linux.c | 2 ++ | ||
28 | coregrind/m_syswrap/syswrap-x86-linux.c | 2 ++ | ||
29 | include/vki/vki-scnums-shared-linux.h | 2 ++ | ||
30 | 14 files changed, 35 insertions(+) | ||
31 | |||
32 | diff --git a/NEWS b/NEWS | ||
33 | index 6cd13429a..da0f8c1aa 100644 | ||
34 | --- a/NEWS | ||
35 | +++ b/NEWS | ||
36 | @@ -8,6 +8,7 @@ The following bugs have been fixed or resolved on this branch. | ||
37 | 476548 valgrind 3.22.0 fails on assertion when loading debuginfo | ||
38 | file produced by mold | ||
39 | 476708 valgrind-monitor.py regular expressions should use raw strings | ||
40 | +477198 Add fchmodat2 syscall on linux | ||
41 | |||
42 | To see details of a given bug, visit | ||
43 | https://bugs.kde.org/show_bug.cgi?id=XXXXXX | ||
44 | diff --git a/coregrind/m_syswrap/priv_syswrap-linux.h b/coregrind/m_syswrap/priv_syswrap-linux.h | ||
45 | index 7c9decf5a..798c456c9 100644 | ||
46 | --- a/coregrind/m_syswrap/priv_syswrap-linux.h | ||
47 | +++ b/coregrind/m_syswrap/priv_syswrap-linux.h | ||
48 | @@ -331,6 +331,9 @@ DECL_TEMPLATE(linux, sys_openat2); | ||
49 | // Linux-specific (new in Linux 5.14) | ||
50 | DECL_TEMPLATE(linux, sys_memfd_secret); | ||
51 | |||
52 | +// Since Linux 6.6 | ||
53 | +DECL_TEMPLATE(linux, sys_fchmodat2); | ||
54 | + | ||
55 | /* --------------------------------------------------------------------- | ||
56 | Wrappers for sockets and ipc-ery. These are split into standalone | ||
57 | procedures because x86-linux hides them inside multiplexors | ||
58 | diff --git a/coregrind/m_syswrap/syswrap-amd64-linux.c b/coregrind/m_syswrap/syswrap-amd64-linux.c | ||
59 | index 008600798..fe17d118b 100644 | ||
60 | --- a/coregrind/m_syswrap/syswrap-amd64-linux.c | ||
61 | +++ b/coregrind/m_syswrap/syswrap-amd64-linux.c | ||
62 | @@ -886,6 +886,8 @@ static SyscallTableEntry syscall_table[] = { | ||
63 | LINXY(__NR_epoll_pwait2, sys_epoll_pwait2), // 441 | ||
64 | |||
65 | LINXY(__NR_memfd_secret, sys_memfd_secret), // 447 | ||
66 | + | ||
67 | + LINX_(__NR_fchmodat2, sys_fchmodat2), // 452 | ||
68 | }; | ||
69 | |||
70 | SyscallTableEntry* ML_(get_linux_syscall_entry) ( UInt sysno ) | ||
71 | diff --git a/coregrind/m_syswrap/syswrap-arm-linux.c b/coregrind/m_syswrap/syswrap-arm-linux.c | ||
72 | index 9a7a1e0d2..811931d3b 100644 | ||
73 | --- a/coregrind/m_syswrap/syswrap-arm-linux.c | ||
74 | +++ b/coregrind/m_syswrap/syswrap-arm-linux.c | ||
75 | @@ -1059,6 +1059,8 @@ static SyscallTableEntry syscall_main_table[] = { | ||
76 | LINX_(__NR_faccessat2, sys_faccessat2), // 439 | ||
77 | |||
78 | LINXY(__NR_epoll_pwait2, sys_epoll_pwait2), // 441 | ||
79 | + | ||
80 | + LINX_(__NR_fchmodat2, sys_fchmodat2), // 452 | ||
81 | }; | ||
82 | |||
83 | |||
84 | diff --git a/coregrind/m_syswrap/syswrap-arm64-linux.c b/coregrind/m_syswrap/syswrap-arm64-linux.c | ||
85 | index 6af7bab83..3307bc2ca 100644 | ||
86 | --- a/coregrind/m_syswrap/syswrap-arm64-linux.c | ||
87 | +++ b/coregrind/m_syswrap/syswrap-arm64-linux.c | ||
88 | @@ -840,6 +840,8 @@ static SyscallTableEntry syscall_main_table[] = { | ||
89 | LINXY(__NR_epoll_pwait2, sys_epoll_pwait2), // 441 | ||
90 | |||
91 | LINXY(__NR_memfd_secret, sys_memfd_secret), // 447 | ||
92 | + | ||
93 | + LINX_(__NR_fchmodat2, sys_fchmodat2), // 452 | ||
94 | }; | ||
95 | |||
96 | |||
97 | diff --git a/coregrind/m_syswrap/syswrap-linux.c b/coregrind/m_syswrap/syswrap-linux.c | ||
98 | index d571fc327..efa47f2e6 100644 | ||
99 | --- a/coregrind/m_syswrap/syswrap-linux.c | ||
100 | +++ b/coregrind/m_syswrap/syswrap-linux.c | ||
101 | @@ -6059,6 +6059,17 @@ PRE(sys_fchmodat) | ||
102 | PRE_MEM_RASCIIZ( "fchmodat(path)", ARG2 ); | ||
103 | } | ||
104 | |||
105 | +PRE(sys_fchmodat2) | ||
106 | +{ | ||
107 | + PRINT("sys_fchmodat2 ( %ld, %#" FMT_REGWORD "x(%s), %" FMT_REGWORD "u, %" | ||
108 | + FMT_REGWORD "u )", | ||
109 | + SARG1, ARG2, (HChar*)(Addr)ARG2, ARG3, ARG4); | ||
110 | + PRE_REG_READ4(long, "fchmodat2", | ||
111 | + int, dfd, const char *, path, vki_mode_t, mode, | ||
112 | + unsigned int, flags); | ||
113 | + PRE_MEM_RASCIIZ( "fchmodat2(pathname)", ARG2 ); | ||
114 | +} | ||
115 | + | ||
116 | PRE(sys_faccessat) | ||
117 | { | ||
118 | PRINT("sys_faccessat ( %ld, %#" FMT_REGWORD "x(%s), %ld )", | ||
119 | diff --git a/coregrind/m_syswrap/syswrap-mips32-linux.c b/coregrind/m_syswrap/syswrap-mips32-linux.c | ||
120 | index 6268a00dd..74a1f6eac 100644 | ||
121 | --- a/coregrind/m_syswrap/syswrap-mips32-linux.c | ||
122 | +++ b/coregrind/m_syswrap/syswrap-mips32-linux.c | ||
123 | @@ -1143,6 +1143,8 @@ static SyscallTableEntry syscall_main_table[] = { | ||
124 | LINX_ (__NR_faccessat2, sys_faccessat2), // 439 | ||
125 | |||
126 | LINXY(__NR_epoll_pwait2, sys_epoll_pwait2), // 441 | ||
127 | + | ||
128 | + LINX_(__NR_fchmodat2, sys_fchmodat2), // 452 | ||
129 | }; | ||
130 | |||
131 | SyscallTableEntry* ML_(get_linux_syscall_entry) (UInt sysno) | ||
132 | diff --git a/coregrind/m_syswrap/syswrap-mips64-linux.c b/coregrind/m_syswrap/syswrap-mips64-linux.c | ||
133 | index 6cdf25893..4e8508b7a 100644 | ||
134 | --- a/coregrind/m_syswrap/syswrap-mips64-linux.c | ||
135 | +++ b/coregrind/m_syswrap/syswrap-mips64-linux.c | ||
136 | @@ -820,6 +820,7 @@ static SyscallTableEntry syscall_main_table[] = { | ||
137 | LINXY (__NR_close_range, sys_close_range), | ||
138 | LINX_ (__NR_faccessat2, sys_faccessat2), | ||
139 | LINXY(__NR_epoll_pwait2, sys_epoll_pwait2), | ||
140 | + LINX_ (__NR_fchmodat2, sys_fchmodat2), | ||
141 | }; | ||
142 | |||
143 | SyscallTableEntry * ML_(get_linux_syscall_entry) ( UInt sysno ) | ||
144 | diff --git a/coregrind/m_syswrap/syswrap-nanomips-linux.c b/coregrind/m_syswrap/syswrap-nanomips-linux.c | ||
145 | index d724cde74..7859900c1 100644 | ||
146 | --- a/coregrind/m_syswrap/syswrap-nanomips-linux.c | ||
147 | +++ b/coregrind/m_syswrap/syswrap-nanomips-linux.c | ||
148 | @@ -829,6 +829,7 @@ static SyscallTableEntry syscall_main_table[] = { | ||
149 | LINXY (__NR_close_range, sys_close_range), | ||
150 | LINX_ (__NR_faccessat2, sys_faccessat2), | ||
151 | LINXY (__NR_epoll_pwait2, sys_epoll_pwait2), | ||
152 | + LINX_ (__NR_fchmodat2, sys_fchmodat2), | ||
153 | }; | ||
154 | |||
155 | SyscallTableEntry* ML_(get_linux_syscall_entry) (UInt sysno) | ||
156 | diff --git a/coregrind/m_syswrap/syswrap-ppc32-linux.c b/coregrind/m_syswrap/syswrap-ppc32-linux.c | ||
157 | index c0cfef235..1e19116ee 100644 | ||
158 | --- a/coregrind/m_syswrap/syswrap-ppc32-linux.c | ||
159 | +++ b/coregrind/m_syswrap/syswrap-ppc32-linux.c | ||
160 | @@ -1063,6 +1063,8 @@ static SyscallTableEntry syscall_table[] = { | ||
161 | LINX_(__NR_faccessat2, sys_faccessat2), // 439 | ||
162 | |||
163 | LINXY (__NR_epoll_pwait2, sys_epoll_pwait2), // 441 | ||
164 | + | ||
165 | + LINX_ (__NR_fchmodat2, sys_fchmodat2), // 452 | ||
166 | }; | ||
167 | |||
168 | SyscallTableEntry* ML_(get_linux_syscall_entry) ( UInt sysno ) | ||
169 | diff --git a/coregrind/m_syswrap/syswrap-ppc64-linux.c b/coregrind/m_syswrap/syswrap-ppc64-linux.c | ||
170 | index f5976f30c..1097212a4 100644 | ||
171 | --- a/coregrind/m_syswrap/syswrap-ppc64-linux.c | ||
172 | +++ b/coregrind/m_syswrap/syswrap-ppc64-linux.c | ||
173 | @@ -1032,6 +1032,8 @@ static SyscallTableEntry syscall_table[] = { | ||
174 | LINX_(__NR_faccessat2, sys_faccessat2), // 439 | ||
175 | |||
176 | LINXY (__NR_epoll_pwait2, sys_epoll_pwait2), // 441 | ||
177 | + | ||
178 | + LINX_ (__NR_fchmodat2, sys_fchmodat2), // 452 | ||
179 | }; | ||
180 | |||
181 | SyscallTableEntry* ML_(get_linux_syscall_entry) ( UInt sysno ) | ||
182 | diff --git a/coregrind/m_syswrap/syswrap-s390x-linux.c b/coregrind/m_syswrap/syswrap-s390x-linux.c | ||
183 | index afba154e7..3588672c7 100644 | ||
184 | --- a/coregrind/m_syswrap/syswrap-s390x-linux.c | ||
185 | +++ b/coregrind/m_syswrap/syswrap-s390x-linux.c | ||
186 | @@ -873,6 +873,8 @@ static SyscallTableEntry syscall_table[] = { | ||
187 | LINX_(__NR_faccessat2, sys_faccessat2), // 439 | ||
188 | |||
189 | LINXY(__NR_epoll_pwait2, sys_epoll_pwait2), // 441 | ||
190 | + | ||
191 | + LINX_ (__NR_fchmodat2, sys_fchmodat2), // 452 | ||
192 | }; | ||
193 | |||
194 | SyscallTableEntry* ML_(get_linux_syscall_entry) ( UInt sysno ) | ||
195 | diff --git a/coregrind/m_syswrap/syswrap-x86-linux.c b/coregrind/m_syswrap/syswrap-x86-linux.c | ||
196 | index da4fd8fa2..58badc6b0 100644 | ||
197 | --- a/coregrind/m_syswrap/syswrap-x86-linux.c | ||
198 | +++ b/coregrind/m_syswrap/syswrap-x86-linux.c | ||
199 | @@ -1658,6 +1658,8 @@ static SyscallTableEntry syscall_table[] = { | ||
200 | LINXY(__NR_epoll_pwait2, sys_epoll_pwait2), // 441 | ||
201 | |||
202 | LINXY(__NR_memfd_secret, sys_memfd_secret), // 447 | ||
203 | + | ||
204 | + LINX_(__NR_fchmodat2, sys_fchmodat2), // 452 | ||
205 | }; | ||
206 | |||
207 | SyscallTableEntry* ML_(get_linux_syscall_entry) ( UInt sysno ) | ||
208 | diff --git a/include/vki/vki-scnums-shared-linux.h b/include/vki/vki-scnums-shared-linux.h | ||
209 | index 542382b53..a4cd87149 100644 | ||
210 | --- a/include/vki/vki-scnums-shared-linux.h | ||
211 | +++ b/include/vki/vki-scnums-shared-linux.h | ||
212 | @@ -50,4 +50,6 @@ | ||
213 | |||
214 | #define __NR_memfd_secret 447 | ||
215 | |||
216 | +#define __NR_fchmodat2 452 | ||
217 | + | ||
218 | #endif | ||
219 | -- | ||
220 | 2.44.0 | ||
221 | |||
diff --git a/meta/recipes-devtools/valgrind/valgrind/0004-Bug-478624-Valgrind-incompatibility-with-binutils-2..patch b/meta/recipes-devtools/valgrind/valgrind/0004-Bug-478624-Valgrind-incompatibility-with-binutils-2..patch deleted file mode 100644 index 4e9185508a..0000000000 --- a/meta/recipes-devtools/valgrind/valgrind/0004-Bug-478624-Valgrind-incompatibility-with-binutils-2..patch +++ /dev/null | |||
@@ -1,137 +0,0 @@ | |||
1 | From 41ff9aa49f6c54c66d0e6b37f265fd9cb0176057 Mon Sep 17 00:00:00 2001 | ||
2 | From: Paul Floyd <pjfloyd@wanadoo.fr> | ||
3 | Date: Sun, 17 Dec 2023 14:18:51 +0100 | ||
4 | Subject: [PATCH 4/4] Bug 478624 - Valgrind incompatibility with binutils-2.42 | ||
5 | on x86 with new nop patterns (unhandled instruction bytes: 0x2E 0x8D 0xB4 | ||
6 | 0x26) | ||
7 | |||
8 | It was a bit of a struggle to get the testcase to build | ||
9 | with both clang and gcc (oddly enough gcc was more difficult) so | ||
10 | I just resorted to using .byte arrays. | ||
11 | |||
12 | (cherry picked from commit d35005cef8ad8207542738812705ceabf137d7e0) | ||
13 | |||
14 | Upstream-Status: Backport [https://sourceware.org/git/?p=valgrind.git;a=commit;h=41ff9aa49f6c54c66d0e6b37f265fd9cb0176057] | ||
15 | Signed-off-by: Khem Raj <raj.khem@gmail.com> | ||
16 | --- | ||
17 | .gitignore | 1 + | ||
18 | NEWS | 2 ++ | ||
19 | VEX/priv/guest_x86_toIR.c | 22 +++++++++++++- | ||
20 | none/tests/x86/Makefile.am | 2 ++ | ||
21 | none/tests/x86/gnu_binutils_nop.c | 34 ++++++++++++++++++++++ | ||
22 | none/tests/x86/gnu_binutils_nop.stderr.exp | 0 | ||
23 | none/tests/x86/gnu_binutils_nop.vgtest | 2 ++ | ||
24 | 7 files changed, 62 insertions(+), 1 deletion(-) | ||
25 | create mode 100644 none/tests/x86/gnu_binutils_nop.c | ||
26 | create mode 100644 none/tests/x86/gnu_binutils_nop.stderr.exp | ||
27 | create mode 100644 none/tests/x86/gnu_binutils_nop.vgtest | ||
28 | |||
29 | --- a/NEWS | ||
30 | +++ b/NEWS | ||
31 | @@ -9,6 +9,8 @@ The following bugs have been fixed or re | ||
32 | file produced by mold | ||
33 | 476708 valgrind-monitor.py regular expressions should use raw strings | ||
34 | 477198 Add fchmodat2 syscall on linux | ||
35 | +478624 Valgrind incompatibility with binutils-2.42 on x86 with new nop patterns | ||
36 | + (unhandled instruction bytes: 0x2E 0x8D 0xB4 0x26) | ||
37 | |||
38 | To see details of a given bug, visit | ||
39 | https://bugs.kde.org/show_bug.cgi?id=XXXXXX | ||
40 | --- a/VEX/priv/guest_x86_toIR.c | ||
41 | +++ b/VEX/priv/guest_x86_toIR.c | ||
42 | @@ -8198,7 +8198,7 @@ DisResult disInstr_X86_WRK ( | ||
43 | delta += 5; | ||
44 | goto decode_success; | ||
45 | } | ||
46 | - /* Don't barf on recent binutils padding, | ||
47 | + /* Don't barf on recent (2010) binutils padding, | ||
48 | all variants of which are: nopw %cs:0x0(%eax,%eax,1) | ||
49 | 66 2e 0f 1f 84 00 00 00 00 00 | ||
50 | 66 66 2e 0f 1f 84 00 00 00 00 00 | ||
51 | @@ -8222,6 +8222,26 @@ DisResult disInstr_X86_WRK ( | ||
52 | goto decode_success; | ||
53 | } | ||
54 | } | ||
55 | + | ||
56 | + /* bug478624 GNU binutils uses a leal of esi into itself with | ||
57 | + a zero offset and CS prefix as an 8 byte no-op (Dec 2023). | ||
58 | + Since the CS prefix is hardly ever used we don't do much | ||
59 | + to decode it, just a few cases for conditional branches. | ||
60 | + So add handling here with other pseudo-no-ops. | ||
61 | + */ | ||
62 | + if (code[0] == 0x2E && code[1] == 0x8D) { | ||
63 | + if (code[2] == 0x74 && code[3] == 0x26 && code[4] == 0x00) { | ||
64 | + DIP("leal %%cs:0(%%esi,%%eiz,1),%%esi\n"); | ||
65 | + delta += 5; | ||
66 | + goto decode_success; | ||
67 | + } | ||
68 | + if (code[2] == 0xB4 && code[3] == 0x26 && code[4] == 0x00 | ||
69 | + && code[5] == 0x00 && code[6] == 0x00 && code[7] == 0x00) { | ||
70 | + DIP("leal %%cs:0(%%esi,%%eiz,1),%%esi\n"); | ||
71 | + delta += 8; | ||
72 | + goto decode_success; | ||
73 | + } | ||
74 | + } | ||
75 | |||
76 | // Intel CET requires the following opcodes to be treated as NOPs | ||
77 | // with any prefix and ModRM, SIB and disp combination: | ||
78 | --- a/none/tests/x86/Makefile.am | ||
79 | +++ b/none/tests/x86/Makefile.am | ||
80 | @@ -52,6 +52,7 @@ EXTRA_DIST = \ | ||
81 | fxtract.stdout.exp fxtract.stderr.exp fxtract.vgtest \ | ||
82 | fxtract.stdout.exp-older-glibc \ | ||
83 | getseg.stdout.exp getseg.stderr.exp getseg.vgtest \ | ||
84 | + gnu_binutils_nop.stderr.exp gnu_binutils_nop.vgtest \ | ||
85 | incdec_alt.stdout.exp incdec_alt.stderr.exp incdec_alt.vgtest \ | ||
86 | int.stderr.exp int.stdout.exp int.disabled \ | ||
87 | $(addsuffix .stderr.exp,$(INSN_TESTS)) \ | ||
88 | @@ -100,6 +101,7 @@ check_PROGRAMS = \ | ||
89 | fpu_lazy_eflags \ | ||
90 | fxtract \ | ||
91 | getseg \ | ||
92 | + gnu_binutils_nop \ | ||
93 | incdec_alt \ | ||
94 | $(INSN_TESTS) \ | ||
95 | int \ | ||
96 | --- /dev/null | ||
97 | +++ b/none/tests/x86/gnu_binutils_nop.c | ||
98 | @@ -0,0 +1,34 @@ | ||
99 | +int main(void) | ||
100 | +{ | ||
101 | + // GNU binutils uses various opcodes as alternatives for nop | ||
102 | + // the idea is that it is faster to execute one large opcode | ||
103 | + // with no side-effects than multiple repetitions of the | ||
104 | + // single byte 'nop'. This gives more choice when code | ||
105 | + // needs to be padded. | ||
106 | + | ||
107 | + // the following is based on | ||
108 | + // https://sourceware.org/cgit/binutils-gdb/tree/gas/config/tc-i386.c#n1256 | ||
109 | + | ||
110 | + // one byte | ||
111 | + __asm__ __volatile__("nop"); | ||
112 | + // two bytes | ||
113 | + __asm__ __volatile__("xchg %ax,%ax"); | ||
114 | + // three bytes | ||
115 | + //__asm__ __volatile__("leal 0(%esi),%esi"); | ||
116 | + __asm__ __volatile__(".byte 0x8d,0x76,0x00"); | ||
117 | + // four bytes | ||
118 | + //__asm__ __volatile__("leal 0(%esi,%eiz),%esi"); | ||
119 | + __asm__ __volatile__(".byte 0x8d,0x74,0x26,0x00"); | ||
120 | + // five bytes | ||
121 | + //__asm__ __volatile__("leal %cs:0(%esi,%eiz),%esi"); | ||
122 | + __asm__ __volatile__(".byte 0x2e,0x8d,0x74,0x26,0x00"); | ||
123 | + // six bytes | ||
124 | + //__asm__ __volatile__("leal 0L(%esi),%esi"); | ||
125 | + __asm__ __volatile__(".byte 0x8d,0xb6,0x00,0x00,0x00,0x00"); | ||
126 | + // seven bytes | ||
127 | + //__asm__ __volatile__("leal 0L(%esi,%eiz),%esi"); | ||
128 | + __asm__ __volatile__(".byte 0x8d,0xb4,0x26,0x00,0x00,0x00,0x00"); | ||
129 | + // eight bytes | ||
130 | + //__asm__ __volatile__("leal %cs:0L(%esi,%eiz),%esi"); | ||
131 | + __asm__ __volatile__(".byte 0x2e,0x8d,0xb4,0x26,0x00,0x00,0x00,0x00"); | ||
132 | +} | ||
133 | --- /dev/null | ||
134 | +++ b/none/tests/x86/gnu_binutils_nop.vgtest | ||
135 | @@ -0,0 +1,2 @@ | ||
136 | +prog: gnu_binutils_nop | ||
137 | +vgopts: -q | ||
diff --git a/meta/recipes-devtools/valgrind/valgrind/Added-support-for-PPC-instructions-mfatbu-mfatbl.patch b/meta/recipes-devtools/valgrind/valgrind/Added-support-for-PPC-instructions-mfatbu-mfatbl.patch index 51cd3532d3..65de427d36 100644 --- a/meta/recipes-devtools/valgrind/valgrind/Added-support-for-PPC-instructions-mfatbu-mfatbl.patch +++ b/meta/recipes-devtools/valgrind/valgrind/Added-support-for-PPC-instructions-mfatbu-mfatbl.patch | |||
@@ -17,11 +17,9 @@ Signed-off-by: Aneesh Bansal <aneesh.bansal@freescale.com> | |||
17 | VEX/priv/guest_ppc_toIR.c | 22 ++++++++++++++++++++++ | 17 | VEX/priv/guest_ppc_toIR.c | 22 ++++++++++++++++++++++ |
18 | 3 files changed, 42 insertions(+), 0 deletions(-) | 18 | 3 files changed, 42 insertions(+), 0 deletions(-) |
19 | 19 | ||
20 | diff --git a/VEX/priv/guest_ppc_defs.h b/VEX/priv/guest_ppc_defs.h | ||
21 | index dd3c62e..11a34aa 100644 | ||
22 | --- a/VEX/priv/guest_ppc_defs.h | 20 | --- a/VEX/priv/guest_ppc_defs.h |
23 | +++ b/VEX/priv/guest_ppc_defs.h | 21 | +++ b/VEX/priv/guest_ppc_defs.h |
24 | @@ -146,6 +146,8 @@ extern UInt ppc32g_dirtyhelper_MFSPR_268_269 ( UInt ); | 22 | @@ -233,6 +233,8 @@ extern UInt ppc32g_dirtyhelper_MFSPR_268 |
25 | 23 | ||
26 | extern UInt ppc32g_dirtyhelper_MFSPR_287 ( void ); | 24 | extern UInt ppc32g_dirtyhelper_MFSPR_287 ( void ); |
27 | 25 | ||
@@ -30,11 +28,9 @@ index dd3c62e..11a34aa 100644 | |||
30 | extern void ppc32g_dirtyhelper_LVS ( VexGuestPPC32State* gst, | 28 | extern void ppc32g_dirtyhelper_LVS ( VexGuestPPC32State* gst, |
31 | UInt vD_idx, UInt sh, | 29 | UInt vD_idx, UInt sh, |
32 | UInt shift_right ); | 30 | UInt shift_right ); |
33 | diff --git a/VEX/priv/guest_ppc_helpers.c b/VEX/priv/guest_ppc_helpers.c | ||
34 | index 11aa428..b49ea3f 100644 | ||
35 | --- a/VEX/priv/guest_ppc_helpers.c | 31 | --- a/VEX/priv/guest_ppc_helpers.c |
36 | +++ b/VEX/priv/guest_ppc_helpers.c | 32 | +++ b/VEX/priv/guest_ppc_helpers.c |
37 | @@ -119,6 +119,24 @@ UInt ppc32g_dirtyhelper_MFSPR_287 ( void ) | 33 | @@ -118,6 +118,24 @@ UInt ppc32g_dirtyhelper_MFSPR_287 ( void |
38 | # endif | 34 | # endif |
39 | } | 35 | } |
40 | 36 | ||
@@ -59,11 +55,9 @@ index 11aa428..b49ea3f 100644 | |||
59 | 55 | ||
60 | /* CALLED FROM GENERATED CODE */ | 56 | /* CALLED FROM GENERATED CODE */ |
61 | /* DIRTY HELPER (reads guest state, writes guest mem) */ | 57 | /* DIRTY HELPER (reads guest state, writes guest mem) */ |
62 | diff --git a/VEX/priv/guest_ppc_toIR.c b/VEX/priv/guest_ppc_toIR.c | ||
63 | index f8d220d..37c8974 100644 | ||
64 | --- a/VEX/priv/guest_ppc_toIR.c | 58 | --- a/VEX/priv/guest_ppc_toIR.c |
65 | +++ b/VEX/priv/guest_ppc_toIR.c | 59 | +++ b/VEX/priv/guest_ppc_toIR.c |
66 | @@ -5657,6 +5657,28 @@ static Bool dis_proc_ctl ( VexAbiInfo* vbi, UInt theInstr ) | 60 | @@ -11875,6 +11875,28 @@ static Bool dis_proc_ctl ( const VexAbiI |
67 | break; | 61 | break; |
68 | } | 62 | } |
69 | 63 | ||
@@ -92,5 +86,3 @@ index f8d220d..37c8974 100644 | |||
92 | default: | 86 | default: |
93 | vex_printf("dis_proc_ctl(ppc)(mfspr,SPR)(0x%x)\n", SPR); | 87 | vex_printf("dis_proc_ctl(ppc)(mfspr,SPR)(0x%x)\n", SPR); |
94 | return False; | 88 | return False; |
95 | -- | ||
96 | 1.7.0.4 | ||
diff --git a/meta/recipes-devtools/valgrind/valgrind/s390x_vec_op_t.patch b/meta/recipes-devtools/valgrind/valgrind/s390x_vec_op_t.patch index bde1241dc7..3ab339d215 100644 --- a/meta/recipes-devtools/valgrind/valgrind/s390x_vec_op_t.patch +++ b/meta/recipes-devtools/valgrind/valgrind/s390x_vec_op_t.patch | |||
@@ -17,19 +17,14 @@ Signed-off-by: Yi Fan Yu <yifan.yu@windriver.com> | |||
17 | VEX/priv/guest_s390_defs.h | 2 +- | 17 | VEX/priv/guest_s390_defs.h | 2 +- |
18 | 1 file changed, 1 insertion(+), 1 deletion(-) | 18 | 1 file changed, 1 insertion(+), 1 deletion(-) |
19 | 19 | ||
20 | diff --git a/VEX/priv/guest_s390_defs.h b/VEX/priv/guest_s390_defs.h | ||
21 | index 9054290..dab8002 100644 | ||
22 | --- a/VEX/priv/guest_s390_defs.h | 20 | --- a/VEX/priv/guest_s390_defs.h |
23 | +++ b/VEX/priv/guest_s390_defs.h | 21 | +++ b/VEX/priv/guest_s390_defs.h |
24 | @@ -284,7 +284,7 @@ typedef enum { | 22 | @@ -284,7 +284,7 @@ typedef enum { |
25 | S390_VEC_OP_VBPERM, | 23 | S390_VEC_OP_VCLFNL, |
26 | S390_VEC_OP_VMSL, | 24 | S390_VEC_OP_VCRNF, |
27 | S390_VEC_OP_LAST // supposed to be the last element in enum | 25 | S390_VEC_OP_LAST // supposed to be the last element in enum |
28 | -} s390x_vec_op_t; | 26 | -} s390x_vec_op_t; |
29 | +}; | 27 | +}; |
30 | 28 | ||
31 | /* Arguments of s390x_dirtyhelper_vec_op(...) which are packed into one | 29 | /* Arguments of s390x_dirtyhelper_vec_op(...) which are packed into one |
32 | ULong variable. | 30 | ULong variable. |
33 | -- | ||
34 | 2.29.2 | ||
35 | |||
diff --git a/meta/recipes-devtools/valgrind/valgrind/use-appropriate-march-mcpu-mfpu-for-ARM-test-apps.patch b/meta/recipes-devtools/valgrind/valgrind/use-appropriate-march-mcpu-mfpu-for-ARM-test-apps.patch index f15d04b173..afca874d02 100644 --- a/meta/recipes-devtools/valgrind/valgrind/use-appropriate-march-mcpu-mfpu-for-ARM-test-apps.patch +++ b/meta/recipes-devtools/valgrind/valgrind/use-appropriate-march-mcpu-mfpu-for-ARM-test-apps.patch | |||
@@ -22,11 +22,9 @@ Signed-off-by: Andre McCurdy <armccurdy@gmail.com> | |||
22 | none/tests/arm/Makefile.am | 6 ++++-- | 22 | none/tests/arm/Makefile.am | 6 ++++-- |
23 | 1 file changed, 4 insertions(+), 2 deletions(-) | 23 | 1 file changed, 4 insertions(+), 2 deletions(-) |
24 | 24 | ||
25 | diff --git a/none/tests/arm/Makefile.am b/none/tests/arm/Makefile.am | ||
26 | index 4507a20..825290f 100644 | ||
27 | --- a/none/tests/arm/Makefile.am | 25 | --- a/none/tests/arm/Makefile.am |
28 | +++ b/none/tests/arm/Makefile.am | 26 | +++ b/none/tests/arm/Makefile.am |
29 | @@ -62,8 +62,10 @@ neon64_CFLAGS = $(AM_CFLAGS) -g -O0 -mcpu=cortex-a8 \ | 27 | @@ -87,8 +87,10 @@ neon64_CFLAGS = $(AM_CFLAGS) -g -O0 |
30 | -mfpu=neon \ | 28 | -mfpu=neon \ |
31 | -mthumb | 29 | -mthumb |
32 | 30 | ||
@@ -39,6 +37,3 @@ index 4507a20..825290f 100644 | |||
39 | +vcvt_fixed_float_VFP_CFLAGS = $(AM_CFLAGS) -g -mcpu=cortex-a8 -mfpu=vfpv3 | 37 | +vcvt_fixed_float_VFP_CFLAGS = $(AM_CFLAGS) -g -mcpu=cortex-a8 -mfpu=vfpv3 |
40 | + | 38 | + |
41 | +vfpv4_fma_CFLAGS = $(AM_CFLAGS) -g -O0 -march=armv7ve -mcpu=cortex-a15 -mfpu=vfpv4 -marm | 39 | +vfpv4_fma_CFLAGS = $(AM_CFLAGS) -g -O0 -march=armv7ve -mcpu=cortex-a15 -mfpu=vfpv4 -marm |
42 | -- | ||
43 | 1.9.1 | ||
44 | |||
diff --git a/meta/recipes-devtools/valgrind/valgrind/valgrind-make-ld-XXX.so-strlen-intercept-optional.patch b/meta/recipes-devtools/valgrind/valgrind/valgrind-make-ld-XXX.so-strlen-intercept-optional.patch index d04297dca8..0895473fc0 100644 --- a/meta/recipes-devtools/valgrind/valgrind/valgrind-make-ld-XXX.so-strlen-intercept-optional.patch +++ b/meta/recipes-devtools/valgrind/valgrind/valgrind-make-ld-XXX.so-strlen-intercept-optional.patch | |||
@@ -16,11 +16,9 @@ Signed-off-by: Jackie Huang <jackie.huang@windriver.com> | |||
16 | coregrind/m_redir.c | 13 ++++++++++++- | 16 | coregrind/m_redir.c | 13 ++++++++++++- |
17 | 1 file changed, 12 insertions(+), 1 deletion(-) | 17 | 1 file changed, 12 insertions(+), 1 deletion(-) |
18 | 18 | ||
19 | diff --git a/coregrind/m_redir.c b/coregrind/m_redir.c | ||
20 | index ff35009..d7d6816 100644 | ||
21 | --- a/coregrind/m_redir.c | 19 | --- a/coregrind/m_redir.c |
22 | +++ b/coregrind/m_redir.c | 20 | +++ b/coregrind/m_redir.c |
23 | @@ -1275,7 +1275,18 @@ static void add_hardwired_spec (const HChar* sopatt, const HChar* fnpatt, | 21 | @@ -1307,7 +1307,18 @@ static void add_hardwired_spec (const H |
24 | spec->to_addr = to_addr; | 22 | spec->to_addr = to_addr; |
25 | spec->isWrap = False; | 23 | spec->isWrap = False; |
26 | spec->isGlobal = False; | 24 | spec->isGlobal = False; |
@@ -40,6 +38,3 @@ index ff35009..d7d6816 100644 | |||
40 | /* VARIABLE PARTS */ | 38 | /* VARIABLE PARTS */ |
41 | spec->mark = False; /* not significant */ | 39 | spec->mark = False; /* not significant */ |
42 | spec->done = False; /* not significant */ | 40 | spec->done = False; /* not significant */ |
43 | -- | ||
44 | 1.9.1 | ||
45 | |||
diff --git a/meta/recipes-devtools/valgrind/valgrind_3.22.0.bb b/meta/recipes-devtools/valgrind/valgrind_3.23.0.bb index 563d99f0e2..ad9e26cb6b 100644 --- a/meta/recipes-devtools/valgrind/valgrind_3.22.0.bb +++ b/meta/recipes-devtools/valgrind/valgrind_3.23.0.bb | |||
@@ -22,6 +22,7 @@ SRC_URI = "https://sourceware.org/pub/valgrind/valgrind-${PV}.tar.bz2 \ | |||
22 | file://0005-Modify-vg_test-wrapper-to-support-PTEST-formats.patch \ | 22 | file://0005-Modify-vg_test-wrapper-to-support-PTEST-formats.patch \ |
23 | file://use-appropriate-march-mcpu-mfpu-for-ARM-test-apps.patch \ | 23 | file://use-appropriate-march-mcpu-mfpu-for-ARM-test-apps.patch \ |
24 | file://avoid-neon-for-targets-which-don-t-support-it.patch \ | 24 | file://avoid-neon-for-targets-which-don-t-support-it.patch \ |
25 | file://0001-configure-Drop-setting-mcpu-cortex-a8-on-arm.patch \ | ||
25 | file://valgrind-make-ld-XXX.so-strlen-intercept-optional.patch \ | 26 | file://valgrind-make-ld-XXX.so-strlen-intercept-optional.patch \ |
26 | file://0001-makefiles-Drop-setting-mcpu-to-cortex-a8-on-arm-arch.patch \ | 27 | file://0001-makefiles-Drop-setting-mcpu-to-cortex-a8-on-arm-arch.patch \ |
27 | file://0001-sigqueue-Rename-_sifields-to-__si_fields-on-musl.patch \ | 28 | file://0001-sigqueue-Rename-_sifields-to-__si_fields-on-musl.patch \ |
@@ -33,12 +34,9 @@ SRC_URI = "https://sourceware.org/pub/valgrind/valgrind-${PV}.tar.bz2 \ | |||
33 | file://0001-none-tests-fdleak_cmsg.stderr.exp-adjust-tmp-paths.patch \ | 34 | file://0001-none-tests-fdleak_cmsg.stderr.exp-adjust-tmp-paths.patch \ |
34 | file://0001-memcheck-tests-Fix-timerfd-syscall-test.patch \ | 35 | file://0001-memcheck-tests-Fix-timerfd-syscall-test.patch \ |
35 | file://0001-docs-Disable-manual-validation.patch \ | 36 | file://0001-docs-Disable-manual-validation.patch \ |
36 | file://0001-valgrind-monitor.py-regular-expressions-should-use-r.patch \ | 37 | file://0001-tests-arm-Use-O-instead-of-O0.patch \ |
37 | file://0002-Bug-476548-valgrind-3.22.0-fails-on-assertion-when-l.patch \ | ||
38 | file://0003-Add-fchmodat2-syscall-on-linux.patch \ | ||
39 | file://0004-Bug-478624-Valgrind-incompatibility-with-binutils-2..patch \ | ||
40 | " | 38 | " |
41 | SRC_URI[sha256sum] = "c811db5add2c5f729944caf47c4e7a65dcaabb9461e472b578765dd7bf6d2d4c" | 39 | SRC_URI[sha256sum] = "c5c34a3380457b9b75606df890102e7df2c702b9420c2ebef9540f8b5d56264d" |
42 | UPSTREAM_CHECK_REGEX = "valgrind-(?P<pver>\d+(\.\d+)+)\.tar" | 40 | UPSTREAM_CHECK_REGEX = "valgrind-(?P<pver>\d+(\.\d+)+)\.tar" |
43 | 41 | ||
44 | COMPATIBLE_HOST = '(i.86|x86_64|arm|aarch64|mips|powerpc|powerpc64).*-linux' | 42 | COMPATIBLE_HOST = '(i.86|x86_64|arm|aarch64|mips|powerpc|powerpc64).*-linux' |
@@ -196,9 +194,9 @@ do_install_ptest() { | |||
196 | 194 | ||
197 | # The scripts reference config.h so add it to the top ptest dir. | 195 | # The scripts reference config.h so add it to the top ptest dir. |
198 | cp ${B}/config.h ${D}${PTEST_PATH} | 196 | cp ${B}/config.h ${D}${PTEST_PATH} |
199 | install -D ${WORKDIR}/remove-for-aarch64 ${D}${PTEST_PATH} | 197 | install -D ${UNPACKDIR}/remove-for-aarch64 ${D}${PTEST_PATH} |
200 | install -D ${WORKDIR}/remove-for-all ${D}${PTEST_PATH} | 198 | install -D ${UNPACKDIR}/remove-for-all ${D}${PTEST_PATH} |
201 | install -D ${WORKDIR}/taskset_nondeterministic_tests ${D}${PTEST_PATH} | 199 | install -D ${UNPACKDIR}/taskset_nondeterministic_tests ${D}${PTEST_PATH} |
202 | 200 | ||
203 | # Add an executable need by none/tests/bigcode | 201 | # Add an executable need by none/tests/bigcode |
204 | mkdir ${D}${PTEST_PATH}/perf | 202 | mkdir ${D}${PTEST_PATH}/perf |
diff --git a/meta/recipes-extended/at/at_3.2.5.bb b/meta/recipes-extended/at/at_3.2.5.bb index 0162548d33..112d1c4adc 100644 --- a/meta/recipes-extended/at/at_3.2.5.bb +++ b/meta/recipes-extended/at/at_3.2.5.bb | |||
@@ -70,7 +70,7 @@ do_install () { | |||
70 | sed -i -e 's,@SBINDIR@,${sbindir},g' ${D}${systemd_system_unitdir}/atd.service | 70 | sed -i -e 's,@SBINDIR@,${sbindir},g' ${D}${systemd_system_unitdir}/atd.service |
71 | 71 | ||
72 | if [ "${@bb.utils.filter('DISTRO_FEATURES', 'pam', d)}" ]; then | 72 | if [ "${@bb.utils.filter('DISTRO_FEATURES', 'pam', d)}" ]; then |
73 | install -D -m 0644 ${UNPACKDIR}/${BP}/pam.conf ${D}${sysconfdir}/pam.d/atd | 73 | install -D -m 0644 ${S}/pam.conf ${D}${sysconfdir}/pam.d/atd |
74 | fi | 74 | fi |
75 | rm -f ${D}${datadir}/at/batch-job | 75 | rm -f ${D}${datadir}/at/batch-job |
76 | } | 76 | } |
diff --git a/meta/recipes-extended/bash/bash.inc b/meta/recipes-extended/bash/bash.inc index e541161c75..66058feff3 100644 --- a/meta/recipes-extended/bash/bash.inc +++ b/meta/recipes-extended/bash/bash.inc | |||
@@ -108,7 +108,7 @@ do_install_ptest () { | |||
108 | cp ${B}/config.h ${D}${PTEST_PATH} | 108 | cp ${B}/config.h ${D}${PTEST_PATH} |
109 | cp ${B}/version.h ${D}${PTEST_PATH} | 109 | cp ${B}/version.h ${D}${PTEST_PATH} |
110 | cp ${S}/y.tab.[ch] ${D}${PTEST_PATH} | 110 | cp ${S}/y.tab.[ch] ${D}${PTEST_PATH} |
111 | install -D ${WORKDIR}/run-bash-ptests ${D}${PTEST_PATH}/run-bash-ptests | 111 | install -D ${UNPACKDIR}/run-bash-ptests ${D}${PTEST_PATH}/run-bash-ptests |
112 | sed -i -e 's/^Makefile/_Makefile/' -e "s,--sysroot=${STAGING_DIR_TARGET},,g" \ | 112 | sed -i -e 's/^Makefile/_Makefile/' -e "s,--sysroot=${STAGING_DIR_TARGET},,g" \ |
113 | -e 's|${DEBUG_PREFIX_MAP}||g' \ | 113 | -e 's|${DEBUG_PREFIX_MAP}||g' \ |
114 | -e 's|${BUILD_LDFLAGS}||g' \ | 114 | -e 's|${BUILD_LDFLAGS}||g' \ |
diff --git a/meta/recipes-extended/bzip2/bzip2_1.0.8.bb b/meta/recipes-extended/bzip2/bzip2_1.0.8.bb index 4e3a06f240..7d7ab3856b 100644 --- a/meta/recipes-extended/bzip2/bzip2_1.0.8.bb +++ b/meta/recipes-extended/bzip2/bzip2_1.0.8.bb | |||
@@ -14,11 +14,11 @@ LICENSE:libbz2 = "bzip2-1.0.6" | |||
14 | LICENSE:${PN}-ptest = "bzip2-1.0.6 & GPL-3.0-or-later & Apache-2.0 & MS-PL & BSD-3-Clause & Zlib" | 14 | LICENSE:${PN}-ptest = "bzip2-1.0.6 & GPL-3.0-or-later & Apache-2.0 & MS-PL & BSD-3-Clause & Zlib" |
15 | 15 | ||
16 | LIC_FILES_CHKSUM = "file://LICENSE;beginline=4;endline=37;md5=600af43c50f1fcb82e32f19b32df4664 \ | 16 | LIC_FILES_CHKSUM = "file://LICENSE;beginline=4;endline=37;md5=600af43c50f1fcb82e32f19b32df4664 \ |
17 | file://${WORKDIR}/git/commons-compress/LICENSE.txt;md5=86d3f3a95c324c9479bd8986968f4327 \ | 17 | file://${UNPACKDIR}/git/commons-compress/LICENSE.txt;md5=86d3f3a95c324c9479bd8986968f4327 \ |
18 | file://${WORKDIR}/git/dotnetzip/License.txt;md5=9cb56871eed4e748c3bc7e8ff352a54f \ | 18 | file://${UNPACKDIR}/git/dotnetzip/License.txt;md5=9cb56871eed4e748c3bc7e8ff352a54f \ |
19 | file://${WORKDIR}/git/dotnetzip/License.zlib.txt;md5=cc421ccd22eeb2e5db6b79e6de0a029f \ | 19 | file://${UNPACKDIR}/git/dotnetzip/License.zlib.txt;md5=cc421ccd22eeb2e5db6b79e6de0a029f \ |
20 | file://${WORKDIR}/git/go/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707 \ | 20 | file://${UNPACKDIR}/git/go/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707 \ |
21 | file://${WORKDIR}/git/lbzip2/COPYING;md5=d32239bcb673463ab874e80d47fae504 \ | 21 | file://${UNPACKDIR}/git/lbzip2/COPYING;md5=d32239bcb673463ab874e80d47fae504 \ |
22 | " | 22 | " |
23 | 23 | ||
24 | SRC_URI = "https://sourceware.org/pub/${BPN}/${BPN}-${PV}.tar.gz \ | 24 | SRC_URI = "https://sourceware.org/pub/${BPN}/${BPN}-${PV}.tar.gz \ |
@@ -52,13 +52,13 @@ do_configure:prepend () { | |||
52 | 52 | ||
53 | do_install_ptest () { | 53 | do_install_ptest () { |
54 | install -d ${D}${PTEST_PATH}/bzip2-tests | 54 | install -d ${D}${PTEST_PATH}/bzip2-tests |
55 | cp -r ${WORKDIR}/git/commons-compress ${D}${PTEST_PATH}/bzip2-tests/commons-compress | 55 | cp -r ${UNPACKDIR}/git/commons-compress ${D}${PTEST_PATH}/bzip2-tests/commons-compress |
56 | cp -r ${WORKDIR}/git/dotnetzip ${D}${PTEST_PATH}/bzip2-tests/dotnetzip | 56 | cp -r ${UNPACKDIR}/git/dotnetzip ${D}${PTEST_PATH}/bzip2-tests/dotnetzip |
57 | cp -r ${WORKDIR}/git/go ${D}${PTEST_PATH}/bzip2-tests/go | 57 | cp -r ${UNPACKDIR}/git/go ${D}${PTEST_PATH}/bzip2-tests/go |
58 | cp -r ${WORKDIR}/git/lbzip2 ${D}${PTEST_PATH}/bzip2-tests/lbzip2 | 58 | cp -r ${UNPACKDIR}/git/lbzip2 ${D}${PTEST_PATH}/bzip2-tests/lbzip2 |
59 | cp -r ${WORKDIR}/git/pyflate ${D}${PTEST_PATH}/bzip2-tests/pyflate | 59 | cp -r ${UNPACKDIR}/git/pyflate ${D}${PTEST_PATH}/bzip2-tests/pyflate |
60 | cp ${WORKDIR}/git/README ${D}${PTEST_PATH}/bzip2-tests/ | 60 | cp ${UNPACKDIR}/git/README ${D}${PTEST_PATH}/bzip2-tests/ |
61 | cp ${WORKDIR}/git/run-tests.sh ${D}${PTEST_PATH}/bzip2-tests/ | 61 | cp ${UNPACKDIR}/git/run-tests.sh ${D}${PTEST_PATH}/bzip2-tests/ |
62 | sed -i -e "s|^Makefile:|_Makefile:|" ${D}${PTEST_PATH}/Makefile | 62 | sed -i -e "s|^Makefile:|_Makefile:|" ${D}${PTEST_PATH}/Makefile |
63 | } | 63 | } |
64 | 64 | ||
diff --git a/meta/recipes-extended/cpio/cpio_2.15.bb b/meta/recipes-extended/cpio/cpio_2.15.bb index 52070f59a2..bfaf5c31c3 100644 --- a/meta/recipes-extended/cpio/cpio_2.15.bb +++ b/meta/recipes-extended/cpio/cpio_2.15.bb | |||
@@ -16,6 +16,7 @@ SRC_URI[sha256sum] = "efa50ef983137eefc0a02fdb51509d624b5e3295c980aa127ceee41834 | |||
16 | inherit autotools gettext texinfo ptest | 16 | inherit autotools gettext texinfo ptest |
17 | 17 | ||
18 | CVE_STATUS[CVE-2010-4226] = "not-applicable-platform: Issue applies to use of cpio in SUSE/OBS" | 18 | CVE_STATUS[CVE-2010-4226] = "not-applicable-platform: Issue applies to use of cpio in SUSE/OBS" |
19 | CVE_STATUS[CVE-2023-7216] = "disputed: intended behaviour, see https://lists.gnu.org/archive/html/bug-cpio/2024-03/msg00000.html" | ||
19 | 20 | ||
20 | EXTRA_OECONF += "DEFAULT_RMT_DIR=${sbindir}" | 21 | EXTRA_OECONF += "DEFAULT_RMT_DIR=${sbindir}" |
21 | 22 | ||
@@ -50,7 +51,7 @@ do_install_ptest() { | |||
50 | install --mode=755 ${B}/tests/atlocal ${D}${PTEST_PATH}/tests/ | 51 | install --mode=755 ${B}/tests/atlocal ${D}${PTEST_PATH}/tests/ |
51 | install --mode=755 ${B}/tests/genfile ${D}${PTEST_PATH}/tests/ | 52 | install --mode=755 ${B}/tests/genfile ${D}${PTEST_PATH}/tests/ |
52 | install --mode=755 ${S}/tests/testsuite ${D}${PTEST_PATH}/tests/ | 53 | install --mode=755 ${S}/tests/testsuite ${D}${PTEST_PATH}/tests/ |
53 | install --mode=755 ${WORKDIR}/test.sh ${D}${PTEST_PATH}/test.sh | 54 | install --mode=755 ${UNPACKDIR}/test.sh ${D}${PTEST_PATH}/test.sh |
54 | sed -i "s#@PTEST_PATH@#${PTEST_PATH}#g" ${D}${PTEST_PATH}/test.sh | 55 | sed -i "s#@PTEST_PATH@#${PTEST_PATH}#g" ${D}${PTEST_PATH}/test.sh |
55 | } | 56 | } |
56 | 57 | ||
diff --git a/meta/recipes-extended/cups/cups_2.4.7.bb b/meta/recipes-extended/cups/cups_2.4.8.bb index f4b0282e4c..c0cddf7e99 100644 --- a/meta/recipes-extended/cups/cups_2.4.7.bb +++ b/meta/recipes-extended/cups/cups_2.4.8.bb | |||
@@ -2,4 +2,4 @@ require cups.inc | |||
2 | 2 | ||
3 | LIC_FILES_CHKSUM = "file://LICENSE;md5=3b83ef96387f14655fc854ddc3c6bd57" | 3 | LIC_FILES_CHKSUM = "file://LICENSE;md5=3b83ef96387f14655fc854ddc3c6bd57" |
4 | 4 | ||
5 | SRC_URI[sha256sum] = "dd54228dd903526428ce7e37961afaed230ad310788141da75cebaa08362cf6c" | 5 | SRC_URI[sha256sum] = "75c326b4ba73975efcc9a25078c4b04cdb4ee333caaad0d0823dbd522c6479a0" |
diff --git a/meta/recipes-extended/ed/ed_1.20.1.bb b/meta/recipes-extended/ed/ed_1.20.2.bb index 9ae53002c3..2b78b080ba 100644 --- a/meta/recipes-extended/ed/ed_1.20.1.bb +++ b/meta/recipes-extended/ed/ed_1.20.2.bb | |||
@@ -19,7 +19,7 @@ bindir = "${base_bindir}" | |||
19 | SRC_URI = "${GNU_MIRROR}/ed/${BP}.tar.lz" | 19 | SRC_URI = "${GNU_MIRROR}/ed/${BP}.tar.lz" |
20 | UPSTREAM_CHECK_URI = "${GNU_MIRROR}/ed/" | 20 | UPSTREAM_CHECK_URI = "${GNU_MIRROR}/ed/" |
21 | 21 | ||
22 | SRC_URI[sha256sum] = "b1a463b297a141f9876c4b1fcd01477f645cded92168090e9a35db2af4babbca" | 22 | SRC_URI[sha256sum] = "65fec7318f48c2ca17f334ac0f4703defe62037bb13cc23920de077b5fa24523" |
23 | 23 | ||
24 | EXTRA_OEMAKE = "-e MAKEFLAGS=" | 24 | EXTRA_OEMAKE = "-e MAKEFLAGS=" |
25 | 25 | ||
diff --git a/meta/recipes-extended/gawk/gawk/0001-m4-readline-add-missing-includes.patch b/meta/recipes-extended/gawk/gawk/0001-m4-readline-add-missing-includes.patch new file mode 100644 index 0000000000..5be2fd97ee --- /dev/null +++ b/meta/recipes-extended/gawk/gawk/0001-m4-readline-add-missing-includes.patch | |||
@@ -0,0 +1,38 @@ | |||
1 | From 4f4e84f139e2a8682f1374a592f2636c43ad857b Mon Sep 17 00:00:00 2001 | ||
2 | From: Ross Burton <ross.burton@arm.com> | ||
3 | Date: Tue, 21 May 2024 15:10:11 +0000 | ||
4 | Subject: [PATCH] m4/readline: add missing includes | ||
5 | |||
6 | The cross-specific code fragment only includes stdio.h, where the native | ||
7 | fragment also includes fcntl.h and unistd.h. This is important because | ||
8 | GCC 14.1 has made the implicit definitions an error: | ||
9 | |||
10 | conftest.c: In function 'main': | ||
11 | conftest.c:144:9: error: implicit declaration of function 'close'; did you mean 'pclose'? [-Wimplicit-function-declaration] | ||
12 | conftest.c:146:14: error: implicit declaration of function 'open'; did you mean 'popen'? [-Wimplicit-function-declaration] | ||
13 | |||
14 | Add the missing includes so that the check doesn't always fail due to | ||
15 | these errors. | ||
16 | |||
17 | Upstream-Status: Submitted [https://lists.gnu.org/archive/html/bug-gawk/2024-05/msg00000.html] | ||
18 | Signed-off-by: Ross Burton <ross.burton@arm.com> | ||
19 | --- | ||
20 | m4/readline.m4 | 2 ++ | ||
21 | 1 file changed, 2 insertions(+) | ||
22 | |||
23 | diff --git a/m4/readline.m4 b/m4/readline.m4 | ||
24 | index 38f96326..efd52d4e 100644 | ||
25 | --- a/m4/readline.m4 | ||
26 | +++ b/m4/readline.m4 | ||
27 | @@ -66,6 +66,8 @@ dnl action if false: | ||
28 | dnl action if cross compiling: | ||
29 | [AC_LINK_IFELSE( | ||
30 | [AC_LANG_PROGRAM([[#include <stdio.h> | ||
31 | +#include <fcntl.h> | ||
32 | +#include <unistd.h> | ||
33 | #include <readline/readline.h> | ||
34 | #include <readline/history.h>]], dnl includes | ||
35 | dnl function body | ||
36 | -- | ||
37 | 2.34.1 | ||
38 | |||
diff --git a/meta/recipes-extended/gawk/gawk_5.3.0.bb b/meta/recipes-extended/gawk/gawk_5.3.0.bb index d7a0fc616d..e94cf19db4 100644 --- a/meta/recipes-extended/gawk/gawk_5.3.0.bb +++ b/meta/recipes-extended/gawk/gawk_5.3.0.bb | |||
@@ -16,6 +16,7 @@ PACKAGECONFIG[readline] = "--with-readline,--without-readline,readline" | |||
16 | PACKAGECONFIG[mpfr] = "--with-mpfr,--without-mpfr, mpfr" | 16 | PACKAGECONFIG[mpfr] = "--with-mpfr,--without-mpfr, mpfr" |
17 | 17 | ||
18 | SRC_URI = "${GNU_MIRROR}/gawk/gawk-${PV}.tar.gz \ | 18 | SRC_URI = "${GNU_MIRROR}/gawk/gawk-${PV}.tar.gz \ |
19 | file://0001-m4-readline-add-missing-includes.patch \ | ||
19 | file://run-ptest \ | 20 | file://run-ptest \ |
20 | " | 21 | " |
21 | 22 | ||
diff --git a/meta/recipes-extended/ghostscript/ghostscript_10.03.0.bb b/meta/recipes-extended/ghostscript/ghostscript_10.03.1.bb index ff7d38676e..0504f5244f 100644 --- a/meta/recipes-extended/ghostscript/ghostscript_10.03.0.bb +++ b/meta/recipes-extended/ghostscript/ghostscript_10.03.1.bb | |||
@@ -27,7 +27,7 @@ SRC_URI = "https://github.com/ArtifexSoftware/ghostpdl-downloads/releases/downlo | |||
27 | file://avoid-host-contamination.patch \ | 27 | file://avoid-host-contamination.patch \ |
28 | " | 28 | " |
29 | 29 | ||
30 | SRC_URI[sha256sum] = "6f2bc61023469fcf7c7c2d7f1bdd75b75f2b41836aa1d5e641396246d4abbb59" | 30 | SRC_URI[sha256sum] = "31cd01682ad23a801cc3bbc222a55f07c4ea3e068bdfb447792d54db21a2e8ad" |
31 | 31 | ||
32 | PACKAGECONFIG ??= "" | 32 | PACKAGECONFIG ??= "" |
33 | PACKAGECONFIG[gtk] = "--enable-gtk,--disable-gtk,gtk+3" | 33 | PACKAGECONFIG[gtk] = "--enable-gtk,--disable-gtk,gtk+3" |
diff --git a/meta/recipes-extended/go-examples/go-helloworld_0.1.bb b/meta/recipes-extended/go-examples/go-helloworld_0.1.bb index 74f3520eae..575ee81b50 100644 --- a/meta/recipes-extended/go-examples/go-helloworld_0.1.bb +++ b/meta/recipes-extended/go-examples/go-helloworld_0.1.bb | |||
@@ -5,7 +5,7 @@ HOMEPAGE = "https://golang.org/" | |||
5 | LICENSE = "MIT" | 5 | LICENSE = "MIT" |
6 | LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302" | 6 | LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302" |
7 | 7 | ||
8 | SRC_URI = "git://go.googlesource.com/example;branch=master;protocol=https" | 8 | SRC_URI = "git://go.googlesource.com/example;branch=master;protocol=https;destsuffix=${GO_SRCURI_DESTSUFFIX}" |
9 | SRCREV = "32022caedd6a177a7717aa8680cbe179e1045935" | 9 | SRCREV = "32022caedd6a177a7717aa8680cbe179e1045935" |
10 | UPSTREAM_CHECK_COMMITS = "1" | 10 | UPSTREAM_CHECK_COMMITS = "1" |
11 | 11 | ||
diff --git a/meta/recipes-extended/iptables/iptables/0001-configure-Add-option-to-enable-disable-libnfnetlink.patch b/meta/recipes-extended/iptables/iptables/0001-configure-Add-option-to-enable-disable-libnfnetlink.patch index 8824bf2af7..0fe2261511 100644 --- a/meta/recipes-extended/iptables/iptables/0001-configure-Add-option-to-enable-disable-libnfnetlink.patch +++ b/meta/recipes-extended/iptables/iptables/0001-configure-Add-option-to-enable-disable-libnfnetlink.patch | |||
@@ -1,22 +1,24 @@ | |||
1 | From 0096c854d5015918ed154dccb3ad472fd06c1010 Mon Sep 17 00:00:00 2001 | 1 | From 653db8b938166db7833135f615b90c38a3f27a30 Mon Sep 17 00:00:00 2001 |
2 | From: "Maxin B. John" <maxin.john@intel.com> | 2 | From: "Maxin B. John" <maxin.john@intel.com> |
3 | Date: Tue, 21 Feb 2017 11:16:31 +0200 | 3 | Date: Thu, 25 Apr 2024 10:51:02 +0200 |
4 | Subject: [PATCH] configure: Add option to enable/disable libnfnetlink | 4 | Subject: [PATCH] configure: Add option to enable/disable libnfnetlink |
5 | 5 | ||
6 | This changes the configure behaviour from autodetecting | 6 | Default behavior (autodetecting) does not change, but specifying |
7 | for libnfnetlink to having an option to disable it explicitly | 7 | either option would explicitly disable or enable libnfnetlink support, |
8 | 8 | and if the library is not found in the latter case, ./configure will error | |
9 | Upstream-Status: Pending | 9 | out. |
10 | 10 | ||
11 | Upstream-Status: Backport [https://git.netfilter.org/iptables/commit/?id=653db8b938166db7833135f615b90c38a3f27a30] | ||
11 | Signed-off-by: Khem Raj <raj.khem@gmail.com> | 12 | Signed-off-by: Khem Raj <raj.khem@gmail.com> |
12 | Signed-off-by: Maxin B. John <maxin.john@intel.com> | 13 | Signed-off-by: Maxin B. John <maxin.john@intel.com> |
13 | 14 | Signed-off-by: Alexander Kanavin <alex@linutronix.de> | |
15 | Signed-off-by: Phil Sutter <phil@nwl.cc> | ||
14 | --- | 16 | --- |
15 | configure.ac | 10 +++++++--- | 17 | configure.ac | 13 +++++++++++-- |
16 | 1 file changed, 7 insertions(+), 3 deletions(-) | 18 | 1 file changed, 11 insertions(+), 2 deletions(-) |
17 | 19 | ||
18 | diff --git a/configure.ac b/configure.ac | 20 | diff --git a/configure.ac b/configure.ac |
19 | index d99fa3b..d607772 100644 | 21 | index d99fa3b9..2293702b 100644 |
20 | --- a/configure.ac | 22 | --- a/configure.ac |
21 | +++ b/configure.ac | 23 | +++ b/configure.ac |
22 | @@ -63,6 +63,9 @@ AC_ARG_WITH([pkgconfigdir], AS_HELP_STRING([--with-pkgconfigdir=PATH], | 24 | @@ -63,6 +63,9 @@ AC_ARG_WITH([pkgconfigdir], AS_HELP_STRING([--with-pkgconfigdir=PATH], |
@@ -25,21 +27,27 @@ index d99fa3b..d607772 100644 | |||
25 | [enable_nftables="$enableval"], [enable_nftables="yes"]) | 27 | [enable_nftables="$enableval"], [enable_nftables="yes"]) |
26 | +AC_ARG_ENABLE([libnfnetlink], | 28 | +AC_ARG_ENABLE([libnfnetlink], |
27 | + AS_HELP_STRING([--disable-libnfnetlink], [Do not use netfilter netlink library]), | 29 | + AS_HELP_STRING([--disable-libnfnetlink], [Do not use netfilter netlink library]), |
28 | + [enable_libnfnetlink="$enableval"], [enable_libnfnetlink="yes"]) | 30 | + [enable_libnfnetlink="$enableval"], [enable_libnfnetlink="auto"]) |
29 | AC_ARG_ENABLE([connlabel], | 31 | AC_ARG_ENABLE([connlabel], |
30 | AS_HELP_STRING([--disable-connlabel], | 32 | AS_HELP_STRING([--disable-connlabel], |
31 | [Do not build libnetfilter_conntrack]), | 33 | [Do not build libnetfilter_conntrack]), |
32 | @@ -113,9 +116,10 @@ AM_CONDITIONAL([ENABLE_SYNCONF], [test "$enable_nfsynproxy" = "yes"]) | 34 | @@ -113,8 +116,14 @@ AM_CONDITIONAL([ENABLE_SYNCONF], [test "$enable_nfsynproxy" = "yes"]) |
33 | AM_CONDITIONAL([ENABLE_NFTABLES], [test "$enable_nftables" = "yes"]) | 35 | AM_CONDITIONAL([ENABLE_NFTABLES], [test "$enable_nftables" = "yes"]) |
34 | AM_CONDITIONAL([ENABLE_CONNLABEL], [test "$enable_connlabel" = "yes"]) | 36 | AM_CONDITIONAL([ENABLE_CONNLABEL], [test "$enable_connlabel" = "yes"]) |
35 | 37 | ||
36 | -PKG_CHECK_MODULES([libnfnetlink], [libnfnetlink >= 1.0], | 38 | -PKG_CHECK_MODULES([libnfnetlink], [libnfnetlink >= 1.0], |
37 | - [nfnetlink=1], [nfnetlink=0]) | 39 | - [nfnetlink=1], [nfnetlink=0]) |
38 | -AM_CONDITIONAL([HAVE_LIBNFNETLINK], [test "$nfnetlink" = 1]) | 40 | +# If specified explicitly on the command line, error out when library was not found |
39 | +AS_IF([test "x$enable_libnfnetlink" = "xyes"], [ | 41 | +# Otherwise, disable and continue |
40 | + PKG_CHECK_MODULES([libnfnetlink], [libnfnetlink >= 1.0]) | 42 | +AS_IF([test "x$enable_libnfnetlink" = "xyes"], |
41 | + ]) | 43 | + [PKG_CHECK_MODULES([libnfnetlink], [libnfnetlink >= 1.0], |
42 | +AM_CONDITIONAL([HAVE_LIBNFNETLINK], [test "x$enable_libnfnetlink" = "xyes"]) | 44 | + [nfnetlink=1])], |
45 | + [test "x$enable_libnfnetlink" = "xauto"], | ||
46 | + [PKG_CHECK_MODULES([libnfnetlink], [libnfnetlink >= 1.0], | ||
47 | + [nfnetlink=1], [nfnetlink=0])]) | ||
48 | AM_CONDITIONAL([HAVE_LIBNFNETLINK], [test "$nfnetlink" = 1]) | ||
43 | 49 | ||
44 | if test "x$enable_bpfc" = "xyes" || test "x$enable_nfsynproxy" = "xyes"; then | 50 | if test "x$enable_bpfc" = "xyes" || test "x$enable_nfsynproxy" = "xyes"; then |
45 | PKG_CHECK_MODULES([libpcap], [libpcap], [], [ | 51 | -- |
52 | 2.39.2 | ||
53 | |||
diff --git a/meta/recipes-extended/iptables/iptables/0002-iptables-xshared.h-add-missing-sys.types.h-include.patch b/meta/recipes-extended/iptables/iptables/0002-iptables-xshared.h-add-missing-sys.types.h-include.patch deleted file mode 100644 index a190c7e8ae..0000000000 --- a/meta/recipes-extended/iptables/iptables/0002-iptables-xshared.h-add-missing-sys.types.h-include.patch +++ /dev/null | |||
@@ -1,31 +0,0 @@ | |||
1 | From 465e3ef77f1763d225adc76220e43ee9bd73b178 Mon Sep 17 00:00:00 2001 | ||
2 | From: Alexander Kanavin <alex@linutronix.de> | ||
3 | Date: Tue, 17 May 2022 10:56:59 +0200 | ||
4 | Subject: [PATCH] iptables/xshared.h: add missing sys.types.h include | ||
5 | |||
6 | This resolves the build error under musl: | ||
7 | |||
8 | | ../../../../../../../workspace/sources/iptables/iptables/xshared.h:83:56: error: unknown type name 'u_int16_t'; did you mean 'uint16_t'? | ||
9 | | 83 | set_option(unsigned int *options, unsigned int option, u_int16_t *invflg, | ||
10 | | | ^~~~~~~~~ | ||
11 | | | uint16_t | ||
12 | |||
13 | Upstream-Status: Submitted [via email to phil@nwl.cc] | ||
14 | Signed-off-by: Alexander Kanavin <alex@linutronix.de> | ||
15 | |||
16 | --- | ||
17 | iptables/xshared.h | 1 + | ||
18 | 1 file changed, 1 insertion(+) | ||
19 | |||
20 | diff --git a/iptables/xshared.h b/iptables/xshared.h | ||
21 | index a200e0d..f543dbf 100644 | ||
22 | --- a/iptables/xshared.h | ||
23 | +++ b/iptables/xshared.h | ||
24 | @@ -6,6 +6,7 @@ | ||
25 | #include <stdint.h> | ||
26 | #include <netinet/in.h> | ||
27 | #include <net/if.h> | ||
28 | +#include <sys/types.h> | ||
29 | #include <linux/netfilter_arp/arp_tables.h> | ||
30 | #include <linux/netfilter_ipv4/ip_tables.h> | ||
31 | #include <linux/netfilter_ipv6/ip6_tables.h> | ||
diff --git a/meta/recipes-extended/iptables/iptables/0004-configure.ac-only-check-conntrack-when-libnfnetlink-.patch b/meta/recipes-extended/iptables/iptables/0004-configure.ac-only-check-conntrack-when-libnfnetlink-.patch deleted file mode 100644 index 5a022ebc8c..0000000000 --- a/meta/recipes-extended/iptables/iptables/0004-configure.ac-only-check-conntrack-when-libnfnetlink-.patch +++ /dev/null | |||
@@ -1,49 +0,0 @@ | |||
1 | From 6832501bbb90a3dab977a4625d0391804c0e795c Mon Sep 17 00:00:00 2001 | ||
2 | From: "Maxin B. John" <maxin.john@intel.com> | ||
3 | Date: Tue, 21 Feb 2017 11:49:07 +0200 | ||
4 | Subject: [PATCH] configure.ac: | ||
5 | only-check-conntrack-when-libnfnetlink-enabled.patch | ||
6 | |||
7 | Package libnetfilter-conntrack depends on package libnfnetlink. iptables | ||
8 | checks package libnetfilter-conntrack whatever its package config | ||
9 | libnfnetlink is enabled or not. When libnfnetlink is disabled but | ||
10 | package libnetfilter-conntrack exists, it fails randomly with: | ||
11 | |||
12 | In file included from | ||
13 | .../iptables/1.4.21-r0/iptables-1.4.21/extensions/libxt_connlabel.c:8:0: | ||
14 | |||
15 | .../tmp/sysroots/qemumips/usr/include/libnetfilter_conntrack/libnetfilter_conntrack.h:14:42: | ||
16 | fatal error: libnfnetlink/linux_nfnetlink.h: No such file or directory | ||
17 | |||
18 | compilation terminated. | ||
19 | GNUmakefile:96: recipe for target 'libxt_connlabel.oo' failed | ||
20 | Only check libnetfilter-conntrack when libnfnetlink is enabled to fix it. | ||
21 | |||
22 | Upstream-Status: Pending | ||
23 | |||
24 | Signed-off-by: Kai Kang <kai.kang@windriver.com> | ||
25 | Signed-off-by: Maxin B. John <maxin.john@intel.com> | ||
26 | |||
27 | --- | ||
28 | configure.ac | 6 ++++-- | ||
29 | 1 file changed, 4 insertions(+), 2 deletions(-) | ||
30 | |||
31 | diff --git a/configure.ac b/configure.ac | ||
32 | index d607772..25a8e75 100644 | ||
33 | --- a/configure.ac | ||
34 | +++ b/configure.ac | ||
35 | @@ -159,10 +159,12 @@ if test "$nftables" != 1; then | ||
36 | fi | ||
37 | |||
38 | if test "x$enable_connlabel" = "xyes"; then | ||
39 | - PKG_CHECK_MODULES([libnetfilter_conntrack], | ||
40 | + nfconntrack=0 | ||
41 | + AS_IF([test "x$enable_libnfnetlink" = "xyes"], [ | ||
42 | + PKG_CHECK_MODULES([libnetfilter_conntrack], | ||
43 | [libnetfilter_conntrack >= 1.0.6], | ||
44 | [nfconntrack=1], [nfconntrack=0]) | ||
45 | - | ||
46 | + ]) | ||
47 | if test "$nfconntrack" -ne 1; then | ||
48 | blacklist_modules="$blacklist_modules connlabel"; | ||
49 | echo "WARNING: libnetfilter_conntrack not found, connlabel match will not be built"; | ||
diff --git a/meta/recipes-extended/iptables/iptables_1.8.10.bb b/meta/recipes-extended/iptables/iptables_1.8.10.bb index 5a87897742..a9c88582cd 100644 --- a/meta/recipes-extended/iptables/iptables_1.8.10.bb +++ b/meta/recipes-extended/iptables/iptables_1.8.10.bb | |||
@@ -14,8 +14,6 @@ SRC_URI = "http://netfilter.org/projects/iptables/files/iptables-${PV}.tar.xz \ | |||
14 | file://ip6tables.service \ | 14 | file://ip6tables.service \ |
15 | file://ip6tables.rules \ | 15 | file://ip6tables.rules \ |
16 | file://0001-configure-Add-option-to-enable-disable-libnfnetlink.patch \ | 16 | file://0001-configure-Add-option-to-enable-disable-libnfnetlink.patch \ |
17 | file://0002-iptables-xshared.h-add-missing-sys.types.h-include.patch \ | ||
18 | file://0004-configure.ac-only-check-conntrack-when-libnfnetlink-.patch \ | ||
19 | " | 17 | " |
20 | SRC_URI[sha256sum] = "5cc255c189356e317d070755ce9371eb63a1b783c34498fb8c30264f3cc59c9c" | 18 | SRC_URI[sha256sum] = "5cc255c189356e317d070755ce9371eb63a1b783c34498fb8c30264f3cc59c9c" |
21 | 19 | ||
@@ -34,7 +32,7 @@ PACKAGECONFIG ?= "${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)}" | |||
34 | PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6," | 32 | PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6," |
35 | 33 | ||
36 | # libnfnetlink recipe is in meta-networking layer | 34 | # libnfnetlink recipe is in meta-networking layer |
37 | PACKAGECONFIG[libnfnetlink] = "--enable-libnfnetlink,--disable-libnfnetlink,libnfnetlink libnetfilter-conntrack" | 35 | PACKAGECONFIG[libnfnetlink] = "--enable-libnfnetlink --enable-connlabel,--disable-libnfnetlink --disable-connlabel,libnfnetlink libnetfilter-conntrack" |
38 | 36 | ||
39 | # libnftnl recipe is in meta-networking layer(previously known as libnftables) | 37 | # libnftnl recipe is in meta-networking layer(previously known as libnftables) |
40 | PACKAGECONFIG[libnftnl] = "--enable-nftables,--disable-nftables,libnftnl" | 38 | PACKAGECONFIG[libnftnl] = "--enable-nftables,--disable-nftables,libnftnl" |
diff --git a/meta/recipes-extended/libarchive/libarchive_3.7.3.bb b/meta/recipes-extended/libarchive/libarchive_3.7.4.bb index bea91b6e97..da85764116 100644 --- a/meta/recipes-extended/libarchive/libarchive_3.7.3.bb +++ b/meta/recipes-extended/libarchive/libarchive_3.7.4.bb | |||
@@ -33,7 +33,7 @@ SRC_URI = "http://libarchive.org/downloads/libarchive-${PV}.tar.gz" | |||
33 | SRC_URI += "file://configurehack.patch" | 33 | SRC_URI += "file://configurehack.patch" |
34 | UPSTREAM_CHECK_URI = "http://libarchive.org/" | 34 | UPSTREAM_CHECK_URI = "http://libarchive.org/" |
35 | 35 | ||
36 | SRC_URI[sha256sum] = "f27a97bc22ceb996e72502df47dc19f99f9a0f09181ae909f09f3c9eb17b67e2" | 36 | SRC_URI[sha256sum] = "7875d49596286055b52439ed42f044bd8ad426aa4cc5aabd96bfe7abb971d5e8" |
37 | 37 | ||
38 | CVE_STATUS[CVE-2023-30571] = "upstream-wontfix: upstream has documented that reported function is not thread-safe" | 38 | CVE_STATUS[CVE-2023-30571] = "upstream-wontfix: upstream has documented that reported function is not thread-safe" |
39 | 39 | ||
diff --git a/meta/recipes-extended/ltp/ltp/0001-sched_stress-Use-time_t-instead-of-long-for-type.patch b/meta/recipes-extended/ltp/ltp/0001-sched_stress-Use-time_t-instead-of-long-for-type.patch new file mode 100644 index 0000000000..ae8dc8706e --- /dev/null +++ b/meta/recipes-extended/ltp/ltp/0001-sched_stress-Use-time_t-instead-of-long-for-type.patch | |||
@@ -0,0 +1,54 @@ | |||
1 | From 74074f9a71c876d6e95c2d72702888dd2fabc761 Mon Sep 17 00:00:00 2001 | ||
2 | From: Khem Raj <raj.khem@gmail.com> | ||
3 | Date: Mon, 6 May 2024 11:43:20 -0700 | ||
4 | Subject: [PATCH] sched_stress: Use time_t instead of long for type | ||
5 | |||
6 | This ensures it works across different architectures | ||
7 | Fixes | ||
8 | |||
9 | | sched_driver.c:744:43: error: passing argument 1 of 'ctime' from incompatible pointer type [-Wincompatible-pointer-types] | ||
10 | | 744 | printf("\nend time = %s\n", ctime(&end_time)); | ||
11 | | | ^~~~~~~~~ | ||
12 | |||
13 | With gcc-14 | ||
14 | |||
15 | Upstream-Status: Backport [https://github.com/linux-test-project/ltp/commit/0a682f1af42d8d261202821be580fe26d17ee9b7] | ||
16 | Signed-off-by: Khem Raj <raj.khem@gmail.com> | ||
17 | --- | ||
18 | testcases/kernel/sched/sched_stress/sched_driver.c | 6 +++--- | ||
19 | 1 file changed, 3 insertions(+), 3 deletions(-) | ||
20 | |||
21 | diff --git a/testcases/kernel/sched/sched_stress/sched_driver.c b/testcases/kernel/sched/sched_stress/sched_driver.c | ||
22 | index 61573d788..5b8c187fe 100644 | ||
23 | --- a/testcases/kernel/sched/sched_stress/sched_driver.c | ||
24 | +++ b/testcases/kernel/sched/sched_stress/sched_driver.c | ||
25 | @@ -136,7 +136,7 @@ int debug = 0; | ||
26 | /* | ||
27 | * Function prototypes | ||
28 | */ | ||
29 | -void startup(long); | ||
30 | +void startup(time_t); | ||
31 | int start_testcase(char *, char *, char *, char *, char *, char *); | ||
32 | int process_slots_in_use(); | ||
33 | int available_user_process_slots(); | ||
34 | @@ -251,7 +251,7 @@ int main(int argc, char **argv) | ||
35 | * information to the screen and . It also initializes the * | ||
36 | * process id list and other global variables. * | ||
37 | *-----------------------------------------------------------------------*/ | ||
38 | -void startup(long start_time) | ||
39 | +void startup(time_t start_time) | ||
40 | { | ||
41 | char tempbuffer[50]; /* temporary buffer to hold names */ | ||
42 | |||
43 | @@ -734,7 +734,7 @@ void kill_short_term_testcases() | ||
44 | void finishup(start_time) | ||
45 | long start_time; /* starting time to calculate elapsed time */ | ||
46 | { | ||
47 | - long end_time; /* time when program finished */ | ||
48 | + time_t end_time; /* time when program finished */ | ||
49 | |||
50 | /* | ||
51 | * Get the end time and calculate elapsed time; write all this out | ||
52 | -- | ||
53 | 2.45.0 | ||
54 | |||
diff --git a/meta/recipes-extended/ltp/ltp_20240129.bb b/meta/recipes-extended/ltp/ltp_20240129.bb index e88e931a68..f8e6d3987e 100644 --- a/meta/recipes-extended/ltp/ltp_20240129.bb +++ b/meta/recipes-extended/ltp/ltp_20240129.bb | |||
@@ -29,6 +29,7 @@ SRCREV = "68737d20556d37364c95776044b1119c0912a36a" | |||
29 | SRC_URI = "git://github.com/linux-test-project/ltp.git;branch=master;protocol=https \ | 29 | SRC_URI = "git://github.com/linux-test-project/ltp.git;branch=master;protocol=https \ |
30 | file://0001-Remove-OOM-tests-from-runtest-mm.patch \ | 30 | file://0001-Remove-OOM-tests-from-runtest-mm.patch \ |
31 | file://0001-scenario_groups-default-remove-connectors.patch \ | 31 | file://0001-scenario_groups-default-remove-connectors.patch \ |
32 | file://0001-sched_stress-Use-time_t-instead-of-long-for-type.patch \ | ||
32 | " | 33 | " |
33 | 34 | ||
34 | S = "${WORKDIR}/git" | 35 | S = "${WORKDIR}/git" |
diff --git a/meta/recipes-extended/msmtp/msmtp_1.8.25.bb b/meta/recipes-extended/msmtp/msmtp_1.8.26.bb index b575fad5e1..13820fe930 100644 --- a/meta/recipes-extended/msmtp/msmtp_1.8.25.bb +++ b/meta/recipes-extended/msmtp/msmtp_1.8.26.bb | |||
@@ -11,7 +11,7 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504" | |||
11 | UPSTREAM_CHECK_URI = "https://marlam.de/msmtp/download/" | 11 | UPSTREAM_CHECK_URI = "https://marlam.de/msmtp/download/" |
12 | 12 | ||
13 | SRC_URI = "https://marlam.de/${BPN}/releases/${BP}.tar.xz" | 13 | SRC_URI = "https://marlam.de/${BPN}/releases/${BP}.tar.xz" |
14 | SRC_URI[sha256sum] = "2dfe1dbbb397d26fe0b0b6b2e9cd2efdf9d72dd42d18e70d7f363ada2652d738" | 14 | SRC_URI[sha256sum] = "6cfc488344cef189267e60aea481f00d4c7e2a59b53c6c659c520a4d121f66d8" |
15 | 15 | ||
16 | inherit gettext autotools update-alternatives pkgconfig | 16 | inherit gettext autotools update-alternatives pkgconfig |
17 | 17 | ||
diff --git a/meta/recipes-extended/net-tools/net-tools_2.10.bb b/meta/recipes-extended/net-tools/net-tools_2.10.bb index 33304297ec..7facc0cc8d 100644 --- a/meta/recipes-extended/net-tools/net-tools_2.10.bb +++ b/meta/recipes-extended/net-tools/net-tools_2.10.bb | |||
@@ -31,8 +31,8 @@ PACKAGECONFIG[plipconfig] = "" | |||
31 | do_configure() { | 31 | do_configure() { |
32 | # net-tools has its own config mechanism requiring "make config" | 32 | # net-tools has its own config mechanism requiring "make config" |
33 | # we pre-generate desired options and copy to source directory instead | 33 | # we pre-generate desired options and copy to source directory instead |
34 | cp ${WORKDIR}/net-tools-config.h ${S}/config.h | 34 | cp ${UNPACKDIR}/net-tools-config.h ${S}/config.h |
35 | cp ${WORKDIR}/net-tools-config.make ${S}/config.make | 35 | cp ${UNPACKDIR}/net-tools-config.make ${S}/config.make |
36 | 36 | ||
37 | if [ "${USE_NLS}" = "no" ]; then | 37 | if [ "${USE_NLS}" = "no" ]; then |
38 | sed -i -e 's/^I18N=1/# I18N=1/' ${S}/config.make | 38 | sed -i -e 's/^I18N=1/# I18N=1/' ${S}/config.make |
diff --git a/meta/recipes-extended/shadow/files/0001-lib-copydir-copy_entry-use-temporary-stat-buffer.patch b/meta/recipes-extended/shadow/files/0001-lib-copydir-copy_entry-use-temporary-stat-buffer.patch deleted file mode 100644 index 2e5503bfd4..0000000000 --- a/meta/recipes-extended/shadow/files/0001-lib-copydir-copy_entry-use-temporary-stat-buffer.patch +++ /dev/null | |||
@@ -1,37 +0,0 @@ | |||
1 | From af4b8cb780587aa736692a3baa76b60474f19c5d Mon Sep 17 00:00:00 2001 | ||
2 | From: Enrico Scholz <enrico.scholz@sigma-chemnitz.de> | ||
3 | Date: Mon, 18 Mar 2024 12:14:21 +0100 | ||
4 | Subject: [PATCH] lib/copydir:copy_entry(): use temporary stat buffer | ||
5 | |||
6 | There are no guarantees that fstatat() does not clobber the stat | ||
7 | buffer on errors. | ||
8 | |||
9 | Use a temporary buffer so that the following code sees correct | ||
10 | attributes of the source entry. | ||
11 | |||
12 | Upstream-Status: Submitted [https://github.com/shadow-maint/shadow/pull/974] | ||
13 | |||
14 | Signed-off-by: Enrico Scholz <enrico.scholz@sigma-chemnitz.de> | ||
15 | --- | ||
16 | lib/copydir.c | 3 ++- | ||
17 | 1 file changed, 2 insertions(+), 1 deletion(-) | ||
18 | |||
19 | --- a/lib/copydir.c | ||
20 | +++ b/lib/copydir.c | ||
21 | @@ -400,6 +400,7 @@ static int copy_entry (const struct path | ||
22 | { | ||
23 | int err = 0; | ||
24 | struct stat sb; | ||
25 | + struct stat tmp_sb; | ||
26 | struct link_name *lp; | ||
27 | struct timespec mt[2]; | ||
28 | |||
29 | @@ -423,7 +424,7 @@ static int copy_entry (const struct path | ||
30 | * If the destination already exists do nothing. | ||
31 | * This is after the copy_dir above to still iterate into subdirectories. | ||
32 | */ | ||
33 | - if (fstatat(dst->dirfd, dst->name, &sb, AT_SYMLINK_NOFOLLOW) != -1) { | ||
34 | + if (fstatat(dst->dirfd, dst->name, &tmp_sb, AT_SYMLINK_NOFOLLOW) != -1) { | ||
35 | return err; | ||
36 | } | ||
37 | |||
diff --git a/meta/recipes-extended/shadow/shadow-securetty_4.6.bb b/meta/recipes-extended/shadow/shadow-securetty_4.6.bb index 85c04b6af1..913c159c81 100644 --- a/meta/recipes-extended/shadow/shadow-securetty_4.6.bb +++ b/meta/recipes-extended/shadow/shadow-securetty_4.6.bb | |||
@@ -8,7 +8,8 @@ INHIBIT_DEFAULT_DEPS = "1" | |||
8 | 8 | ||
9 | SRC_URI = "file://securetty" | 9 | SRC_URI = "file://securetty" |
10 | 10 | ||
11 | S = "${WORKDIR}" | 11 | S = "${WORKDIR}/sources" |
12 | UNPACKDIR = "${S}" | ||
12 | 13 | ||
13 | # Since SERIAL_CONSOLES is likely to be set from the machine configuration | 14 | # Since SERIAL_CONSOLES is likely to be set from the machine configuration |
14 | PACKAGE_ARCH = "${MACHINE_ARCH}" | 15 | PACKAGE_ARCH = "${MACHINE_ARCH}" |
diff --git a/meta/recipes-extended/shadow/shadow-sysroot_4.6.bb b/meta/recipes-extended/shadow/shadow-sysroot_4.6.bb index 00ab58b38c..13cfab6aab 100644 --- a/meta/recipes-extended/shadow/shadow-sysroot_4.6.bb +++ b/meta/recipes-extended/shadow/shadow-sysroot_4.6.bb | |||
@@ -13,7 +13,8 @@ DEPENDS = "base-passwd" | |||
13 | # can add custom users/groups for recipes that use inherit useradd. | 13 | # can add custom users/groups for recipes that use inherit useradd. |
14 | SRC_URI = "file://login.defs_shadow-sysroot" | 14 | SRC_URI = "file://login.defs_shadow-sysroot" |
15 | 15 | ||
16 | S = "${WORKDIR}" | 16 | S = "${WORKDIR}/sources" |
17 | UNPACKDIR = "${S}" | ||
17 | 18 | ||
18 | do_install() { | 19 | do_install() { |
19 | install -d ${D}${sysconfdir} | 20 | install -d ${D}${sysconfdir} |
diff --git a/meta/recipes-extended/shadow/shadow.inc b/meta/recipes-extended/shadow/shadow.inc index 25930b64c1..3991006b43 100644 --- a/meta/recipes-extended/shadow/shadow.inc +++ b/meta/recipes-extended/shadow/shadow.inc | |||
@@ -12,7 +12,6 @@ DEPENDS = "virtual/crypt" | |||
12 | 12 | ||
13 | GITHUB_BASE_URI = "https://github.com/shadow-maint/shadow/releases" | 13 | GITHUB_BASE_URI = "https://github.com/shadow-maint/shadow/releases" |
14 | SRC_URI = "${GITHUB_BASE_URI}/download/${PV}/${BP}.tar.gz \ | 14 | SRC_URI = "${GITHUB_BASE_URI}/download/${PV}/${BP}.tar.gz \ |
15 | file://0001-lib-copydir-copy_entry-use-temporary-stat-buffer.patch \ | ||
16 | ${@bb.utils.contains('PACKAGECONFIG', 'pam', '${PAM_SRC_URI}', '', d)} \ | 15 | ${@bb.utils.contains('PACKAGECONFIG', 'pam', '${PAM_SRC_URI}', '', d)} \ |
17 | file://useradd \ | 16 | file://useradd \ |
18 | " | 17 | " |
@@ -25,7 +24,7 @@ SRC_URI:append:class-target = " \ | |||
25 | SRC_URI:append:class-native = " \ | 24 | SRC_URI:append:class-native = " \ |
26 | file://commonio.c-fix-unexpected-open-failure-in-chroot-env.patch \ | 25 | file://commonio.c-fix-unexpected-open-failure-in-chroot-env.patch \ |
27 | " | 26 | " |
28 | SRC_URI[sha256sum] = "377fe0d7c1a0aa5e3514c08fdf5ddc70c9dcbb391678c2134445ed97326bcc26" | 27 | SRC_URI[sha256sum] = "b34686b89b279887ffbf1f33128902ccc0fa1a998a3add44213bb12d7385b218" |
29 | 28 | ||
30 | # Additional Policy files for PAM | 29 | # Additional Policy files for PAM |
31 | PAM_SRC_URI = "file://pam.d/chfn \ | 30 | PAM_SRC_URI = "file://pam.d/chfn \ |
diff --git a/meta/recipes-extended/shadow/shadow_4.15.0.bb b/meta/recipes-extended/shadow/shadow_4.15.1.bb index e57676c1da..e57676c1da 100644 --- a/meta/recipes-extended/shadow/shadow_4.15.0.bb +++ b/meta/recipes-extended/shadow/shadow_4.15.1.bb | |||
diff --git a/meta/recipes-extended/stress-ng/stress-ng_0.17.07.bb b/meta/recipes-extended/stress-ng/stress-ng_0.17.08.bb index fb88e06a7f..fffe6a1823 100644 --- a/meta/recipes-extended/stress-ng/stress-ng_0.17.07.bb +++ b/meta/recipes-extended/stress-ng/stress-ng_0.17.08.bb | |||
@@ -7,7 +7,7 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263" | |||
7 | 7 | ||
8 | SRC_URI = "git://github.com/ColinIanKing/stress-ng.git;protocol=https;branch=master \ | 8 | SRC_URI = "git://github.com/ColinIanKing/stress-ng.git;protocol=https;branch=master \ |
9 | " | 9 | " |
10 | SRCREV = "519151f460738cd62b69b84f8096cd218131e0a2" | 10 | SRCREV = "b7c7a5877501679a3b0a67d877e6274a801d1e4e" |
11 | S = "${WORKDIR}/git" | 11 | S = "${WORKDIR}/git" |
12 | 12 | ||
13 | DEPENDS = "coreutils-native libbsd" | 13 | DEPENDS = "coreutils-native libbsd" |
diff --git a/meta/recipes-extended/texinfo-dummy-native/texinfo-dummy-native.bb b/meta/recipes-extended/texinfo-dummy-native/texinfo-dummy-native.bb index a942ac2991..51d9c92766 100644 --- a/meta/recipes-extended/texinfo-dummy-native/texinfo-dummy-native.bb +++ b/meta/recipes-extended/texinfo-dummy-native/texinfo-dummy-native.bb | |||
@@ -8,7 +8,8 @@ PV = "1.0" | |||
8 | 8 | ||
9 | SRC_URI = "file://template.py file://COPYING" | 9 | SRC_URI = "file://template.py file://COPYING" |
10 | 10 | ||
11 | S = "${WORKDIR}" | 11 | S = "${WORKDIR}/sources" |
12 | UNPACKDIR = "${S}" | ||
12 | 13 | ||
13 | inherit native | 14 | inherit native |
14 | 15 | ||
diff --git a/meta/recipes-extended/timezone/tzdata.bb b/meta/recipes-extended/timezone/tzdata.bb index dd1960ffa7..2099b05db8 100644 --- a/meta/recipes-extended/timezone/tzdata.bb +++ b/meta/recipes-extended/timezone/tzdata.bb | |||
@@ -20,6 +20,7 @@ do_configure[cleandirs] = "${B}" | |||
20 | B = "${WORKDIR}/build" | 20 | B = "${WORKDIR}/build" |
21 | 21 | ||
22 | do_compile() { | 22 | do_compile() { |
23 | oe_runmake -C ${S} tzdata.zi | ||
23 | for zone in ${TZONES}; do | 24 | for zone in ${TZONES}; do |
24 | ${STAGING_BINDIR_NATIVE}/zic -b ${ZIC_FMT} -d ${B}/zoneinfo -L /dev/null ${S}/${zone} | 25 | ${STAGING_BINDIR_NATIVE}/zic -b ${ZIC_FMT} -d ${B}/zoneinfo -L /dev/null ${S}/${zone} |
25 | ${STAGING_BINDIR_NATIVE}/zic -b ${ZIC_FMT} -d ${B}/zoneinfo/posix -L /dev/null ${S}/${zone} | 26 | ${STAGING_BINDIR_NATIVE}/zic -b ${ZIC_FMT} -d ${B}/zoneinfo/posix -L /dev/null ${S}/${zone} |
@@ -37,6 +38,7 @@ do_install() { | |||
37 | cp -pP "${S}/iso3166.tab" ${D}${datadir}/zoneinfo | 38 | cp -pP "${S}/iso3166.tab" ${D}${datadir}/zoneinfo |
38 | cp -pP "${S}/leapseconds" ${D}${datadir}/zoneinfo | 39 | cp -pP "${S}/leapseconds" ${D}${datadir}/zoneinfo |
39 | cp -pP "${S}/leap-seconds.list" ${D}${datadir}/zoneinfo | 40 | cp -pP "${S}/leap-seconds.list" ${D}${datadir}/zoneinfo |
41 | cp -pP "${S}/tzdata.zi" ${D}${datadir}/zoneinfo | ||
40 | 42 | ||
41 | # Install default timezone | 43 | # Install default timezone |
42 | if [ -e ${D}${datadir}/zoneinfo/${DEFAULT_TIMEZONE} ]; then | 44 | if [ -e ${D}${datadir}/zoneinfo/${DEFAULT_TIMEZONE} ]; then |
@@ -141,6 +143,7 @@ FILES:tzdata-core += " \ | |||
141 | ${sysconfdir}/timezone \ | 143 | ${sysconfdir}/timezone \ |
142 | ${datadir}/zoneinfo/leapseconds \ | 144 | ${datadir}/zoneinfo/leapseconds \ |
143 | ${datadir}/zoneinfo/leap-seconds.list \ | 145 | ${datadir}/zoneinfo/leap-seconds.list \ |
146 | ${datadir}/zoneinfo/tzdata.zi \ | ||
144 | ${datadir}/zoneinfo/Pacific/Honolulu \ | 147 | ${datadir}/zoneinfo/Pacific/Honolulu \ |
145 | ${datadir}/zoneinfo/America/Anchorage \ | 148 | ${datadir}/zoneinfo/America/Anchorage \ |
146 | ${datadir}/zoneinfo/America/Los_Angeles \ | 149 | ${datadir}/zoneinfo/America/Los_Angeles \ |
diff --git a/meta/recipes-extended/watchdog/watchdog-config.bb b/meta/recipes-extended/watchdog/watchdog-config.bb index e826a7d4a6..17151ced5e 100644 --- a/meta/recipes-extended/watchdog/watchdog-config.bb +++ b/meta/recipes-extended/watchdog/watchdog-config.bb | |||
@@ -13,6 +13,9 @@ SRC_URI = " \ | |||
13 | file://watchdog.conf \ | 13 | file://watchdog.conf \ |
14 | " | 14 | " |
15 | 15 | ||
16 | S = "${WORKDIR}/sources" | ||
17 | UNPACKDIR = "${S}" | ||
18 | |||
16 | # The default value is 60 seconds when null. | 19 | # The default value is 60 seconds when null. |
17 | WATCHDOG_TIMEOUT ??= "" | 20 | WATCHDOG_TIMEOUT ??= "" |
18 | 21 | ||
diff --git a/meta/recipes-extended/zip/zip-3.0/0001-configure-Include-dirent.h-for-closedir-opendir-APIs.patch b/meta/recipes-extended/zip/zip-3.0/0001-configure-Include-dirent.h-for-closedir-opendir-APIs.patch new file mode 100644 index 0000000000..0d3af37ded --- /dev/null +++ b/meta/recipes-extended/zip/zip-3.0/0001-configure-Include-dirent.h-for-closedir-opendir-APIs.patch | |||
@@ -0,0 +1,45 @@ | |||
1 | From 9db2f8cdbbc0dfb359d3b4e5dfe48c18652ce531 Mon Sep 17 00:00:00 2001 | ||
2 | From: Khem Raj <raj.khem@gmail.com> | ||
3 | Date: Wed, 8 May 2024 19:02:46 -0700 | ||
4 | Subject: [PATCH] configure: Include dirent.h for closedir/opendir APIs | ||
5 | MIME-Version: 1.0 | ||
6 | Content-Type: text/plain; charset=UTF-8 | ||
7 | Content-Transfer-Encoding: 8bit | ||
8 | |||
9 | GCC-14 is strict about function prototypes and since the | ||
10 | testcase tries to compile/link opendir/closedir functions | ||
11 | without including signatures, it fails to build the test | ||
12 | due to missing signatures which come from dirent.h | ||
13 | |||
14 | Therefore include the needed system header and make it more | ||
15 | robust. | ||
16 | |||
17 | Fixes | ||
18 | a.c:2:21: error: implicit declaration of function ‘closedir’ [-Wimplicit-function-declaration] | ||
19 | 2 | int main() { return closedir(opendir(".")); } | ||
20 | | ^~~~~~~~ | ||
21 | a.c:2:30: error: implicit declaration of function ‘opendir’ [-Wimplicit-function-declaration] | ||
22 | 2 | int main() { return closedir(opendir(".")); } | ||
23 | | ^~~~~~~ | ||
24 | |||
25 | Upstream-Status: Inactive-Upstream | ||
26 | Signed-off-by: Khem Raj <raj.khem@gmail.com> | ||
27 | --- | ||
28 | unix/configure | 1 + | ||
29 | 1 file changed, 1 insertion(+) | ||
30 | |||
31 | diff --git a/unix/configure b/unix/configure | ||
32 | index f917086..1dd98c6 100644 | ||
33 | --- a/unix/configure | ||
34 | +++ b/unix/configure | ||
35 | @@ -591,6 +591,7 @@ $CC $CFLAGS -c conftest.c >/dev/null 2>/dev/null | ||
36 | |||
37 | echo Check for directory libraries | ||
38 | cat > conftest.c << _EOF_ | ||
39 | +#include <dirent.h> | ||
40 | int main() { return closedir(opendir(".")); } | ||
41 | _EOF_ | ||
42 | |||
43 | -- | ||
44 | 2.45.0 | ||
45 | |||
diff --git a/meta/recipes-extended/zip/zip-3.0/0002-unix.c-Do-not-redefine-DIR-as-FILE.patch b/meta/recipes-extended/zip/zip-3.0/0002-unix.c-Do-not-redefine-DIR-as-FILE.patch deleted file mode 100644 index a86e03e620..0000000000 --- a/meta/recipes-extended/zip/zip-3.0/0002-unix.c-Do-not-redefine-DIR-as-FILE.patch +++ /dev/null | |||
@@ -1,35 +0,0 @@ | |||
1 | From 76f5bf3546d826dcbc03acbefcf0b10b972bf136 Mon Sep 17 00:00:00 2001 | ||
2 | From: Khem Raj <raj.khem@gmail.com> | ||
3 | Date: Wed, 10 Aug 2022 17:19:38 -0700 | ||
4 | Subject: [PATCH 2/2] unix.c: Do not redefine DIR as FILE | ||
5 | |||
6 | DIR is already provided on Linux via | ||
7 | /usr/include/dirent.h system header | ||
8 | |||
9 | Upstream-Status: Inactive-Upstream | ||
10 | Signed-off-by: Khem Raj <raj.khem@gmail.com> | ||
11 | --- | ||
12 | unix/unix.c | 2 -- | ||
13 | 1 file changed, 2 deletions(-) | ||
14 | |||
15 | diff --git a/unix/unix.c b/unix/unix.c | ||
16 | index ba87614..6e6f4d2 100644 | ||
17 | --- a/unix/unix.c | ||
18 | +++ b/unix/unix.c | ||
19 | @@ -61,13 +61,11 @@ local time_t label_utim = 0; | ||
20 | /* Local functions */ | ||
21 | local char *readd OF((DIR *)); | ||
22 | |||
23 | - | ||
24 | #ifdef NO_DIR /* for AT&T 3B1 */ | ||
25 | #include <sys/dir.h> | ||
26 | #ifndef dirent | ||
27 | # define dirent direct | ||
28 | #endif | ||
29 | -typedef FILE DIR; | ||
30 | /* | ||
31 | ** Apparently originally by Rich Salz. | ||
32 | ** Cleaned up and modified by James W. Birdsall. | ||
33 | -- | ||
34 | 2.37.1 | ||
35 | |||
diff --git a/meta/recipes-extended/zip/zip_3.0.bb b/meta/recipes-extended/zip/zip_3.0.bb index 70df5ab872..ec54206335 100644 --- a/meta/recipes-extended/zip/zip_3.0.bb +++ b/meta/recipes-extended/zip/zip_3.0.bb | |||
@@ -17,8 +17,8 @@ SRC_URI = "${SOURCEFORGE_MIRROR}/infozip/Zip%203.x%20%28latest%29/3.0/zip30.tar. | |||
17 | file://0002-configure-support-PIC-code-build.patch \ | 17 | file://0002-configure-support-PIC-code-build.patch \ |
18 | file://0001-configure-Use-CFLAGS-and-LDFLAGS-when-doing-link-tes.patch \ | 18 | file://0001-configure-Use-CFLAGS-and-LDFLAGS-when-doing-link-tes.patch \ |
19 | file://0001-configure-Specify-correct-function-signatures-and-de.patch \ | 19 | file://0001-configure-Specify-correct-function-signatures-and-de.patch \ |
20 | file://0002-unix.c-Do-not-redefine-DIR-as-FILE.patch \ | ||
21 | file://0001-unix-configure-use-_Static_assert-to-do-correct-dete.patch \ | 20 | file://0001-unix-configure-use-_Static_assert-to-do-correct-dete.patch \ |
21 | file://0001-configure-Include-dirent.h-for-closedir-opendir-APIs.patch \ | ||
22 | " | 22 | " |
23 | UPSTREAM_VERSION_UNKNOWN = "1" | 23 | UPSTREAM_VERSION_UNKNOWN = "1" |
24 | 24 | ||
diff --git a/meta/recipes-extended/zstd/zstd/0001-pzstd-use-directly-for-the-test-c-snippet.patch b/meta/recipes-extended/zstd/zstd/0001-pzstd-use-directly-for-the-test-c-snippet.patch deleted file mode 100644 index 847a641691..0000000000 --- a/meta/recipes-extended/zstd/zstd/0001-pzstd-use-directly-for-the-test-c-snippet.patch +++ /dev/null | |||
@@ -1,39 +0,0 @@ | |||
1 | From 121ef5253a49065dea6a89536ca7bd3dabd40e25 Mon Sep 17 00:00:00 2001 | ||
2 | From: Alexander Kanavin <alex@linutronix.de> | ||
3 | Date: Mon, 19 Jun 2023 17:10:09 +0200 | ||
4 | Subject: [PATCH] pzstd: use c++14 without conditions | ||
5 | |||
6 | Doing this check with a direct c++ snippet is prone to portability problems: | ||
7 | |||
8 | - \043 is not portable between shells: dash expands it to #, | ||
9 | bash does not; | ||
10 | |||
11 | - using # directly works with make 4.3 but does not with make 4.2. | ||
12 | |||
13 | Let's just use the c++ version that covers both the code and the gtest. | ||
14 | |||
15 | Upstream-Status: Submitted [https://github.com/facebook/zstd/pull/3682] | ||
16 | Signed-off-by: Alexander Kanavin <alex@linutronix.de> | ||
17 | |||
18 | --- | ||
19 | contrib/pzstd/Makefile | 7 ++----- | ||
20 | 1 file changed, 2 insertions(+), 5 deletions(-) | ||
21 | |||
22 | diff --git a/contrib/pzstd/Makefile b/contrib/pzstd/Makefile | ||
23 | index e62f8e87..58fb82a1 100644 | ||
24 | --- a/contrib/pzstd/Makefile | ||
25 | +++ b/contrib/pzstd/Makefile | ||
26 | @@ -37,11 +37,8 @@ CFLAGS += -Wno-deprecated-declarations | ||
27 | PZSTD_INC = -I$(ZSTDDIR) -I$(ZSTDDIR)/common -I$(PROGDIR) -I. | ||
28 | GTEST_INC = -isystem googletest/googletest/include | ||
29 | |||
30 | -# If default C++ version is older than C++11, explicitly set C++11, which is the | ||
31 | -# minimum required by the code. | ||
32 | -ifeq ($(shell echo "\043if __cplusplus < 201103L\n\043error\n\043endif" | $(CXX) -x c++ -Werror -c - -o /dev/null 2>/dev/null && echo 1 || echo 0),0) | ||
33 | -PZSTD_CXX_STD := -std=c++11 | ||
34 | -endif | ||
35 | +# Set the minimum required by gtest | ||
36 | +PZSTD_CXX_STD := -std=c++14 | ||
37 | |||
38 | PZSTD_CPPFLAGS = $(PZSTD_INC) | ||
39 | PZSTD_CCXXFLAGS = | ||
diff --git a/meta/recipes-extended/zstd/zstd_1.5.5.bb b/meta/recipes-extended/zstd/zstd_1.5.6.bb index 2d72af50a4..0f3309d8af 100644 --- a/meta/recipes-extended/zstd/zstd_1.5.5.bb +++ b/meta/recipes-extended/zstd/zstd_1.5.6.bb | |||
@@ -10,10 +10,9 @@ LIC_FILES_CHKSUM = "file://LICENSE;md5=0822a32f7acdbe013606746641746ee8 \ | |||
10 | file://COPYING;md5=39bba7d2cf0ba1036f2a6e2be52fe3f0 \ | 10 | file://COPYING;md5=39bba7d2cf0ba1036f2a6e2be52fe3f0 \ |
11 | " | 11 | " |
12 | 12 | ||
13 | SRC_URI = "git://github.com/facebook/zstd.git;branch=release;protocol=https \ | 13 | SRC_URI = "git://github.com/facebook/zstd.git;branch=release;protocol=https" |
14 | file://0001-pzstd-use-directly-for-the-test-c-snippet.patch" | ||
15 | 14 | ||
16 | SRCREV = "63779c798237346c2b245c546c40b72a5a5913fe" | 15 | SRCREV = "794ea1b0afca0f020f4e57b6732332231fb23c70" |
17 | UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>\d+(\.\d+)+)" | 16 | UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>\d+(\.\d+)+)" |
18 | 17 | ||
19 | CVE_PRODUCT = "zstandard" | 18 | CVE_PRODUCT = "zstandard" |
diff --git a/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf/0001-meson.build-allow-a-subset-of-tests-in-cross-compile.patch b/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf/0001-meson.build-allow-a-subset-of-tests-in-cross-compile.patch index 3d685db774..24edda8102 100644 --- a/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf/0001-meson.build-allow-a-subset-of-tests-in-cross-compile.patch +++ b/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf/0001-meson.build-allow-a-subset-of-tests-in-cross-compile.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 4bfb696fd125f044e3df9f6983c4ad518d9552c7 Mon Sep 17 00:00:00 2001 | 1 | From 325a4cde99a00b84116ab7111d27e6973f3c5026 Mon Sep 17 00:00:00 2001 |
2 | From: Alexander Kanavin <alex@linutronix.de> | 2 | From: Alexander Kanavin <alex@linutronix.de> |
3 | Date: Thu, 26 Jan 2023 20:29:46 +0100 | 3 | Date: Thu, 26 Jan 2023 20:29:46 +0100 |
4 | Subject: [PATCH] meson.build: allow (a subset of) tests in cross compile | 4 | Subject: [PATCH] meson.build: allow (a subset of) tests in cross compile |
@@ -19,7 +19,7 @@ Signed-off-by: Alexander Kanavin <alex@linutronix.de> | |||
19 | 2 files changed, 9 insertions(+), 7 deletions(-) | 19 | 2 files changed, 9 insertions(+), 7 deletions(-) |
20 | 20 | ||
21 | diff --git a/meson.build b/meson.build | 21 | diff --git a/meson.build b/meson.build |
22 | index 78f3683..e0feaee 100644 | 22 | index 3eb3fcc..dc7e790 100644 |
23 | --- a/meson.build | 23 | --- a/meson.build |
24 | +++ b/meson.build | 24 | +++ b/meson.build |
25 | @@ -390,10 +390,10 @@ subdir('gdk-pixbuf') | 25 | @@ -390,10 +390,10 @@ subdir('gdk-pixbuf') |
@@ -37,7 +37,7 @@ index 78f3683..e0feaee 100644 | |||
37 | endif | 37 | endif |
38 | 38 | ||
39 | diff --git a/tests/meson.build b/tests/meson.build | 39 | diff --git a/tests/meson.build b/tests/meson.build |
40 | index 78d0ad9..0c9e64e 100644 | 40 | index 3781066..911b5fb 100644 |
41 | --- a/tests/meson.build | 41 | --- a/tests/meson.build |
42 | +++ b/tests/meson.build | 42 | +++ b/tests/meson.build |
43 | @@ -4,7 +4,7 @@ | 43 | @@ -4,7 +4,7 @@ |
@@ -49,7 +49,7 @@ index 78d0ad9..0c9e64e 100644 | |||
49 | # Resources; we cannot use gnome.compile_resources() here, because we need to | 49 | # Resources; we cannot use gnome.compile_resources() here, because we need to |
50 | # override the environment in order to use the utilities we just built instead | 50 | # override the environment in order to use the utilities we just built instead |
51 | # of the system ones | 51 | # of the system ones |
52 | @@ -172,9 +172,11 @@ endif | 52 | @@ -164,9 +164,11 @@ endif |
53 | test_deps = gdk_pixbuf_deps + [ gdkpixbuf_dep, ] | 53 | test_deps = gdk_pixbuf_deps + [ gdkpixbuf_dep, ] |
54 | test_args = [ '-k' ] | 54 | test_args = [ '-k' ] |
55 | test_env = environment() | 55 | test_env = environment() |
diff --git a/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf/fatal-loader.patch b/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf/fatal-loader.patch index 80c93e2166..3b4bf62861 100644 --- a/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf/fatal-loader.patch +++ b/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf/fatal-loader.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 9b4f5738f8ac30f393b6163dcc84757976683d9b Mon Sep 17 00:00:00 2001 | 1 | From f78ab4edaee5f62663a9a4bcfa56e5c524da4474 Mon Sep 17 00:00:00 2001 |
2 | From: Ross Burton <ross.burton@intel.com> | 2 | From: Ross Burton <ross.burton@intel.com> |
3 | Date: Tue, 1 Apr 2014 17:23:36 +0100 | 3 | Date: Tue, 1 Apr 2014 17:23:36 +0100 |
4 | Subject: [PATCH] gdk-pixbuf: add an option so that loader errors are fatal | 4 | Subject: [PATCH] gdk-pixbuf: add an option so that loader errors are fatal |
diff --git a/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.42.11.bb b/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.42.12.bb index ef0f23f8f7..9f825a68ef 100644 --- a/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.42.11.bb +++ b/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.42.12.bb | |||
@@ -22,7 +22,7 @@ SRC_URI = "${GNOME_MIRROR}/${BPN}/${MAJ_VER}/${BPN}-${PV}.tar.xz \ | |||
22 | file://0001-meson.build-allow-a-subset-of-tests-in-cross-compile.patch \ | 22 | file://0001-meson.build-allow-a-subset-of-tests-in-cross-compile.patch \ |
23 | " | 23 | " |
24 | 24 | ||
25 | SRC_URI[sha256sum] = "49dcb402388708647e8c321d56b6fb30f21e51e515d0c5a942268d23052a2f00" | 25 | SRC_URI[sha256sum] = "b9505b3445b9a7e48ced34760c3bcb73e966df3ac94c95a148cb669ab748e3c7" |
26 | 26 | ||
27 | inherit meson pkgconfig gettext pixbufcache ptest-gnome upstream-version-is-even gobject-introspection gi-docgen lib_package | 27 | inherit meson pkgconfig gettext pixbufcache ptest-gnome upstream-version-is-even gobject-introspection gi-docgen lib_package |
28 | 28 | ||
diff --git a/meta/recipes-gnome/gobject-introspection/gobject-introspection/0001-Relocate-the-repository-directory-for-native-builds.patch b/meta/recipes-gnome/gobject-introspection/gobject-introspection/0001-Relocate-the-repository-directory-for-native-builds.patch index c9e1afffd0..510bc426d1 100644 --- a/meta/recipes-gnome/gobject-introspection/gobject-introspection/0001-Relocate-the-repository-directory-for-native-builds.patch +++ b/meta/recipes-gnome/gobject-introspection/gobject-introspection/0001-Relocate-the-repository-directory-for-native-builds.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From aeb5532f8be42d42f4e8725ca42e239b36983a4d Mon Sep 17 00:00:00 2001 | 1 | From 4e1aa0ddbc43403ff24f644b4c4912b737290c41 Mon Sep 17 00:00:00 2001 |
2 | From: Sascha Silbe <x-yo17@se-silbe.de> | 2 | From: Sascha Silbe <x-yo17@se-silbe.de> |
3 | Date: Fri, 8 Jun 2018 13:55:10 +0200 | 3 | Date: Fri, 8 Jun 2018 13:55:10 +0200 |
4 | Subject: [PATCH] Relocate the repository directory for native builds | 4 | Subject: [PATCH] Relocate the repository directory for native builds |
@@ -14,14 +14,13 @@ cache or sstate mirror). | |||
14 | 14 | ||
15 | Upstream-Status: Inappropriate | 15 | Upstream-Status: Inappropriate |
16 | Signed-off-by: Sascha Silbe <x-yo17@se-silbe.de> | 16 | Signed-off-by: Sascha Silbe <x-yo17@se-silbe.de> |
17 | |||
18 | --- | 17 | --- |
19 | girepository/girepository.c | 15 +++++++++++++-- | 18 | girepository/girepository.c | 15 +++++++++++++-- |
20 | girepository/meson.build | 2 +- | 19 | girepository/meson.build | 2 +- |
21 | 2 files changed, 14 insertions(+), 3 deletions(-) | 20 | 2 files changed, 14 insertions(+), 3 deletions(-) |
22 | 21 | ||
23 | diff --git a/girepository/girepository.c b/girepository/girepository.c | 22 | diff --git a/girepository/girepository.c b/girepository/girepository.c |
24 | index a0754f4..2d456f9 100644 | 23 | index 12eaf36..876382c 100644 |
25 | --- a/girepository/girepository.c | 24 | --- a/girepository/girepository.c |
26 | +++ b/girepository/girepository.c | 25 | +++ b/girepository/girepository.c |
27 | @@ -21,6 +21,8 @@ | 26 | @@ -21,6 +21,8 @@ |
@@ -42,7 +41,7 @@ index a0754f4..2d456f9 100644 | |||
42 | /** | 41 | /** |
43 | * SECTION:girepository | 42 | * SECTION:girepository |
44 | * @short_description: GObject Introspection repository manager | 43 | * @short_description: GObject Introspection repository manager |
45 | @@ -215,9 +219,16 @@ init_globals (void) | 44 | @@ -222,9 +226,16 @@ init_globals (void) |
46 | g_free (custom_dirs); | 45 | g_free (custom_dirs); |
47 | } | 46 | } |
48 | 47 | ||
@@ -62,10 +61,10 @@ index a0754f4..2d456f9 100644 | |||
62 | typelib_search_path = g_slist_prepend (typelib_search_path, typelib_dir); | 61 | typelib_search_path = g_slist_prepend (typelib_search_path, typelib_dir); |
63 | 62 | ||
64 | diff --git a/girepository/meson.build b/girepository/meson.build | 63 | diff --git a/girepository/meson.build b/girepository/meson.build |
65 | index 786749a..15cf2a9 100644 | 64 | index 5ced59e..a580d9b 100644 |
66 | --- a/girepository/meson.build | 65 | --- a/girepository/meson.build |
67 | +++ b/girepository/meson.build | 66 | +++ b/girepository/meson.build |
68 | @@ -45,7 +45,7 @@ girepo_internals_lib = static_library('girepository-internals', | 67 | @@ -47,7 +47,7 @@ girepo_internals_lib = static_library('girepository-internals', |
69 | ], | 68 | ], |
70 | c_args: gi_hidden_visibility_cflags + custom_c_args, | 69 | c_args: gi_hidden_visibility_cflags + custom_c_args, |
71 | include_directories : configinc, | 70 | include_directories : configinc, |
diff --git a/meta/recipes-gnome/gobject-introspection/gobject-introspection_1.78.1.bb b/meta/recipes-gnome/gobject-introspection/gobject-introspection_1.80.1.bb index 05a08a50e0..582ac248fd 100644 --- a/meta/recipes-gnome/gobject-introspection/gobject-introspection_1.78.1.bb +++ b/meta/recipes-gnome/gobject-introspection/gobject-introspection_1.80.1.bb | |||
@@ -16,7 +16,7 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=c434e8128a68bedd59b80b2ac1eb1c4a \ | |||
16 | SRC_URI = "${GNOME_MIRROR}/${BPN}/${@oe.utils.trim_version("${PV}", 2)}/${BPN}-${PV}.tar.xz \ | 16 | SRC_URI = "${GNOME_MIRROR}/${BPN}/${@oe.utils.trim_version("${PV}", 2)}/${BPN}-${PV}.tar.xz \ |
17 | " | 17 | " |
18 | 18 | ||
19 | SRC_URI[sha256sum] = "bd7babd99af7258e76819e45ba4a6bc399608fe762d83fde3cac033c50841bb4" | 19 | SRC_URI[sha256sum] = "a1df7c424e15bda1ab639c00e9051b9adf5cea1a9e512f8a603b53cd199bc6d8" |
20 | 20 | ||
21 | SRC_URI:append:class-native = " file://0001-Relocate-the-repository-directory-for-native-builds.patch" | 21 | SRC_URI:append:class-native = " file://0001-Relocate-the-repository-directory-for-native-builds.patch" |
22 | 22 | ||
@@ -26,7 +26,9 @@ GTKDOC_MESON_OPTION = "gtk_doc" | |||
26 | 26 | ||
27 | MULTILIB_SCRIPTS = "${PN}:${bindir}/g-ir-annotation-tool ${PN}:${bindir}/g-ir-scanner" | 27 | MULTILIB_SCRIPTS = "${PN}:${bindir}/g-ir-annotation-tool ${PN}:${bindir}/g-ir-scanner" |
28 | 28 | ||
29 | DEPENDS += " libffi zlib glib-2.0 python3 flex-native bison-native" | 29 | DEPENDS += " libffi zlib python3 flex-native bison-native" |
30 | DEPENDS:append:class-native = " glib-2.0" | ||
31 | DEPENDS:append:class-target = " glib-2.0-initial" | ||
30 | 32 | ||
31 | # target build needs qemu to run temporary introspection binaries created | 33 | # target build needs qemu to run temporary introspection binaries created |
32 | # on the fly by g-ir-scanner and a native version of itself to run | 34 | # on the fly by g-ir-scanner and a native version of itself to run |
@@ -189,6 +191,6 @@ FILES:${PN}-dbg += "${libdir}/gobject-introspection/giscanner/.debug/" | |||
189 | FILES:${PN}-staticdev += "${libdir}/gobject-introspection/giscanner/*.a" | 191 | FILES:${PN}-staticdev += "${libdir}/gobject-introspection/giscanner/*.a" |
190 | 192 | ||
191 | # setuptools can be removed when upstream removes all uses of distutils | 193 | # setuptools can be removed when upstream removes all uses of distutils |
192 | RDEPENDS:${PN} = "python3-pickle python3-xml python3-setuptools" | 194 | RDEPENDS:${PN} = "python3-pickle python3-xml python3-setuptools glib-2.0" |
193 | 195 | ||
194 | BBCLASSEXTEND = "native" | 196 | BBCLASSEXTEND = "native" |
diff --git a/meta/recipes-gnome/gtk+/gtk+3_3.24.41.bb b/meta/recipes-gnome/gtk+/gtk+3_3.24.42.bb index 17e90c59f0..61cecce4d1 100644 --- a/meta/recipes-gnome/gtk+/gtk+3_3.24.41.bb +++ b/meta/recipes-gnome/gtk+/gtk+3_3.24.42.bb | |||
@@ -5,7 +5,7 @@ MAJ_VER = "${@oe.utils.trim_version("${PV}", 2)}" | |||
5 | SRC_URI = "http://ftp.gnome.org/pub/gnome/sources/gtk+/${MAJ_VER}/gtk+-${PV}.tar.xz \ | 5 | SRC_URI = "http://ftp.gnome.org/pub/gnome/sources/gtk+/${MAJ_VER}/gtk+-${PV}.tar.xz \ |
6 | file://opengl.patch \ | 6 | file://opengl.patch \ |
7 | " | 7 | " |
8 | SRC_URI[sha256sum] = "47da61487af3087a94bc49296fd025ca0bc02f96ef06c556e7c8988bd651b6fa" | 8 | SRC_URI[sha256sum] = "50f89f615092d4dd01bbd759719f8bd380e5f149f6fd78a94725e2de112377e2" |
9 | 9 | ||
10 | S = "${WORKDIR}/gtk+-${PV}" | 10 | S = "${WORKDIR}/gtk+-${PV}" |
11 | 11 | ||
@@ -14,4 +14,4 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=5f30f0716dfdd0d91eb439ebec522ec2 \ | |||
14 | file://gdk/gdk.h;endline=25;md5=c920ce39dc88c6f06d3e7c50e08086f2 \ | 14 | file://gdk/gdk.h;endline=25;md5=c920ce39dc88c6f06d3e7c50e08086f2 \ |
15 | file://tests/testgtk.c;endline=25;md5=cb732daee1d82af7a2bf953cf3cf26f1" | 15 | file://tests/testgtk.c;endline=25;md5=cb732daee1d82af7a2bf953cf3cf26f1" |
16 | 16 | ||
17 | CVE_PRODUCT = "gnome:gtk" | 17 | CVE_PRODUCT = "gnome:gtk gtk:gtk\+" |
diff --git a/meta/recipes-gnome/gtk+/gtk4_4.14.2.bb b/meta/recipes-gnome/gtk+/gtk4_4.14.4.bb index 39483357b4..cda0552f7c 100644 --- a/meta/recipes-gnome/gtk+/gtk4_4.14.2.bb +++ b/meta/recipes-gnome/gtk+/gtk4_4.14.4.bb | |||
@@ -37,7 +37,7 @@ MAJ_VER = "${@oe.utils.trim_version("${PV}", 2)}" | |||
37 | UPSTREAM_CHECK_REGEX = "gtk-(?P<pver>\d+\.(\d*[02468])+(\.\d+)+)\.tar.xz" | 37 | UPSTREAM_CHECK_REGEX = "gtk-(?P<pver>\d+\.(\d*[02468])+(\.\d+)+)\.tar.xz" |
38 | 38 | ||
39 | SRC_URI = "http://ftp.gnome.org/pub/gnome/sources/gtk/${MAJ_VER}/gtk-${PV}.tar.xz" | 39 | SRC_URI = "http://ftp.gnome.org/pub/gnome/sources/gtk/${MAJ_VER}/gtk-${PV}.tar.xz" |
40 | SRC_URI[sha256sum] = "22604cef2898a79e5f2143bb7aee2b7d1fa2eb946989a9d1338ecf9c8ae0e072" | 40 | SRC_URI[sha256sum] = "443518b97e8348f9f6430ac435b1010f9a6c5207f4dc6a7cd5d24e3820cee633" |
41 | 41 | ||
42 | S = "${WORKDIR}/gtk-${PV}" | 42 | S = "${WORKDIR}/gtk-${PV}" |
43 | 43 | ||
diff --git a/meta/recipes-gnome/gtk-doc/files/0001-Do-not-error-out-if-xsltproc-is-not-found.patch b/meta/recipes-gnome/gtk-doc/files/0001-Do-not-error-out-if-xsltproc-is-not-found.patch index 99ae4d4417..ee1acda401 100644 --- a/meta/recipes-gnome/gtk-doc/files/0001-Do-not-error-out-if-xsltproc-is-not-found.patch +++ b/meta/recipes-gnome/gtk-doc/files/0001-Do-not-error-out-if-xsltproc-is-not-found.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 8b7fbbb405959f2868ad6eadd7dd00018758a8a5 Mon Sep 17 00:00:00 2001 | 1 | From 50d3b9d3ca0e1ff47c8e7559303855530e58839a Mon Sep 17 00:00:00 2001 |
2 | From: Alexander Kanavin <alex.kanavin@gmail.com> | 2 | From: Alexander Kanavin <alex.kanavin@gmail.com> |
3 | Date: Wed, 7 Sep 2016 14:52:04 +0300 | 3 | Date: Wed, 7 Sep 2016 14:52:04 +0300 |
4 | Subject: [PATCH] Do not error out if xsltproc is not found. | 4 | Subject: [PATCH] Do not error out if xsltproc is not found. |
@@ -14,10 +14,10 @@ Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com> | |||
14 | 1 file changed, 1 insertion(+), 1 deletion(-) | 14 | 1 file changed, 1 insertion(+), 1 deletion(-) |
15 | 15 | ||
16 | diff --git a/configure.ac b/configure.ac | 16 | diff --git a/configure.ac b/configure.ac |
17 | index b0c88d7..2a61d6e 100644 | 17 | index 8725074..0491090 100644 |
18 | --- a/configure.ac | 18 | --- a/configure.ac |
19 | +++ b/configure.ac | 19 | +++ b/configure.ac |
20 | @@ -58,7 +58,7 @@ dnl Check for xsltproc | 20 | @@ -37,7 +37,7 @@ dnl Check for xsltproc |
21 | dnl | 21 | dnl |
22 | AC_PATH_PROG([XSLTPROC], [xsltproc]) | 22 | AC_PATH_PROG([XSLTPROC], [xsltproc]) |
23 | if test -z "$XSLTPROC"; then | 23 | if test -z "$XSLTPROC"; then |
@@ -26,6 +26,3 @@ index b0c88d7..2a61d6e 100644 | |||
26 | fi | 26 | fi |
27 | 27 | ||
28 | dnl | 28 | dnl |
29 | -- | ||
30 | 2.9.3 | ||
31 | |||
diff --git a/meta/recipes-gnome/gtk-doc/files/0001-Do-not-hardocode-paths-to-perl-python-in-scripts.patch b/meta/recipes-gnome/gtk-doc/files/0001-Do-not-hardocode-paths-to-perl-python-in-scripts.patch index 19e05f1b99..f329539fa8 100644 --- a/meta/recipes-gnome/gtk-doc/files/0001-Do-not-hardocode-paths-to-perl-python-in-scripts.patch +++ b/meta/recipes-gnome/gtk-doc/files/0001-Do-not-hardocode-paths-to-perl-python-in-scripts.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 80e6aff72affa6d92f5abd7ff6353dfc4a7bff38 Mon Sep 17 00:00:00 2001 | 1 | From 64163565f8d6853b02e53308a6e6ba23d9d96299 Mon Sep 17 00:00:00 2001 |
2 | From: Alexander Kanavin <alex.kanavin@gmail.com> | 2 | From: Alexander Kanavin <alex.kanavin@gmail.com> |
3 | Date: Wed, 31 Aug 2016 16:44:46 +0300 | 3 | Date: Wed, 31 Aug 2016 16:44:46 +0300 |
4 | Subject: [PATCH] Do not hardocode paths to perl/python in scripts. | 4 | Subject: [PATCH] Do not hardocode paths to perl/python in scripts. |
@@ -9,7 +9,6 @@ on targets either. | |||
9 | 9 | ||
10 | Upstream-Status: Inappropriate [oe-core specific] | 10 | Upstream-Status: Inappropriate [oe-core specific] |
11 | Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com> | 11 | Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com> |
12 | |||
13 | --- | 12 | --- |
14 | gtkdoc-check.in | 2 +- | 13 | gtkdoc-check.in | 2 +- |
15 | gtkdoc-depscan.in | 2 +- | 14 | gtkdoc-depscan.in | 2 +- |
@@ -25,7 +24,7 @@ Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com> | |||
25 | 11 files changed, 11 insertions(+), 11 deletions(-) | 24 | 11 files changed, 11 insertions(+), 11 deletions(-) |
26 | 25 | ||
27 | diff --git a/gtkdoc-check.in b/gtkdoc-check.in | 26 | diff --git a/gtkdoc-check.in b/gtkdoc-check.in |
28 | index 8c8e917..f6a25f6 100755 | 27 | index d9e2ad5..9ec526d 100755 |
29 | --- a/gtkdoc-check.in | 28 | --- a/gtkdoc-check.in |
30 | +++ b/gtkdoc-check.in | 29 | +++ b/gtkdoc-check.in |
31 | @@ -1,4 +1,4 @@ | 30 | @@ -1,4 +1,4 @@ |
@@ -35,7 +34,7 @@ index 8c8e917..f6a25f6 100755 | |||
35 | # | 34 | # |
36 | # gtk-doc - GTK DocBook documentation generator. | 35 | # gtk-doc - GTK DocBook documentation generator. |
37 | diff --git a/gtkdoc-depscan.in b/gtkdoc-depscan.in | 36 | diff --git a/gtkdoc-depscan.in b/gtkdoc-depscan.in |
38 | index 9bfaf30..aadc952 100755 | 37 | index c43317c..200837c 100755 |
39 | --- a/gtkdoc-depscan.in | 38 | --- a/gtkdoc-depscan.in |
40 | +++ b/gtkdoc-depscan.in | 39 | +++ b/gtkdoc-depscan.in |
41 | @@ -1,4 +1,4 @@ | 40 | @@ -1,4 +1,4 @@ |
@@ -45,7 +44,7 @@ index 9bfaf30..aadc952 100755 | |||
45 | from __future__ import print_function | 44 | from __future__ import print_function |
46 | 45 | ||
47 | diff --git a/gtkdoc-fixxref.in b/gtkdoc-fixxref.in | 46 | diff --git a/gtkdoc-fixxref.in b/gtkdoc-fixxref.in |
48 | index 44f6bd1..26fbd93 100755 | 47 | index f9c4905..ed2ead1 100755 |
49 | --- a/gtkdoc-fixxref.in | 48 | --- a/gtkdoc-fixxref.in |
50 | +++ b/gtkdoc-fixxref.in | 49 | +++ b/gtkdoc-fixxref.in |
51 | @@ -1,4 +1,4 @@ | 50 | @@ -1,4 +1,4 @@ |
@@ -55,7 +54,7 @@ index 44f6bd1..26fbd93 100755 | |||
55 | # | 54 | # |
56 | # gtk-doc - GTK DocBook documentation generator. | 55 | # gtk-doc - GTK DocBook documentation generator. |
57 | diff --git a/gtkdoc-mkdb.in b/gtkdoc-mkdb.in | 56 | diff --git a/gtkdoc-mkdb.in b/gtkdoc-mkdb.in |
58 | index 42d5731..3a56d56 100755 | 57 | index 641f3c4..35e95fb 100755 |
59 | --- a/gtkdoc-mkdb.in | 58 | --- a/gtkdoc-mkdb.in |
60 | +++ b/gtkdoc-mkdb.in | 59 | +++ b/gtkdoc-mkdb.in |
61 | @@ -1,4 +1,4 @@ | 60 | @@ -1,4 +1,4 @@ |
@@ -65,7 +64,7 @@ index 42d5731..3a56d56 100755 | |||
65 | # | 64 | # |
66 | # gtk-doc - GTK DocBook documentation generator. | 65 | # gtk-doc - GTK DocBook documentation generator. |
67 | diff --git a/gtkdoc-mkhtml.in b/gtkdoc-mkhtml.in | 66 | diff --git a/gtkdoc-mkhtml.in b/gtkdoc-mkhtml.in |
68 | index 0d0a15d..914ff55 100755 | 67 | index b09f7a9..8d45966 100755 |
69 | --- a/gtkdoc-mkhtml.in | 68 | --- a/gtkdoc-mkhtml.in |
70 | +++ b/gtkdoc-mkhtml.in | 69 | +++ b/gtkdoc-mkhtml.in |
71 | @@ -1,4 +1,4 @@ | 70 | @@ -1,4 +1,4 @@ |
@@ -75,7 +74,7 @@ index 0d0a15d..914ff55 100755 | |||
75 | # | 74 | # |
76 | # gtk-doc - GTK DocBook documentation generator. | 75 | # gtk-doc - GTK DocBook documentation generator. |
77 | diff --git a/gtkdoc-mkman.in b/gtkdoc-mkman.in | 76 | diff --git a/gtkdoc-mkman.in b/gtkdoc-mkman.in |
78 | index c5445cd..65db71a 100755 | 77 | index 8f7b018..9bac0b6 100755 |
79 | --- a/gtkdoc-mkman.in | 78 | --- a/gtkdoc-mkman.in |
80 | +++ b/gtkdoc-mkman.in | 79 | +++ b/gtkdoc-mkman.in |
81 | @@ -1,4 +1,4 @@ | 80 | @@ -1,4 +1,4 @@ |
@@ -85,7 +84,7 @@ index c5445cd..65db71a 100755 | |||
85 | # | 84 | # |
86 | # gtk-doc - GTK DocBook documentation generator. | 85 | # gtk-doc - GTK DocBook documentation generator. |
87 | diff --git a/gtkdoc-mkpdf.in b/gtkdoc-mkpdf.in | 86 | diff --git a/gtkdoc-mkpdf.in b/gtkdoc-mkpdf.in |
88 | index e8c0c03..f807236 100755 | 87 | index 7dca4b0..808f92d 100755 |
89 | --- a/gtkdoc-mkpdf.in | 88 | --- a/gtkdoc-mkpdf.in |
90 | +++ b/gtkdoc-mkpdf.in | 89 | +++ b/gtkdoc-mkpdf.in |
91 | @@ -1,4 +1,4 @@ | 90 | @@ -1,4 +1,4 @@ |
@@ -95,7 +94,7 @@ index e8c0c03..f807236 100755 | |||
95 | # | 94 | # |
96 | # gtk-doc - GTK DocBook documentation generator. | 95 | # gtk-doc - GTK DocBook documentation generator. |
97 | diff --git a/gtkdoc-rebase.in b/gtkdoc-rebase.in | 96 | diff --git a/gtkdoc-rebase.in b/gtkdoc-rebase.in |
98 | index 17a71c2..ec3fd28 100755 | 97 | index e8b0bf5..a960d8f 100755 |
99 | --- a/gtkdoc-rebase.in | 98 | --- a/gtkdoc-rebase.in |
100 | +++ b/gtkdoc-rebase.in | 99 | +++ b/gtkdoc-rebase.in |
101 | @@ -1,4 +1,4 @@ | 100 | @@ -1,4 +1,4 @@ |
@@ -105,7 +104,7 @@ index 17a71c2..ec3fd28 100755 | |||
105 | # | 104 | # |
106 | # gtk-doc - GTK DocBook documentation generator. | 105 | # gtk-doc - GTK DocBook documentation generator. |
107 | diff --git a/gtkdoc-scan.in b/gtkdoc-scan.in | 106 | diff --git a/gtkdoc-scan.in b/gtkdoc-scan.in |
108 | index 954c811..f461504 100755 | 107 | index 7893ebc..b7eb83d 100755 |
109 | --- a/gtkdoc-scan.in | 108 | --- a/gtkdoc-scan.in |
110 | +++ b/gtkdoc-scan.in | 109 | +++ b/gtkdoc-scan.in |
111 | @@ -1,4 +1,4 @@ | 110 | @@ -1,4 +1,4 @@ |
@@ -115,7 +114,7 @@ index 954c811..f461504 100755 | |||
115 | # | 114 | # |
116 | # gtk-doc - GTK DocBook documentation generator. | 115 | # gtk-doc - GTK DocBook documentation generator. |
117 | diff --git a/gtkdoc-scangobj.in b/gtkdoc-scangobj.in | 116 | diff --git a/gtkdoc-scangobj.in b/gtkdoc-scangobj.in |
118 | index 4cbe130..52c2c24 100755 | 117 | index 7660c70..c6ff072 100755 |
119 | --- a/gtkdoc-scangobj.in | 118 | --- a/gtkdoc-scangobj.in |
120 | +++ b/gtkdoc-scangobj.in | 119 | +++ b/gtkdoc-scangobj.in |
121 | @@ -1,4 +1,4 @@ | 120 | @@ -1,4 +1,4 @@ |
diff --git a/meta/recipes-gnome/gtk-doc/files/0001-Don-t-use-docdir-from-environment.patch b/meta/recipes-gnome/gtk-doc/files/0001-Don-t-use-docdir-from-environment.patch deleted file mode 100644 index f40124877c..0000000000 --- a/meta/recipes-gnome/gtk-doc/files/0001-Don-t-use-docdir-from-environment.patch +++ /dev/null | |||
@@ -1,24 +0,0 @@ | |||
1 | From 72dfeec0e49478b0bfb471c4155044391bad8e6c Mon Sep 17 00:00:00 2001 | ||
2 | From: Ross Burton <ross.burton@arm.com> | ||
3 | Date: Fri, 8 Dec 2023 10:35:25 +0000 | ||
4 | Subject: [PATCH] Don't use docdir from environment | ||
5 | |||
6 | Upstream-Status: Submitted [https://gitlab.gnome.org/GNOME/gtk-doc/-/merge_requests/73] | ||
7 | Signed-off-by: Ross Burton <ross.burton@arm.com> | ||
8 | --- | ||
9 | buildsystems/autotools/gtkdocize.in | 2 +- | ||
10 | 1 file changed, 1 insertion(+), 1 deletion(-) | ||
11 | |||
12 | diff --git a/buildsystems/autotools/gtkdocize.in b/buildsystems/autotools/gtkdocize.in | ||
13 | index 83127bf..76dcbfd 100755 | ||
14 | --- a/buildsystems/autotools/gtkdocize.in | ||
15 | +++ b/buildsystems/autotools/gtkdocize.in | ||
16 | @@ -39,7 +39,7 @@ set - $args | ||
17 | |||
18 | # assume working directory if srcdir is not set | ||
19 | test "$srcdir" || srcdir=. | ||
20 | -test "$docdir" || docdir="$srcdir" | ||
21 | +docdir="$srcdir" | ||
22 | |||
23 | # detect configure script | ||
24 | no_configure_found=0 | ||
diff --git a/meta/recipes-gnome/gtk-doc/files/conditionaltests.patch b/meta/recipes-gnome/gtk-doc/files/conditionaltests.patch index 21c2db7826..55cae34b46 100644 --- a/meta/recipes-gnome/gtk-doc/files/conditionaltests.patch +++ b/meta/recipes-gnome/gtk-doc/files/conditionaltests.patch | |||
@@ -1,23 +1,22 @@ | |||
1 | From 78bbf185934147a69ceb4b617d424e12e70997bf Mon Sep 17 00:00:00 2001 | 1 | From 9aa9230a305e964b2417daea2b51e43b2dee607c Mon Sep 17 00:00:00 2001 |
2 | From: Richard Purdie <richard.purdie@linuxfoundation.org> | 2 | From: Richard Purdie <richard.purdie@linuxfoundation.org> |
3 | Date: Tue, 27 Jun 2017 21:00:58 +0100 | 3 | Date: Tue, 27 Jun 2017 21:00:58 +0100 |
4 | Subject: [PATCH 3/3] gtk-doc: Handle floating gtk-doc dependency | 4 | Subject: [PATCH] gtk-doc: Handle floating gtk-doc dependency |
5 | 5 | ||
6 | Allow the tests to be explicitly disabled to avoid floating dependnecy | 6 | Allow the tests to be explicitly disabled to avoid floating dependnecy |
7 | issues. This is not really an issue with RSS but is on previous releases. | 7 | issues. This is not really an issue with RSS but is on previous releases. |
8 | 8 | ||
9 | RP 2017/6/27 | 9 | RP 2017/6/27 |
10 | Upstream-Status: Pending | 10 | Upstream-Status: Pending |
11 | |||
12 | --- | 11 | --- |
13 | configure.ac | 10 ++++++++++ | 12 | configure.ac | 10 ++++++++++ |
14 | 1 file changed, 10 insertions(+) | 13 | 1 file changed, 10 insertions(+) |
15 | 14 | ||
16 | diff --git a/configure.ac b/configure.ac | 15 | diff --git a/configure.ac b/configure.ac |
17 | index 684e2d1..e5e3aab 100644 | 16 | index 0491090..4270c88 100644 |
18 | --- a/configure.ac | 17 | --- a/configure.ac |
19 | +++ b/configure.ac | 18 | +++ b/configure.ac |
20 | @@ -146,6 +146,11 @@ if test "x$GCC" = "xyes"; then | 19 | @@ -82,6 +82,11 @@ if test "x$GCC" = "xyes"; then |
21 | fi | 20 | fi |
22 | fi | 21 | fi |
23 | 22 | ||
@@ -29,7 +28,7 @@ index 684e2d1..e5e3aab 100644 | |||
29 | dnl if glib is available we can enable the tests | 28 | dnl if glib is available we can enable the tests |
30 | PKG_CHECK_MODULES(TEST_DEPS, [glib-2.0 >= 2.6.0 gobject-2.0 >= 2.6.0], | 29 | PKG_CHECK_MODULES(TEST_DEPS, [glib-2.0 >= 2.6.0 gobject-2.0 >= 2.6.0], |
31 | [ glib_prefix="`$PKG_CONFIG --variable=prefix glib-2.0`" | 30 | [ glib_prefix="`$PKG_CONFIG --variable=prefix glib-2.0`" |
32 | @@ -156,6 +161,11 @@ PKG_CHECK_MODULES(TEST_DEPS, [glib-2.0 >= 2.6.0 gobject-2.0 >= 2.6.0], | 31 | @@ -92,6 +97,11 @@ PKG_CHECK_MODULES(TEST_DEPS, [glib-2.0 >= 2.6.0 gobject-2.0 >= 2.6.0], |
33 | build_tests="no" | 32 | build_tests="no" |
34 | ] | 33 | ] |
35 | ) | 34 | ) |
@@ -41,6 +40,3 @@ index 684e2d1..e5e3aab 100644 | |||
41 | AM_CONDITIONAL(GTK_DOC_USE_LIBTOOL, test -n "$LIBTOOL" -a x$gtk_doc_use_libtool = xyes ) | 40 | AM_CONDITIONAL(GTK_DOC_USE_LIBTOOL, test -n "$LIBTOOL" -a x$gtk_doc_use_libtool = xyes ) |
42 | dnl this enables the rule in test/Makefile.am | 41 | dnl this enables the rule in test/Makefile.am |
43 | AM_CONDITIONAL(BUILD_TESTS, test x$build_tests = xyes) | 42 | AM_CONDITIONAL(BUILD_TESTS, test x$build_tests = xyes) |
44 | -- | ||
45 | 2.14.1 | ||
46 | |||
diff --git a/meta/recipes-gnome/gtk-doc/files/no-clobber.patch b/meta/recipes-gnome/gtk-doc/files/no-clobber.patch index d1695479dc..90207d6c12 100644 --- a/meta/recipes-gnome/gtk-doc/files/no-clobber.patch +++ b/meta/recipes-gnome/gtk-doc/files/no-clobber.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 8bc4c1f169f89bc5531da5b7e892b8f20e0f9a18 Mon Sep 17 00:00:00 2001 | 1 | From fa5e31e02b9d3c6c5ec3cdc1f34dd6c179520fd0 Mon Sep 17 00:00:00 2001 |
2 | From: Ross Burton <ross.burton@intel.com> | 2 | From: Ross Burton <ross.burton@intel.com> |
3 | Date: Wed, 21 Mar 2018 14:47:29 +0000 | 3 | Date: Wed, 21 Mar 2018 14:47:29 +0000 |
4 | Subject: [PATCH] gtk-doc: don't regenerate gtk-doc in do_install | 4 | Subject: [PATCH] gtk-doc: don't regenerate gtk-doc in do_install |
@@ -20,16 +20,15 @@ files which don't already exist. | |||
20 | 20 | ||
21 | Upstream-Status: Submitted [https://bugzilla.gnome.org/show_bug.cgi?id=794571] | 21 | Upstream-Status: Submitted [https://bugzilla.gnome.org/show_bug.cgi?id=794571] |
22 | Signed-off-by: Ross Burton <ross.burton@intel.com> | 22 | Signed-off-by: Ross Burton <ross.burton@intel.com> |
23 | |||
24 | --- | 23 | --- |
25 | buildsystems/autotools/gtk-doc.make | 2 +- | 24 | buildsystems/autotools/gtk-doc.make | 2 +- |
26 | 1 file changed, 1 insertion(+), 1 deletion(-) | 25 | 1 file changed, 1 insertion(+), 1 deletion(-) |
27 | 26 | ||
28 | diff --git a/buildsystems/autotools/gtk-doc.make b/buildsystems/autotools/gtk-doc.make | 27 | diff --git a/buildsystems/autotools/gtk-doc.make b/buildsystems/autotools/gtk-doc.make |
29 | index 7d9a27f..8cb336d 100644 | 28 | index fb2e7e2..4f6fe2b 100644 |
30 | --- a/buildsystems/autotools/gtk-doc.make | 29 | --- a/buildsystems/autotools/gtk-doc.make |
31 | +++ b/buildsystems/autotools/gtk-doc.make | 30 | +++ b/buildsystems/autotools/gtk-doc.make |
32 | @@ -111,7 +111,7 @@ setup-build.stamp: | 31 | @@ -112,7 +112,7 @@ setup-build.stamp: |
33 | destdir=`dirname $(abs_builddir)/$$file`; \ | 32 | destdir=`dirname $(abs_builddir)/$$file`; \ |
34 | test -d "$$destdir" || mkdir -p "$$destdir"; \ | 33 | test -d "$$destdir" || mkdir -p "$$destdir"; \ |
35 | test -f $(abs_srcdir)/$$file && \ | 34 | test -f $(abs_srcdir)/$$file && \ |
diff --git a/meta/recipes-gnome/gtk-doc/files/pkg-config-native.patch b/meta/recipes-gnome/gtk-doc/files/pkg-config-native.patch index 1d260aa72b..1b5c190f56 100644 --- a/meta/recipes-gnome/gtk-doc/files/pkg-config-native.patch +++ b/meta/recipes-gnome/gtk-doc/files/pkg-config-native.patch | |||
@@ -1,11 +1,10 @@ | |||
1 | From ebb0f7313a0931f646e86badce2627eff2fa37a8 Mon Sep 17 00:00:00 2001 | 1 | From c11fba9bcb8d55568067e626bd959841c05081e5 Mon Sep 17 00:00:00 2001 |
2 | From: Ross Burton <ross.burton@intel.com> | 2 | From: Ross Burton <ross.burton@intel.com> |
3 | Date: Mon, 5 Sep 2016 22:25:44 +0100 | 3 | Date: Mon, 5 Sep 2016 22:25:44 +0100 |
4 | Subject: [PATCH] Use native pkg-config when looking for gtk-doc. | 4 | Subject: [PATCH] Use native pkg-config when looking for gtk-doc. |
5 | 5 | ||
6 | Upstream-Status: Inappropriate | 6 | Upstream-Status: Inappropriate |
7 | Signed-off-by: Ross Burton <ross.burton@intel.com> | 7 | Signed-off-by: Ross Burton <ross.burton@intel.com> |
8 | |||
9 | --- | 8 | --- |
10 | buildsystems/autotools/gtk-doc.m4 | 7 ++++++- | 9 | buildsystems/autotools/gtk-doc.m4 | 7 ++++++- |
11 | 1 file changed, 6 insertions(+), 1 deletion(-) | 10 | 1 file changed, 6 insertions(+), 1 deletion(-) |
diff --git a/meta/recipes-gnome/gtk-doc/gtk-doc_1.33.2.bb b/meta/recipes-gnome/gtk-doc/gtk-doc_1.34.0.bb index 4fd5a6e925..693ae427e0 100644 --- a/meta/recipes-gnome/gtk-doc/gtk-doc_1.33.2.bb +++ b/meta/recipes-gnome/gtk-doc/gtk-doc_1.34.0.bb | |||
@@ -21,12 +21,11 @@ PACKAGECONFIG[tests] = "--enable-tests,--disable-tests,glib-2.0" | |||
21 | 21 | ||
22 | CACHED_CONFIGUREVARS += "ac_cv_path_XSLTPROC=xsltproc" | 22 | CACHED_CONFIGUREVARS += "ac_cv_path_XSLTPROC=xsltproc" |
23 | 23 | ||
24 | SRC_URI[archive.sha256sum] = "cc1b709a20eb030a278a1f9842a362e00402b7f834ae1df4c1998a723152bf43" | 24 | SRC_URI[archive.sha256sum] = "b20b72b32a80bc18c7f975c9d4c16460c2276566a0b50f87d6852dff3aa7861c" |
25 | SRC_URI += "file://0001-Do-not-hardocode-paths-to-perl-python-in-scripts.patch \ | 25 | SRC_URI += "file://0001-Do-not-hardocode-paths-to-perl-python-in-scripts.patch \ |
26 | file://0001-Do-not-error-out-if-xsltproc-is-not-found.patch \ | 26 | file://0001-Do-not-error-out-if-xsltproc-is-not-found.patch \ |
27 | file://conditionaltests.patch \ | 27 | file://conditionaltests.patch \ |
28 | file://no-clobber.patch \ | 28 | file://no-clobber.patch \ |
29 | file://0001-Don-t-use-docdir-from-environment.patch \ | ||
30 | " | 29 | " |
31 | SRC_URI:append:class-native = " file://pkg-config-native.patch" | 30 | SRC_URI:append:class-native = " file://pkg-config-native.patch" |
32 | 31 | ||
diff --git a/meta/recipes-gnome/libadwaita/libadwaita_1.5.0.bb b/meta/recipes-gnome/libadwaita/libadwaita_1.5.1.bb index b0b1e4502a..6cb67c0db0 100644 --- a/meta/recipes-gnome/libadwaita/libadwaita_1.5.0.bb +++ b/meta/recipes-gnome/libadwaita/libadwaita_1.5.1.bb | |||
@@ -3,7 +3,6 @@ HOMEPAGE = "https://gitlab.gnome.org/GNOME/libadwaita" | |||
3 | LICENSE="LGPL-2.1-or-later" | 3 | LICENSE="LGPL-2.1-or-later" |
4 | LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c" | 4 | LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c" |
5 | 5 | ||
6 | |||
7 | DEPENDS = " \ | 6 | DEPENDS = " \ |
8 | gtk4 \ | 7 | gtk4 \ |
9 | appstream \ | 8 | appstream \ |
@@ -11,7 +10,7 @@ DEPENDS = " \ | |||
11 | 10 | ||
12 | inherit gnomebase gobject-introspection gi-docgen vala features_check | 11 | inherit gnomebase gobject-introspection gi-docgen vala features_check |
13 | 12 | ||
14 | SRC_URI[archive.sha256sum] = "fd92287df9bb95c963654fb6e70d3e082e2bcb37b147e0e3c905567167993783" | 13 | SRC_URI[archive.sha256sum] = "7f144c5887d6dd2d99517c00fd42395ee20abc13ce55958a4fda64e6d7e473f8" |
15 | 14 | ||
16 | ANY_OF_DISTRO_FEATURES = "${GTK3DISTROFEATURES}" | 15 | ANY_OF_DISTRO_FEATURES = "${GTK3DISTROFEATURES}" |
17 | REQUIRED_DISTRO_FEATURES = "opengl" | 16 | REQUIRED_DISTRO_FEATURES = "opengl" |
diff --git a/meta/recipes-gnome/libportal/files/0001-meson.build-fix-build-race-when-building-GTK-vapi-fi.patch b/meta/recipes-gnome/libportal/files/0001-meson.build-fix-build-race-when-building-GTK-vapi-fi.patch new file mode 100644 index 0000000000..fb015d3632 --- /dev/null +++ b/meta/recipes-gnome/libportal/files/0001-meson.build-fix-build-race-when-building-GTK-vapi-fi.patch | |||
@@ -0,0 +1,49 @@ | |||
1 | From 26f96a178f8a0afded00bdd7238728c0b6e42a6b Mon Sep 17 00:00:00 2001 | ||
2 | From: Richard Purdie <richard.purdie@linuxfoundation.org> | ||
3 | Date: Thu, 9 May 2024 18:44:41 +0000 | ||
4 | Subject: [PATCH] meson.build: fix build race when building GTK vapi files | ||
5 | |||
6 | There's a build race when building the GTK vapi files: | ||
7 | |||
8 | FAILED: libportal/libportal-gtk4.vapi | ||
9 | error: Package `libportal' not found in specified Vala API directories or GObject-Introspection GIR directories | ||
10 | |||
11 | This can be verified by adding "sleep 10;" to the command for the | ||
12 | libportal/libportal.vapi target in the generated build.ninja file. | ||
13 | |||
14 | The GTK vapi files need to have access to the generic libportal.vapi file, | ||
15 | but there is no explicit dependency. Switch the dependency name 'libportal' | ||
16 | to the dependency object libportal_vapi so that Meson generates the | ||
17 | dependency correctly. | ||
18 | |||
19 | Upstream-Status: Backport | ||
20 | Signed-off-by: Ross Burton <ross.burton@arm.com> | ||
21 | --- | ||
22 | libportal/meson.build | 4 ++-- | ||
23 | 1 file changed, 2 insertions(+), 2 deletions(-) | ||
24 | |||
25 | diff --git a/libportal/meson.build b/libportal/meson.build | ||
26 | index fff7603..4e67f40 100644 | ||
27 | --- a/libportal/meson.build | ||
28 | +++ b/libportal/meson.build | ||
29 | @@ -168,7 +168,7 @@ if gtk3_dep.found() | ||
30 | if vapi | ||
31 | libportal_gtk3_vapi = gnome.generate_vapi('libportal-gtk3', | ||
32 | sources: libportal_gtk3_gir[0], | ||
33 | - packages: ['gio-2.0', 'gtk+-3.0', 'libportal'], | ||
34 | + packages: ['gio-2.0', 'gtk+-3.0', libportal_vapi], | ||
35 | gir_dirs: [meson.current_build_dir()], | ||
36 | vapi_dirs: [meson.current_build_dir()], | ||
37 | install: true, | ||
38 | @@ -227,7 +227,7 @@ if gtk4_dep.found() | ||
39 | if vapi | ||
40 | libportal_gtk4_vapi = gnome.generate_vapi('libportal-gtk4', | ||
41 | sources: libportal_gtk4_gir[0], | ||
42 | - packages: ['gio-2.0', 'gtk4', 'libportal'], | ||
43 | + packages: ['gio-2.0', 'gtk4', libportal_vapi], | ||
44 | gir_dirs: [meson.current_build_dir()], | ||
45 | vapi_dirs: [meson.current_build_dir()], | ||
46 | install: true, | ||
47 | -- | ||
48 | 2.34.1 | ||
49 | |||
diff --git a/meta/recipes-gnome/libportal/libportal_0.7.1.bb b/meta/recipes-gnome/libportal/libportal_0.7.1.bb index 22e45559c9..6ddfef76d3 100644 --- a/meta/recipes-gnome/libportal/libportal_0.7.1.bb +++ b/meta/recipes-gnome/libportal/libportal_0.7.1.bb | |||
@@ -6,7 +6,8 @@ BUGTRACKER = "https://github.com/flatpak/libportal/issues" | |||
6 | LICENSE = "LGPL-3.0-only" | 6 | LICENSE = "LGPL-3.0-only" |
7 | LIC_FILES_CHKSUM = "file://COPYING;md5=3000208d539ec061b899bce1d9ce9404" | 7 | LIC_FILES_CHKSUM = "file://COPYING;md5=3000208d539ec061b899bce1d9ce9404" |
8 | 8 | ||
9 | SRC_URI = "git://github.com/flatpak/${BPN}.git;protocol=https;branch=main" | 9 | SRC_URI = "git://github.com/flatpak/${BPN}.git;protocol=https;branch=main \ |
10 | file://0001-meson.build-fix-build-race-when-building-GTK-vapi-fi.patch" | ||
10 | SRCREV = "e9ed3a50cdde321eaf42361212480a66eb94a57a" | 11 | SRCREV = "e9ed3a50cdde321eaf42361212480a66eb94a57a" |
11 | S = "${WORKDIR}/git" | 12 | S = "${WORKDIR}/git" |
12 | 13 | ||
diff --git a/meta/recipes-gnome/libxmlb/libxmlb_0.3.18.bb b/meta/recipes-gnome/libxmlb/libxmlb_0.3.19.bb index 10ceab0721..de8d860d00 100644 --- a/meta/recipes-gnome/libxmlb/libxmlb_0.3.18.bb +++ b/meta/recipes-gnome/libxmlb/libxmlb_0.3.19.bb | |||
@@ -8,7 +8,7 @@ SRC_URI = " \ | |||
8 | file://0001-xb-selftest.c-hardcode-G_TEST_SRCDIR.patch \ | 8 | file://0001-xb-selftest.c-hardcode-G_TEST_SRCDIR.patch \ |
9 | file://run-ptest \ | 9 | file://run-ptest \ |
10 | " | 10 | " |
11 | SRCREV = "59cf2e0bf3c4d0905a32e32828d6100784993621" | 11 | SRCREV = "4393955fb7c8bbcb6a2c65ff54f16c39dc165e59" |
12 | S = "${WORKDIR}/git" | 12 | S = "${WORKDIR}/git" |
13 | 13 | ||
14 | DEPENDS = "glib-2.0 xz zstd" | 14 | DEPENDS = "glib-2.0 xz zstd" |
diff --git a/meta/recipes-graphics/builder/builder_0.1.bb b/meta/recipes-graphics/builder/builder_0.1.bb index 52c9351f93..7719b783c2 100644 --- a/meta/recipes-graphics/builder/builder_0.1.bb +++ b/meta/recipes-graphics/builder/builder_0.1.bb | |||
@@ -7,7 +7,8 @@ LIC_FILES_CHKSUM = "file://builder_session.sh;endline=5;md5=84796c3c41785d86100f | |||
7 | SRC_URI = "file://builder_session.sh \ | 7 | SRC_URI = "file://builder_session.sh \ |
8 | " | 8 | " |
9 | 9 | ||
10 | S = "${WORKDIR}" | 10 | S = "${WORKDIR}/sources" |
11 | UNPACKDIR = "${S}" | ||
11 | 12 | ||
12 | RDEPENDS:${PN} = "mini-x-session" | 13 | RDEPENDS:${PN} = "mini-x-session" |
13 | 14 | ||
diff --git a/meta/recipes-graphics/glslang/glslang_1.3.280.0.bb b/meta/recipes-graphics/glslang/glslang_1.3.283.0.bb index 637082c719..b58bd1e956 100644 --- a/meta/recipes-graphics/glslang/glslang_1.3.280.0.bb +++ b/meta/recipes-graphics/glslang/glslang_1.3.283.0.bb | |||
@@ -8,14 +8,14 @@ HOMEPAGE = "https://www.khronos.org/opengles/sdk/tools/Reference-Compiler" | |||
8 | LICENSE = "BSD-3-Clause & BSD-2-Clause & MIT & Apache-2.0 & GPL-3-with-bison-exception" | 8 | LICENSE = "BSD-3-Clause & BSD-2-Clause & MIT & Apache-2.0 & GPL-3-with-bison-exception" |
9 | LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=2a2b5acd7bc4844964cfda45fe807dc3" | 9 | LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=2a2b5acd7bc4844964cfda45fe807dc3" |
10 | 10 | ||
11 | SRCREV = "ee2f5d09eaf8f4e8d0d598bd2172fce290d4ca60" | 11 | SRCREV = "e8dd0b6903b34f1879520b444634c75ea2deedf5" |
12 | SRC_URI = "git://github.com/KhronosGroup/glslang.git;protocol=https;branch=main \ | 12 | SRC_URI = "git://github.com/KhronosGroup/glslang.git;protocol=https;branch=main \ |
13 | file://0001-generate-glslang-pkg-config.patch \ | 13 | file://0001-generate-glslang-pkg-config.patch \ |
14 | " | 14 | " |
15 | PE = "1" | 15 | PE = "1" |
16 | # These recipes need to be updated in lockstep with each other: | 16 | # These recipes need to be updated in lockstep with each other: |
17 | # glslang, vulkan-headers, vulkan-loader, vulkan-tools, spirv-headers, spirv-tools | 17 | # glslang, vulkan-headers, vulkan-loader, vulkan-tools, spirv-headers, spirv-tools |
18 | # vulkan-validation-layers, vulkan-utility-libraries. | 18 | # vulkan-validation-layers, vulkan-utility-libraries, vulkan-volk. |
19 | # The tags versions should always be sdk-x.y.z, as this is what | 19 | # The tags versions should always be sdk-x.y.z, as this is what |
20 | # upstream considers a release. | 20 | # upstream considers a release. |
21 | UPSTREAM_CHECK_GITTAGREGEX = "sdk-(?P<pver>\d+(\.\d+)+)" | 21 | UPSTREAM_CHECK_GITTAGREGEX = "sdk-(?P<pver>\d+(\.\d+)+)" |
diff --git a/meta/recipes-graphics/harfbuzz/harfbuzz_8.4.0.bb b/meta/recipes-graphics/harfbuzz/harfbuzz_8.5.0.bb index fc6951d9ed..6b0eb12046 100644 --- a/meta/recipes-graphics/harfbuzz/harfbuzz_8.4.0.bb +++ b/meta/recipes-graphics/harfbuzz/harfbuzz_8.5.0.bb | |||
@@ -9,7 +9,7 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=b98429b8e8e3c2a67cfef01e99e4893d \ | |||
9 | " | 9 | " |
10 | 10 | ||
11 | SRC_URI = "${GITHUB_BASE_URI}/download/${PV}/${BPN}-${PV}.tar.xz" | 11 | SRC_URI = "${GITHUB_BASE_URI}/download/${PV}/${BPN}-${PV}.tar.xz" |
12 | SRC_URI[sha256sum] = "af4ea73e25ab748c8c063b78c2f88e48833db9b2ac369e29bd115702e789755e" | 12 | SRC_URI[sha256sum] = "77e4f7f98f3d86bf8788b53e6832fb96279956e1c3961988ea3d4b7ca41ddc27" |
13 | 13 | ||
14 | DEPENDS += "glib-2.0-native" | 14 | DEPENDS += "glib-2.0-native" |
15 | 15 | ||
@@ -32,6 +32,9 @@ PACKAGES =+ "${PN}-icu ${PN}-icu-dev ${PN}-subset" | |||
32 | 32 | ||
33 | LEAD_SONAME = "libharfbuzz.so" | 33 | LEAD_SONAME = "libharfbuzz.so" |
34 | 34 | ||
35 | # Remove when https://github.com/harfbuzz/harfbuzz/issues/4671 is resolved | ||
36 | EXTRA_OEMESON += "-Dcpp_std=c++17" | ||
37 | |||
35 | do_install:append() { | 38 | do_install:append() { |
36 | # If no tools are installed due to PACKAGECONFIG then this directory might | 39 | # If no tools are installed due to PACKAGECONFIG then this directory might |
37 | # still be installed, so remove it to stop packaging warnings. | 40 | # still be installed, so remove it to stop packaging warnings. |
diff --git a/meta/recipes-graphics/libsdl2/libsdl2_2.30.2.bb b/meta/recipes-graphics/libsdl2/libsdl2_2.30.3.bb index f9dacb288c..68cc2790e5 100644 --- a/meta/recipes-graphics/libsdl2/libsdl2_2.30.2.bb +++ b/meta/recipes-graphics/libsdl2/libsdl2_2.30.3.bb | |||
@@ -25,7 +25,7 @@ SRC_URI = "http://www.libsdl.org/release/SDL2-${PV}.tar.gz" | |||
25 | 25 | ||
26 | S = "${WORKDIR}/SDL2-${PV}" | 26 | S = "${WORKDIR}/SDL2-${PV}" |
27 | 27 | ||
28 | SRC_URI[sha256sum] = "891d66ac8cae51361d3229e3336ebec1c407a8a2a063b61df14f5fdf3ab5ac31" | 28 | SRC_URI[sha256sum] = "820440072f8f5b50188c1dae104f2ad25984de268785be40c41a099a510f0aec" |
29 | 29 | ||
30 | inherit cmake lib_package binconfig-disabled pkgconfig upstream-version-is-even | 30 | inherit cmake lib_package binconfig-disabled pkgconfig upstream-version-is-even |
31 | 31 | ||
diff --git a/meta/recipes-graphics/matchbox-session/matchbox-session_0.1.bb b/meta/recipes-graphics/matchbox-session/matchbox-session_0.1.bb index d1f0a67d84..8a4cfef631 100644 --- a/meta/recipes-graphics/matchbox-session/matchbox-session_0.1.bb +++ b/meta/recipes-graphics/matchbox-session/matchbox-session_0.1.bb | |||
@@ -10,7 +10,9 @@ SECTION = "x11" | |||
10 | RCONFLICTS:${PN} = "matchbox-common" | 10 | RCONFLICTS:${PN} = "matchbox-common" |
11 | 11 | ||
12 | SRC_URI = "file://matchbox-session" | 12 | SRC_URI = "file://matchbox-session" |
13 | S = "${WORKDIR}" | 13 | |
14 | S = "${WORKDIR}/sources" | ||
15 | UNPACKDIR = "${S}" | ||
14 | 16 | ||
15 | 17 | ||
16 | inherit update-alternatives | 18 | inherit update-alternatives |
diff --git a/meta/recipes-graphics/mesa/files/0001-meson.build-check-for-all-linux-host_os-combinations.patch b/meta/recipes-graphics/mesa/files/0001-meson.build-check-for-all-linux-host_os-combinations.patch deleted file mode 100644 index 7be7d81eeb..0000000000 --- a/meta/recipes-graphics/mesa/files/0001-meson.build-check-for-all-linux-host_os-combinations.patch +++ /dev/null | |||
@@ -1,42 +0,0 @@ | |||
1 | From e8ec6b1cc5e401ba719095722d8b317d755ae613 Mon Sep 17 00:00:00 2001 | ||
2 | From: Alistair Francis <alistair@alistair23.me> | ||
3 | Date: Thu, 14 Nov 2019 13:04:49 -0800 | ||
4 | Subject: [PATCH] meson.build: check for all linux host_os combinations | ||
5 | |||
6 | Make sure that we are also looking for our host_os combinations like | ||
7 | linux-musl etc. when assuming support for DRM/KMS. | ||
8 | |||
9 | Also delete a duplicate line. | ||
10 | |||
11 | Upstream-Status: Pending | ||
12 | |||
13 | Signed-off-by: Anuj Mittal <anuj.mittal@intel.com> | ||
14 | Signed-off-by: Fabio Berton <fabio.berton@ossystems.com.br> | ||
15 | Signed-off-by: Otavio Salvador <otavio@ossystems.com.br> | ||
16 | Signed-off-by: Alistair Francis <alistair@alistair23.me> | ||
17 | --- | ||
18 | meson.build | 4 ++-- | ||
19 | 1 file changed, 2 insertions(+), 2 deletions(-) | ||
20 | |||
21 | diff --git a/meson.build b/meson.build | ||
22 | index 133fd9a..817861e 100644 | ||
23 | --- a/meson.build | ||
24 | +++ b/meson.build | ||
25 | @@ -128,7 +128,7 @@ with_any_opengl = with_opengl or with_gles1 or with_gles2 | ||
26 | # Only build shared_glapi if at least one OpenGL API is enabled | ||
27 | with_shared_glapi = with_shared_glapi and with_any_opengl | ||
28 | |||
29 | -system_has_kms_drm = ['openbsd', 'netbsd', 'freebsd', 'gnu/kfreebsd', 'dragonfly', 'linux', 'sunos', 'android', 'managarm'].contains(host_machine.system()) | ||
30 | +system_has_kms_drm = ['openbsd', 'netbsd', 'freebsd', 'gnu/kfreebsd', 'dragonfly', 'linux', 'sunos', 'android', 'managarm'].contains(host_machine.system()) or host_machine.system().startswith('linux') | ||
31 | |||
32 | gallium_drivers = get_option('gallium-drivers') | ||
33 | if gallium_drivers.contains('auto') | ||
34 | @@ -997,7 +997,7 @@ if cc.has_function('fmemopen') | ||
35 | endif | ||
36 | |||
37 | # TODO: this is very incomplete | ||
38 | -if ['linux', 'cygwin', 'gnu', 'freebsd', 'gnu/kfreebsd', 'haiku', 'android', 'managarm'].contains(host_machine.system()) | ||
39 | +if ['linux', 'cygwin', 'gnu', 'freebsd', 'gnu/kfreebsd', 'haiku', 'android', 'managarm'].contains(host_machine.system()) or host_machine.system().startswith('linux') | ||
40 | pre_args += '-D_GNU_SOURCE' | ||
41 | elif host_machine.system() == 'sunos' | ||
42 | pre_args += '-D__EXTENSIONS__' | ||
diff --git a/meta/recipes-graphics/mesa/mesa-gl_24.0.5.bb b/meta/recipes-graphics/mesa/mesa-gl_24.0.7.bb index ca160f1bfc..ca160f1bfc 100644 --- a/meta/recipes-graphics/mesa/mesa-gl_24.0.5.bb +++ b/meta/recipes-graphics/mesa/mesa-gl_24.0.7.bb | |||
diff --git a/meta/recipes-graphics/mesa/mesa.inc b/meta/recipes-graphics/mesa/mesa.inc index 77e9c80fcb..2581ccb120 100644 --- a/meta/recipes-graphics/mesa/mesa.inc +++ b/meta/recipes-graphics/mesa/mesa.inc | |||
@@ -15,14 +15,13 @@ LIC_FILES_CHKSUM = "file://docs/license.rst;md5=63779ec98d78d823a9dc533a0735ef10 | |||
15 | PE = "2" | 15 | PE = "2" |
16 | 16 | ||
17 | SRC_URI = "https://mesa.freedesktop.org/archive/mesa-${PV}.tar.xz \ | 17 | SRC_URI = "https://mesa.freedesktop.org/archive/mesa-${PV}.tar.xz \ |
18 | file://0001-meson.build-check-for-all-linux-host_os-combinations.patch \ | ||
19 | file://0001-meson-misdetects-64bit-atomics-on-mips-clang.patch \ | 18 | file://0001-meson-misdetects-64bit-atomics-on-mips-clang.patch \ |
20 | file://0001-drisw-fix-build-without-dri3.patch \ | 19 | file://0001-drisw-fix-build-without-dri3.patch \ |
21 | file://0002-glxext-don-t-try-zink-if-not-enabled-in-mesa.patch \ | 20 | file://0002-glxext-don-t-try-zink-if-not-enabled-in-mesa.patch \ |
22 | file://0001-Revert-meson-do-not-pull-in-clc-for-clover.patch \ | 21 | file://0001-Revert-meson-do-not-pull-in-clc-for-clover.patch \ |
23 | " | 22 | " |
24 | 23 | ||
25 | SRC_URI[sha256sum] = "38cc245ca8faa3c69da6d2687f8906377001f63365348a62cc6f7fafb1e8c018" | 24 | SRC_URI[sha256sum] = "7454425f1ed4a6f1b5b107e1672b30c88b22ea0efea000ae2c7d96db93f6c26a" |
26 | 25 | ||
27 | UPSTREAM_CHECK_GITTAGREGEX = "mesa-(?P<pver>\d+(\.\d+)+)" | 26 | UPSTREAM_CHECK_GITTAGREGEX = "mesa-(?P<pver>\d+(\.\d+)+)" |
28 | 27 | ||
diff --git a/meta/recipes-graphics/mesa/mesa_24.0.5.bb b/meta/recipes-graphics/mesa/mesa_24.0.7.bb index 96e8aa38d6..96e8aa38d6 100644 --- a/meta/recipes-graphics/mesa/mesa_24.0.5.bb +++ b/meta/recipes-graphics/mesa/mesa_24.0.7.bb | |||
diff --git a/meta/recipes-graphics/mini-x-session/mini-x-session_0.1.bb b/meta/recipes-graphics/mini-x-session/mini-x-session_0.1.bb index ba81d0b17a..48d50c8f66 100644 --- a/meta/recipes-graphics/mini-x-session/mini-x-session_0.1.bb +++ b/meta/recipes-graphics/mini-x-session/mini-x-session_0.1.bb | |||
@@ -11,7 +11,9 @@ SECTION = "x11" | |||
11 | RCONFLICTS:${PN} = "matchbox-common" | 11 | RCONFLICTS:${PN} = "matchbox-common" |
12 | 12 | ||
13 | SRC_URI = "file://mini-x-session" | 13 | SRC_URI = "file://mini-x-session" |
14 | S = "${WORKDIR}" | 14 | |
15 | S = "${WORKDIR}/sources" | ||
16 | UNPACKDIR = "${S}" | ||
15 | 17 | ||
16 | RDEPENDS:${PN} = "sudo" | 18 | RDEPENDS:${PN} = "sudo" |
17 | 19 | ||
diff --git a/meta/recipes-graphics/pong-clock/pong-clock_1.0.bb b/meta/recipes-graphics/pong-clock/pong-clock_1.0.bb index d0794f8ee6..ee5173dc83 100644 --- a/meta/recipes-graphics/pong-clock/pong-clock_1.0.bb +++ b/meta/recipes-graphics/pong-clock/pong-clock_1.0.bb | |||
@@ -10,7 +10,8 @@ SRC_URI = "file://pong-clock-no-flicker.c" | |||
10 | 10 | ||
11 | LIC_FILES_CHKSUM = "file://pong-clock-no-flicker.c;beginline=1;endline=23;md5=dd248d50f73f746d1ee78586b0b2ebd3" | 11 | LIC_FILES_CHKSUM = "file://pong-clock-no-flicker.c;beginline=1;endline=23;md5=dd248d50f73f746d1ee78586b0b2ebd3" |
12 | 12 | ||
13 | S = "${WORKDIR}" | 13 | S = "${WORKDIR}/sources" |
14 | UNPACKDIR = "${S}" | ||
14 | 15 | ||
15 | do_compile () { | 16 | do_compile () { |
16 | ${CC} ${CFLAGS} ${LDFLAGS} -o pong-clock pong-clock-no-flicker.c `pkg-config --cflags --libs x11 xau xdmcp` | 17 | ${CC} ${CFLAGS} ${LDFLAGS} -o pong-clock pong-clock-no-flicker.c `pkg-config --cflags --libs x11 xau xdmcp` |
diff --git a/meta/recipes-graphics/shaderc/shaderc_2024.0.bb b/meta/recipes-graphics/shaderc/shaderc_2024.1.bb index 9975c608ac..1f0b12ef57 100644 --- a/meta/recipes-graphics/shaderc/shaderc_2024.0.bb +++ b/meta/recipes-graphics/shaderc/shaderc_2024.1.bb | |||
@@ -6,7 +6,7 @@ HOMEPAGE = "https://github.com/google/shaderc" | |||
6 | LICENSE = "Apache-2.0" | 6 | LICENSE = "Apache-2.0" |
7 | LIC_FILES_CHKSUM = "file://LICENSE;md5=86d3f3a95c324c9479bd8986968f4327" | 7 | LIC_FILES_CHKSUM = "file://LICENSE;md5=86d3f3a95c324c9479bd8986968f4327" |
8 | 8 | ||
9 | SRCREV = "9f56ca620c07d6c4d119c65c1c1f3f1c584c9985" | 9 | SRCREV = "5d7736da11e626b49c5463aadb2afb228ad20276" |
10 | SRC_URI = "git://github.com/google/shaderc.git;protocol=https;branch=main \ | 10 | SRC_URI = "git://github.com/google/shaderc.git;protocol=https;branch=main \ |
11 | file://0001-cmake-disable-building-external-dependencies.patch \ | 11 | file://0001-cmake-disable-building-external-dependencies.patch \ |
12 | file://0002-libshaderc_util-fix-glslang-header-file-location.patch \ | 12 | file://0002-libshaderc_util-fix-glslang-header-file-location.patch \ |
diff --git a/meta/recipes-graphics/spir/spirv-headers_1.3.280.0.bb b/meta/recipes-graphics/spir/spirv-headers_1.3.283.0.bb index 26bfd9c4fa..5ec7e42b7c 100644 --- a/meta/recipes-graphics/spir/spirv-headers_1.3.280.0.bb +++ b/meta/recipes-graphics/spir/spirv-headers_1.3.283.0.bb | |||
@@ -4,12 +4,12 @@ HOMEPAGE = "https://www.khronos.org/registry/spir-v" | |||
4 | LICENSE = "MIT" | 4 | LICENSE = "MIT" |
5 | LIC_FILES_CHKSUM = "file://LICENSE;md5=d14ee3b13f42e9c9674acc5925c3d741" | 5 | LIC_FILES_CHKSUM = "file://LICENSE;md5=d14ee3b13f42e9c9674acc5925c3d741" |
6 | 6 | ||
7 | SRCREV = "8b246ff75c6615ba4532fe4fde20f1be090c3764" | 7 | SRCREV = "4f7b471f1a66b6d06462cd4ba57628cc0cd087d7" |
8 | SRC_URI = "git://github.com/KhronosGroup/SPIRV-Headers;protocol=https;branch=main" | 8 | SRC_URI = "git://github.com/KhronosGroup/SPIRV-Headers;protocol=https;branch=main" |
9 | PE = "1" | 9 | PE = "1" |
10 | # These recipes need to be updated in lockstep with each other: | 10 | # These recipes need to be updated in lockstep with each other: |
11 | # glslang, vulkan-headers, vulkan-loader, vulkan-tools, spirv-headers, spirv-tools | 11 | # glslang, vulkan-headers, vulkan-loader, vulkan-tools, spirv-headers, spirv-tools |
12 | # vulkan-validation-layers, vulkan-utility-libraries. | 12 | # vulkan-validation-layers, vulkan-utility-libraries, vulkan-volk. |
13 | # The tags versions should always be sdk-x.y.z, as this is what | 13 | # The tags versions should always be sdk-x.y.z, as this is what |
14 | # upstream considers a release. | 14 | # upstream considers a release. |
15 | UPSTREAM_CHECK_GITTAGREGEX = "sdk-(?P<pver>\d+(\.\d+)+)" | 15 | UPSTREAM_CHECK_GITTAGREGEX = "sdk-(?P<pver>\d+(\.\d+)+)" |
diff --git a/meta/recipes-graphics/spir/spirv-tools_1.3.280.0.bb b/meta/recipes-graphics/spir/spirv-tools_1.3.283.0.bb index d2b6acf946..292a17f00d 100644 --- a/meta/recipes-graphics/spir/spirv-tools_1.3.280.0.bb +++ b/meta/recipes-graphics/spir/spirv-tools_1.3.283.0.bb | |||
@@ -7,12 +7,12 @@ SECTION = "graphics" | |||
7 | LICENSE = "Apache-2.0" | 7 | LICENSE = "Apache-2.0" |
8 | LIC_FILES_CHKSUM = "file://LICENSE;md5=3b83ef96387f14655fc854ddc3c6bd57" | 8 | LIC_FILES_CHKSUM = "file://LICENSE;md5=3b83ef96387f14655fc854ddc3c6bd57" |
9 | 9 | ||
10 | SRCREV = "04896c462d9f3f504c99a4698605b6524af813c1" | 10 | SRCREV = "dd4b663e13c07fea4fbb3f70c1c91c86731099f7" |
11 | SRC_URI = "git://github.com/KhronosGroup/SPIRV-Tools.git;branch=main;protocol=https" | 11 | SRC_URI = "git://github.com/KhronosGroup/SPIRV-Tools.git;branch=main;protocol=https" |
12 | PE = "1" | 12 | PE = "1" |
13 | # These recipes need to be updated in lockstep with each other: | 13 | # These recipes need to be updated in lockstep with each other: |
14 | # glslang, vulkan-headers, vulkan-loader, vulkan-tools, spirv-headers, spirv-tools | 14 | # glslang, vulkan-headers, vulkan-loader, vulkan-tools, spirv-headers, spirv-tools |
15 | # vulkan-validation-layers, vulkan-utility-libraries. | 15 | # vulkan-validation-layers, vulkan-utility-libraries, vulkan-volk. |
16 | # The tags versions should always be sdk-x.y.z, as this is what | 16 | # The tags versions should always be sdk-x.y.z, as this is what |
17 | # upstream considers a release. | 17 | # upstream considers a release. |
18 | UPSTREAM_CHECK_GITTAGREGEX = "sdk-(?P<pver>\d+(\.\d+)+)" | 18 | UPSTREAM_CHECK_GITTAGREGEX = "sdk-(?P<pver>\d+(\.\d+)+)" |
diff --git a/meta/recipes-graphics/vulkan/vulkan-headers_1.3.280.0.bb b/meta/recipes-graphics/vulkan/vulkan-headers_1.3.283.0.bb index 371cc7304d..6423ef9df6 100644 --- a/meta/recipes-graphics/vulkan/vulkan-headers_1.3.280.0.bb +++ b/meta/recipes-graphics/vulkan/vulkan-headers_1.3.283.0.bb | |||
@@ -11,7 +11,7 @@ LICENSE = "Apache-2.0 & MIT" | |||
11 | LIC_FILES_CHKSUM = "file://LICENSE.md;md5=1bc355d8c4196f774c8b87ed1a8dd625" | 11 | LIC_FILES_CHKSUM = "file://LICENSE.md;md5=1bc355d8c4196f774c8b87ed1a8dd625" |
12 | SRC_URI = "git://github.com/KhronosGroup/Vulkan-Headers.git;branch=main;protocol=https" | 12 | SRC_URI = "git://github.com/KhronosGroup/Vulkan-Headers.git;branch=main;protocol=https" |
13 | 13 | ||
14 | SRCREV = "577baa05033cf1d9236b3d078ca4b3269ed87a2b" | 14 | SRCREV = "eaa319dade959cb61ed2229c8ea42e307cc8f8b3" |
15 | 15 | ||
16 | S = "${WORKDIR}/git" | 16 | S = "${WORKDIR}/git" |
17 | 17 | ||
@@ -22,7 +22,7 @@ RDEPENDS:${PN} += "python3-core" | |||
22 | 22 | ||
23 | # These recipes need to be updated in lockstep with each other: | 23 | # These recipes need to be updated in lockstep with each other: |
24 | # glslang, vulkan-headers, vulkan-loader, vulkan-tools, spirv-headers, spirv-tools, | 24 | # glslang, vulkan-headers, vulkan-loader, vulkan-tools, spirv-headers, spirv-tools, |
25 | # vulkan-validation-layers, vulkan-utility-libraries. | 25 | # vulkan-validation-layers, vulkan-utility-libraries, vulkan-volk. |
26 | # The tags versions should always be sdk-x.y.z, as this is what | 26 | # The tags versions should always be sdk-x.y.z, as this is what |
27 | # upstream considers a release. | 27 | # upstream considers a release. |
28 | UPSTREAM_CHECK_GITTAGREGEX = "sdk-(?P<pver>\d+(\.\d+)+)" | 28 | UPSTREAM_CHECK_GITTAGREGEX = "sdk-(?P<pver>\d+(\.\d+)+)" |
diff --git a/meta/recipes-graphics/vulkan/vulkan-loader_1.3.280.0.bb b/meta/recipes-graphics/vulkan/vulkan-loader_1.3.283.0.bb index b738771801..6c3197e13f 100644 --- a/meta/recipes-graphics/vulkan/vulkan-loader_1.3.280.0.bb +++ b/meta/recipes-graphics/vulkan/vulkan-loader_1.3.283.0.bb | |||
@@ -9,8 +9,8 @@ SECTION = "libs" | |||
9 | 9 | ||
10 | LICENSE = "Apache-2.0" | 10 | LICENSE = "Apache-2.0" |
11 | LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=7dbefed23242760aa3475ee42801c5ac" | 11 | LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=7dbefed23242760aa3475ee42801c5ac" |
12 | SRC_URI = "git://github.com/KhronosGroup/Vulkan-Loader.git;branch=vulkan-sdk-1.3.280;protocol=https" | 12 | SRC_URI = "git://github.com/KhronosGroup/Vulkan-Loader.git;branch=vulkan-sdk-1.3.283;protocol=https" |
13 | SRCREV = "61a9c50248e09f3a0e0be7ce6f8bb1663855f979" | 13 | SRCREV = "720be5198aad4696381d2e3eeadc131c9f56bdc6" |
14 | 14 | ||
15 | S = "${WORKDIR}/git" | 15 | S = "${WORKDIR}/git" |
16 | 16 | ||
@@ -37,7 +37,7 @@ RRECOMMENDS:${PN} = "mesa-vulkan-drivers" | |||
37 | 37 | ||
38 | # These recipes need to be updated in lockstep with each other: | 38 | # These recipes need to be updated in lockstep with each other: |
39 | # glslang, vulkan-headers, vulkan-loader, vulkan-tools, spirv-headers, spirv-tools, | 39 | # glslang, vulkan-headers, vulkan-loader, vulkan-tools, spirv-headers, spirv-tools, |
40 | # vulkan-validation-layers, vulkan-utility-libraries. | 40 | # vulkan-validation-layers, vulkan-utility-libraries, vulkan-volk. |
41 | # The tags versions should always be sdk-x.y.z, as this is what | 41 | # The tags versions should always be sdk-x.y.z, as this is what |
42 | # upstream considers a release. | 42 | # upstream considers a release. |
43 | UPSTREAM_CHECK_GITTAGREGEX = "sdk-(?P<pver>\d+(\.\d+)+)" | 43 | UPSTREAM_CHECK_GITTAGREGEX = "sdk-(?P<pver>\d+(\.\d+)+)" |
diff --git a/meta/recipes-graphics/vulkan/vulkan-tools_1.3.280.0.bb b/meta/recipes-graphics/vulkan/vulkan-tools_1.3.283.0.bb index a7e4a67aaa..a76f60735f 100644 --- a/meta/recipes-graphics/vulkan/vulkan-tools_1.3.280.0.bb +++ b/meta/recipes-graphics/vulkan/vulkan-tools_1.3.283.0.bb | |||
@@ -6,8 +6,8 @@ SECTION = "libs" | |||
6 | 6 | ||
7 | LICENSE = "Apache-2.0" | 7 | LICENSE = "Apache-2.0" |
8 | LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=3b83ef96387f14655fc854ddc3c6bd57" | 8 | LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=3b83ef96387f14655fc854ddc3c6bd57" |
9 | SRC_URI = "git://github.com/KhronosGroup/Vulkan-Tools.git;branch=vulkan-sdk-1.3.280;protocol=https" | 9 | SRC_URI = "git://github.com/KhronosGroup/Vulkan-Tools.git;branch=vulkan-sdk-1.3.283;protocol=https" |
10 | SRCREV = "136976082d0b14dad8b9687982b2a80cc6e6a633" | 10 | SRCREV = "38321da9031f5909f1ca2dbafac8840ef6b2c144" |
11 | 11 | ||
12 | S = "${WORKDIR}/git" | 12 | S = "${WORKDIR}/git" |
13 | 13 | ||
@@ -31,7 +31,7 @@ PACKAGECONFIG[wayland] = "-DBUILD_WSI_WAYLAND_SUPPORT=ON, -DBUILD_WSI_WAYLAND_SU | |||
31 | 31 | ||
32 | # These recipes need to be updated in lockstep with each other: | 32 | # These recipes need to be updated in lockstep with each other: |
33 | # glslang, vulkan-headers, vulkan-loader, vulkan-tools, spirv-headers, spirv-tools | 33 | # glslang, vulkan-headers, vulkan-loader, vulkan-tools, spirv-headers, spirv-tools |
34 | # vulkan-validation-layers, vulkan-utility-libraries. | 34 | # vulkan-validation-layers, vulkan-utility-libraries, vulkan-volk. |
35 | # The tags versions should always be sdk-x.y.z, as this is what | 35 | # The tags versions should always be sdk-x.y.z, as this is what |
36 | # upstream considers a release. | 36 | # upstream considers a release. |
37 | UPSTREAM_CHECK_GITTAGREGEX = "sdk-(?P<pver>\d+(\.\d+)+)" | 37 | UPSTREAM_CHECK_GITTAGREGEX = "sdk-(?P<pver>\d+(\.\d+)+)" |
diff --git a/meta/recipes-graphics/vulkan/vulkan-utility-libraries_1.3.280.0.bb b/meta/recipes-graphics/vulkan/vulkan-utility-libraries_1.3.283.0.bb index 3ab31af96a..3274bea8fd 100644 --- a/meta/recipes-graphics/vulkan/vulkan-utility-libraries_1.3.280.0.bb +++ b/meta/recipes-graphics/vulkan/vulkan-utility-libraries_1.3.283.0.bb | |||
@@ -10,7 +10,7 @@ LICENSE = "Apache-2.0" | |||
10 | LIC_FILES_CHKSUM = "file://LICENSE.md;md5=4ca2d6799091aaa98a8520f1b793939b" | 10 | LIC_FILES_CHKSUM = "file://LICENSE.md;md5=4ca2d6799091aaa98a8520f1b793939b" |
11 | 11 | ||
12 | SRC_URI = "git://github.com/KhronosGroup/Vulkan-Utility-Libraries.git;branch=main;protocol=https" | 12 | SRC_URI = "git://github.com/KhronosGroup/Vulkan-Utility-Libraries.git;branch=main;protocol=https" |
13 | SRCREV = "a4140c5fd47dcf3a030726a60b293db61cfb54a3" | 13 | SRCREV = "ad7f699a7b2b5deb66eb3de19f24aa33597ed65b" |
14 | 14 | ||
15 | S = "${WORKDIR}/git" | 15 | S = "${WORKDIR}/git" |
16 | 16 | ||
@@ -27,7 +27,7 @@ inherit cmake features_check pkgconfig | |||
27 | # These recipes need to be updated in lockstep with each other: | 27 | # These recipes need to be updated in lockstep with each other: |
28 | # glslang, vulkan-headers, vulkan-loader, vulkan-tools, | 28 | # glslang, vulkan-headers, vulkan-loader, vulkan-tools, |
29 | # vulkan-validation-layers, spirv-headers, spirv-tools, | 29 | # vulkan-validation-layers, spirv-headers, spirv-tools, |
30 | # vulkan-utility-libraries. | 30 | # vulkan-utility-libraries, vulkan-volk. |
31 | # The tags versions should always be sdk-x.y.z, as this is what | 31 | # The tags versions should always be sdk-x.y.z, as this is what |
32 | # upstream considers a release. | 32 | # upstream considers a release. |
33 | UPSTREAM_CHECK_GITTAGREGEX = "sdk-(?P<pver>\d+(\.\d+)+)" | 33 | UPSTREAM_CHECK_GITTAGREGEX = "sdk-(?P<pver>\d+(\.\d+)+)" |
diff --git a/meta/recipes-graphics/vulkan/vulkan-validation-layers_1.3.280.0.bb b/meta/recipes-graphics/vulkan/vulkan-validation-layers_1.3.283.0.bb index c488309c91..2f1efba856 100644 --- a/meta/recipes-graphics/vulkan/vulkan-validation-layers_1.3.280.0.bb +++ b/meta/recipes-graphics/vulkan/vulkan-validation-layers_1.3.283.0.bb | |||
@@ -8,8 +8,8 @@ SECTION = "libs" | |||
8 | LICENSE = "Apache-2.0 & MIT" | 8 | LICENSE = "Apache-2.0 & MIT" |
9 | LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=cd3c0bc366cd9b6a906e22f0bcb5910f" | 9 | LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=cd3c0bc366cd9b6a906e22f0bcb5910f" |
10 | 10 | ||
11 | SRC_URI = "git://github.com/KhronosGroup/Vulkan-ValidationLayers.git;branch=vulkan-sdk-1.3.280;protocol=https" | 11 | SRC_URI = "git://github.com/KhronosGroup/Vulkan-ValidationLayers.git;branch=vulkan-sdk-1.3.283;protocol=https" |
12 | SRCREV = "8506077b9a25a00684e8be24b779733ae1405a54" | 12 | SRCREV = "d5bed3e26b3d487e8d21f0cc39039351eac921a7" |
13 | 13 | ||
14 | S = "${WORKDIR}/git" | 14 | S = "${WORKDIR}/git" |
15 | 15 | ||
@@ -43,7 +43,7 @@ FILES_SOLIBSDEV = "" | |||
43 | # These recipes need to be updated in lockstep with each other: | 43 | # These recipes need to be updated in lockstep with each other: |
44 | # glslang, vulkan-headers, vulkan-loader, vulkan-tools, | 44 | # glslang, vulkan-headers, vulkan-loader, vulkan-tools, |
45 | # vulkan-validation-layers, spirv-headers, spirv-tools, | 45 | # vulkan-validation-layers, spirv-headers, spirv-tools, |
46 | # vulkan-utility-libraries. | 46 | # vulkan-utility-libraries, vulkan-volk. |
47 | # The tags versions should always be sdk-x.y.z, as this is what | 47 | # The tags versions should always be sdk-x.y.z, as this is what |
48 | # upstream considers a release. | 48 | # upstream considers a release. |
49 | UPSTREAM_CHECK_GITTAGREGEX = "sdk-(?P<pver>\d+(\.\d+)+)" | 49 | UPSTREAM_CHECK_GITTAGREGEX = "sdk-(?P<pver>\d+(\.\d+)+)" |
diff --git a/meta/recipes-graphics/vulkan/vulkan-volk_1.3.280.0.bb b/meta/recipes-graphics/vulkan/vulkan-volk_1.3.283.0.bb index 2ef12fedf8..5485aa0b0d 100644 --- a/meta/recipes-graphics/vulkan/vulkan-volk_1.3.280.0.bb +++ b/meta/recipes-graphics/vulkan/vulkan-volk_1.3.283.0.bb | |||
@@ -10,7 +10,7 @@ LICENSE = "MIT" | |||
10 | LIC_FILES_CHKSUM = "file://LICENSE.md;md5=12e6af3a0e2a5e5dbf7796aa82b64626" | 10 | LIC_FILES_CHKSUM = "file://LICENSE.md;md5=12e6af3a0e2a5e5dbf7796aa82b64626" |
11 | 11 | ||
12 | SRC_URI = "git://github.com/zeux/volk.git;branch=master;protocol=https" | 12 | SRC_URI = "git://github.com/zeux/volk.git;branch=master;protocol=https" |
13 | SRCREV = "01986ac85fa2e5c70df09aeae9c907e27c5d50b2" | 13 | SRCREV = "3a8068a57417940cf2bf9d837a7bb60d015ca2f1" |
14 | 14 | ||
15 | S = "${WORKDIR}/git" | 15 | S = "${WORKDIR}/git" |
16 | 16 | ||
@@ -27,7 +27,7 @@ inherit cmake features_check pkgconfig | |||
27 | # These recipes need to be updated in lockstep with each other: | 27 | # These recipes need to be updated in lockstep with each other: |
28 | # glslang, vulkan-headers, vulkan-loader, vulkan-tools, | 28 | # glslang, vulkan-headers, vulkan-loader, vulkan-tools, |
29 | # vulkan-validation-layers, spirv-headers, spirv-tools, | 29 | # vulkan-validation-layers, spirv-headers, spirv-tools, |
30 | # vulkan-utility-libraries. | 30 | # vulkan-utility-libraries, vulkan-volk. |
31 | # The tags versions should always be sdk-x.y.z, as this is what | 31 | # The tags versions should always be sdk-x.y.z, as this is what |
32 | # upstream considers a release. | 32 | # upstream considers a release. |
33 | UPSTREAM_CHECK_GITTAGREGEX = "sdk-(?P<pver>\d+(\.\d+)+)" | 33 | UPSTREAM_CHECK_GITTAGREGEX = "sdk-(?P<pver>\d+(\.\d+)+)" |
diff --git a/meta/recipes-graphics/wayland/weston-init.bb b/meta/recipes-graphics/wayland/weston-init.bb index ca3d1478dc..83f0cea49c 100644 --- a/meta/recipes-graphics/wayland/weston-init.bb +++ b/meta/recipes-graphics/wayland/weston-init.bb | |||
@@ -13,7 +13,8 @@ SRC_URI = "file://init \ | |||
13 | file://weston-autologin \ | 13 | file://weston-autologin \ |
14 | file://weston-start" | 14 | file://weston-start" |
15 | 15 | ||
16 | S = "${WORKDIR}" | 16 | S = "${WORKDIR}/sources" |
17 | UNPACKDIR = "${S}" | ||
17 | 18 | ||
18 | PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'xwayland', '', d)}" | 19 | PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'xwayland', '', d)}" |
19 | PACKAGECONFIG:append:qemuriscv64 = " use-pixman" | 20 | PACKAGECONFIG:append:qemuriscv64 = " use-pixman" |
diff --git a/meta/recipes-graphics/x11-common/xserver-nodm-init_3.0.bb b/meta/recipes-graphics/x11-common/xserver-nodm-init_3.0.bb index 4e38b4da34..cd4acf8155 100644 --- a/meta/recipes-graphics/x11-common/xserver-nodm-init_3.0.bb +++ b/meta/recipes-graphics/x11-common/xserver-nodm-init_3.0.bb | |||
@@ -12,7 +12,8 @@ SRC_URI = "file://xserver-nodm \ | |||
12 | file://capability.conf \ | 12 | file://capability.conf \ |
13 | " | 13 | " |
14 | 14 | ||
15 | S = "${WORKDIR}" | 15 | S = "${WORKDIR}/sources" |
16 | UNPACKDIR = "${S}" | ||
16 | 17 | ||
17 | # Since we refer to ROOTLESS_X which is normally enabled per-machine | 18 | # Since we refer to ROOTLESS_X which is normally enabled per-machine |
18 | PACKAGE_ARCH = "${MACHINE_ARCH}" | 19 | PACKAGE_ARCH = "${MACHINE_ARCH}" |
diff --git a/meta/recipes-graphics/xinput-calibrator/pointercal-xinput_0.0.bb b/meta/recipes-graphics/xinput-calibrator/pointercal-xinput_0.0.bb index 5c9742fb20..f4516e6975 100644 --- a/meta/recipes-graphics/xinput-calibrator/pointercal-xinput_0.0.bb +++ b/meta/recipes-graphics/xinput-calibrator/pointercal-xinput_0.0.bb | |||
@@ -7,7 +7,9 @@ LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384 | |||
7 | 7 | ||
8 | 8 | ||
9 | SRC_URI = "file://pointercal.xinput" | 9 | SRC_URI = "file://pointercal.xinput" |
10 | S = "${WORKDIR}" | 10 | |
11 | S = "${WORKDIR}/sources" | ||
12 | UNPACKDIR = "${S}" | ||
11 | 13 | ||
12 | do_install() { | 14 | do_install() { |
13 | # Only install file if it has a contents | 15 | # Only install file if it has a contents |
diff --git a/meta/recipes-graphics/xorg-lib/libxcb/0001-use-_Alignof-to-avoid-UB-in-ALIGNOF.patch b/meta/recipes-graphics/xorg-lib/libxcb/0001-use-_Alignof-to-avoid-UB-in-ALIGNOF.patch index 5b159d646d..604e987551 100644 --- a/meta/recipes-graphics/xorg-lib/libxcb/0001-use-_Alignof-to-avoid-UB-in-ALIGNOF.patch +++ b/meta/recipes-graphics/xorg-lib/libxcb/0001-use-_Alignof-to-avoid-UB-in-ALIGNOF.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From d55b6b1fa87700f3eae3a29522972d2e7be7d53e Mon Sep 17 00:00:00 2001 | 1 | From b529f25a3dcd1547f49d341a0a34fbc1379ed58d Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Sat, 14 Jan 2023 10:11:35 -0800 | 3 | Date: Sat, 14 Jan 2023 10:11:35 -0800 |
4 | Subject: [PATCH] use _Alignof to avoid UB in ALIGNOF | 4 | Subject: [PATCH] use _Alignof to avoid UB in ALIGNOF |
@@ -20,7 +20,7 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com> | |||
20 | 1 file changed, 1 insertion(+), 2 deletions(-) | 20 | 1 file changed, 1 insertion(+), 2 deletions(-) |
21 | 21 | ||
22 | diff --git a/src/c_client.py b/src/c_client.py | 22 | diff --git a/src/c_client.py b/src/c_client.py |
23 | index fd256f0..4e48f13 100644 | 23 | index b7db543..ec81758 100644 |
24 | --- a/src/c_client.py | 24 | --- a/src/c_client.py |
25 | +++ b/src/c_client.py | 25 | +++ b/src/c_client.py |
26 | @@ -288,7 +288,6 @@ def c_open(self): | 26 | @@ -288,7 +288,6 @@ def c_open(self): |
@@ -40,6 +40,3 @@ index fd256f0..4e48f13 100644 | |||
40 | % (space, | 40 | % (space, |
41 | 'char' | 41 | 'char' |
42 | if field.c_field_type == 'void' or field.type.is_switch | 42 | if field.c_field_type == 'void' or field.type.is_switch |
43 | -- | ||
44 | 2.39.0 | ||
45 | |||
diff --git a/meta/recipes-graphics/xorg-lib/libxcb_1.16.1.bb b/meta/recipes-graphics/xorg-lib/libxcb_1.17.0.bb index de3290aa9f..fcd8aad303 100644 --- a/meta/recipes-graphics/xorg-lib/libxcb_1.16.1.bb +++ b/meta/recipes-graphics/xorg-lib/libxcb_1.17.0.bb | |||
@@ -12,7 +12,7 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=d763b081cb10c223435b01e00dc0aba7" | |||
12 | SRC_URI = "http://xcb.freedesktop.org/dist/libxcb-${PV}.tar.xz \ | 12 | SRC_URI = "http://xcb.freedesktop.org/dist/libxcb-${PV}.tar.xz \ |
13 | file://0001-use-_Alignof-to-avoid-UB-in-ALIGNOF.patch" | 13 | file://0001-use-_Alignof-to-avoid-UB-in-ALIGNOF.patch" |
14 | 14 | ||
15 | SRC_URI[sha256sum] = "f24d187154c8e027b358fc7cb6588e35e33e6a92f11c668fe77396a7ae66e311" | 15 | SRC_URI[sha256sum] = "599ebf9996710fea71622e6e184f3a8ad5b43d0e5fa8c4e407123c88a59a6d55" |
16 | 16 | ||
17 | BBCLASSEXTEND = "native nativesdk" | 17 | BBCLASSEXTEND = "native nativesdk" |
18 | 18 | ||
diff --git a/meta/recipes-graphics/xorg-xserver/xserver-xf86-config_0.1.bb b/meta/recipes-graphics/xorg-xserver/xserver-xf86-config_0.1.bb index a3a4733785..0972a5dd78 100644 --- a/meta/recipes-graphics/xorg-xserver/xserver-xf86-config_0.1.bb +++ b/meta/recipes-graphics/xorg-xserver/xserver-xf86-config_0.1.bb | |||
@@ -6,7 +6,8 @@ LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384 | |||
6 | 6 | ||
7 | SRC_URI = "file://xorg.conf" | 7 | SRC_URI = "file://xorg.conf" |
8 | 8 | ||
9 | S = "${WORKDIR}" | 9 | S = "${WORKDIR}/sources" |
10 | UNPACKDIR = "${S}" | ||
10 | 11 | ||
11 | CONFFILES:${PN} = "${sysconfdir}/X11/xorg.conf" | 12 | CONFFILES:${PN} = "${sysconfdir}/X11/xorg.conf" |
12 | 13 | ||
diff --git a/meta/recipes-graphics/xorg-xserver/xserver-xorg/0001-Avoid-duplicate-definitions-of-IOPortBase.patch b/meta/recipes-graphics/xorg-xserver/xserver-xorg/0001-Avoid-duplicate-definitions-of-IOPortBase.patch index 11d5546537..e9cbc9b4da 100644 --- a/meta/recipes-graphics/xorg-xserver/xserver-xorg/0001-Avoid-duplicate-definitions-of-IOPortBase.patch +++ b/meta/recipes-graphics/xorg-xserver/xserver-xorg/0001-Avoid-duplicate-definitions-of-IOPortBase.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From ce3b8a230a3805c9b557c1f106795675bd034860 Mon Sep 17 00:00:00 2001 | 1 | From cedc797e1a0850039a25b7e387b342e54fffcc97 Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Mon, 17 Aug 2020 10:50:51 -0700 | 3 | Date: Mon, 17 Aug 2020 10:50:51 -0700 |
4 | Subject: [PATCH] Avoid duplicate definitions of IOPortBase | 4 | Subject: [PATCH] Avoid duplicate definitions of IOPortBase |
@@ -10,7 +10,6 @@ compiler.h:528: multiple definition of `IOPortBase'; | |||
10 | 10 | ||
11 | Upstream-Status: Pending | 11 | Upstream-Status: Pending |
12 | Signed-off-by: Khem Raj <raj.khem@gmail.com> | 12 | Signed-off-by: Khem Raj <raj.khem@gmail.com> |
13 | |||
14 | --- | 13 | --- |
15 | hw/xfree86/os-support/linux/lnx_video.c | 1 + | 14 | hw/xfree86/os-support/linux/lnx_video.c | 1 + |
16 | 1 file changed, 1 insertion(+) | 15 | 1 file changed, 1 insertion(+) |
diff --git a/meta/recipes-graphics/xorg-xserver/xserver-xorg/0001-xf86pciBus.c-use-Intel-ddx-only-for-pre-gen4-hardwar.patch b/meta/recipes-graphics/xorg-xserver/xserver-xorg/0001-xf86pciBus.c-use-Intel-ddx-only-for-pre-gen4-hardwar.patch index d05eec5bb9..d1516c2f52 100644 --- a/meta/recipes-graphics/xorg-xserver/xserver-xorg/0001-xf86pciBus.c-use-Intel-ddx-only-for-pre-gen4-hardwar.patch +++ b/meta/recipes-graphics/xorg-xserver/xserver-xorg/0001-xf86pciBus.c-use-Intel-ddx-only-for-pre-gen4-hardwar.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From d77cdc5e1eee26821ab98c947abea53fb7b18fe5 Mon Sep 17 00:00:00 2001 | 1 | From ad8967de36e6e2a185b71ce1d701448cda4ef3e2 Mon Sep 17 00:00:00 2001 |
2 | From: California Sullivan <california.l.sullivan@intel.com> | 2 | From: California Sullivan <california.l.sullivan@intel.com> |
3 | Date: Fri, 16 Mar 2018 17:23:11 -0700 | 3 | Date: Fri, 16 Mar 2018 17:23:11 -0700 |
4 | Subject: [PATCH] xf86pciBus.c: use Intel ddx only for pre-gen4 hardware | 4 | Subject: [PATCH] xf86pciBus.c: use Intel ddx only for pre-gen4 hardware |
@@ -20,10 +20,10 @@ Signed-off-by: California Sullivan <california.l.sullivan@intel.com> | |||
20 | 1 file changed, 17 insertions(+), 1 deletion(-) | 20 | 1 file changed, 17 insertions(+), 1 deletion(-) |
21 | 21 | ||
22 | diff --git a/hw/xfree86/common/xf86pciBus.c b/hw/xfree86/common/xf86pciBus.c | 22 | diff --git a/hw/xfree86/common/xf86pciBus.c b/hw/xfree86/common/xf86pciBus.c |
23 | index e61ae0cd4..d70c99197 100644 | 23 | index aeeed8b..db705bf 100644 |
24 | --- a/hw/xfree86/common/xf86pciBus.c | 24 | --- a/hw/xfree86/common/xf86pciBus.c |
25 | +++ b/hw/xfree86/common/xf86pciBus.c | 25 | +++ b/hw/xfree86/common/xf86pciBus.c |
26 | @@ -1173,7 +1173,23 @@ xf86VideoPtrToDriverList(struct pci_device *dev, | 26 | @@ -1174,7 +1174,23 @@ xf86VideoPtrToDriverList(struct pci_device *dev, XF86MatchedDrivers *md) |
27 | case 0x0bef: | 27 | case 0x0bef: |
28 | /* Use fbdev/vesa driver on Oaktrail, Medfield, CDV */ | 28 | /* Use fbdev/vesa driver on Oaktrail, Medfield, CDV */ |
29 | break; | 29 | break; |
@@ -48,6 +48,3 @@ index e61ae0cd4..d70c99197 100644 | |||
48 | driverList[0] = "intel"; | 48 | driverList[0] = "intel"; |
49 | break; | 49 | break; |
50 | } | 50 | } |
51 | -- | ||
52 | 2.14.3 | ||
53 | |||
diff --git a/meta/recipes-graphics/xorg-xserver/xserver-xorg_21.1.11.bb b/meta/recipes-graphics/xorg-xserver/xserver-xorg_21.1.13.bb index 6506d775ca..1f18c22fa8 100644 --- a/meta/recipes-graphics/xorg-xserver/xserver-xorg_21.1.11.bb +++ b/meta/recipes-graphics/xorg-xserver/xserver-xorg_21.1.13.bb | |||
@@ -3,7 +3,7 @@ require xserver-xorg.inc | |||
3 | SRC_URI += "file://0001-xf86pciBus.c-use-Intel-ddx-only-for-pre-gen4-hardwar.patch \ | 3 | SRC_URI += "file://0001-xf86pciBus.c-use-Intel-ddx-only-for-pre-gen4-hardwar.patch \ |
4 | file://0001-Avoid-duplicate-definitions-of-IOPortBase.patch \ | 4 | file://0001-Avoid-duplicate-definitions-of-IOPortBase.patch \ |
5 | " | 5 | " |
6 | SRC_URI[sha256sum] = "1d3dadbd57fb86b16a018e9f5f957aeeadf744f56c0553f55737628d06d326ef" | 6 | SRC_URI[sha256sum] = "b45a02d5943f72236a360d3cc97e75134aa4f63039ff88c04686b508a3dc740c" |
7 | 7 | ||
8 | # These extensions are now integrated into the server, so declare the migration | 8 | # These extensions are now integrated into the server, so declare the migration |
9 | # path for in-place upgrades. | 9 | # path for in-place upgrades. |
diff --git a/meta/recipes-graphics/xwayland/xwayland_23.2.6.bb b/meta/recipes-graphics/xwayland/xwayland_24.1.0.bb index f51429bc8c..251b3af857 100644 --- a/meta/recipes-graphics/xwayland/xwayland_23.2.6.bb +++ b/meta/recipes-graphics/xwayland/xwayland_24.1.0.bb | |||
@@ -10,7 +10,7 @@ LICENSE = "MIT" | |||
10 | LIC_FILES_CHKSUM = "file://COPYING;md5=5df87950af51ac2c5822094553ea1880" | 10 | LIC_FILES_CHKSUM = "file://COPYING;md5=5df87950af51ac2c5822094553ea1880" |
11 | 11 | ||
12 | SRC_URI = "https://www.x.org/archive/individual/xserver/xwayland-${PV}.tar.xz" | 12 | SRC_URI = "https://www.x.org/archive/individual/xserver/xwayland-${PV}.tar.xz" |
13 | SRC_URI[sha256sum] = "1c9a366b4e7ccadba0f9bd313c59eae12d23bd72543b22a26eaf8b20835cfc6d" | 13 | SRC_URI[sha256sum] = "bef21c4f18807a4ed571c4e2df60ab63b5466bbd502ecceb2485b892ab76dcc2" |
14 | 14 | ||
15 | UPSTREAM_CHECK_REGEX = "xwayland-(?P<pver>\d+(\.(?!90\d)\d+)+)\.tar" | 15 | UPSTREAM_CHECK_REGEX = "xwayland-(?P<pver>\d+(\.(?!90\d)\d+)+)\.tar" |
16 | 16 | ||
diff --git a/meta/recipes-kernel/dtc/python3-dtschema-wrapper_2021.10.bb b/meta/recipes-kernel/dtc/python3-dtschema-wrapper_2021.10.bb index 332525f5c9..a766b00bef 100644 --- a/meta/recipes-kernel/dtc/python3-dtschema-wrapper_2021.10.bb +++ b/meta/recipes-kernel/dtc/python3-dtschema-wrapper_2021.10.bb | |||
@@ -7,6 +7,9 @@ SRC_URI = "file://dt-doc-validate \ | |||
7 | file://dt-mk-schema \ | 7 | file://dt-mk-schema \ |
8 | file://dt-validate" | 8 | file://dt-validate" |
9 | 9 | ||
10 | S = "${WORKDIR}/sources" | ||
11 | UNPACKDIR = "${S}" | ||
12 | |||
10 | do_install() { | 13 | do_install() { |
11 | install -d ${D}${bindir}/ | 14 | install -d ${D}${bindir}/ |
12 | install -m 755 ${UNPACKDIR}/dt-doc-validate ${D}${bindir}/ | 15 | install -m 755 ${UNPACKDIR}/dt-doc-validate ${D}${bindir}/ |
diff --git a/meta/recipes-kernel/kexec/kexec-tools/0001-x86-linux-setup.c-Use-POSIX-basename-API.patch b/meta/recipes-kernel/kexec/kexec-tools/0001-x86-linux-setup.c-Use-POSIX-basename-API.patch new file mode 100644 index 0000000000..e223f45998 --- /dev/null +++ b/meta/recipes-kernel/kexec/kexec-tools/0001-x86-linux-setup.c-Use-POSIX-basename-API.patch | |||
@@ -0,0 +1,54 @@ | |||
1 | From 32c8ffa7ace6f1b7e63f9ddffab00b00c36a7b57 Mon Sep 17 00:00:00 2001 | ||
2 | From: Khem Raj <raj.khem@gmail.com> | ||
3 | Date: Wed, 15 May 2024 21:18:08 -0700 | ||
4 | Subject: [PATCH] x86-linux-setup.c: Use POSIX basename API | ||
5 | |||
6 | Musl C library only supports POSIX basename function. while glibc has | ||
7 | both GNU extention as well as POSIX basename implemented. Switch to | ||
8 | using posix version, so it can work across musl and glibc | ||
9 | |||
10 | basename prototype has been removed from string.h from latest musl [1] | ||
11 | compilers e.g. clang-18/GCC-14 flags the absense of prototype as error. | ||
12 | therefore include libgen.h for providing it. | ||
13 | |||
14 | [1] https://git.musl-libc.org/cgit/musl/commit/?id=725e17ed6dff4d0cd22487bb64470881e86a92e7 | ||
15 | |||
16 | Upstream-Status: Submitted [https://lists.infradead.org/pipermail/kexec/2024-May/030034.html] | ||
17 | Signed-off-by: Khem Raj <raj.khem@gmail.com> | ||
18 | --- | ||
19 | kexec/arch/i386/x86-linux-setup.c | 9 ++++++--- | ||
20 | 1 file changed, 6 insertions(+), 3 deletions(-) | ||
21 | |||
22 | diff --git a/kexec/arch/i386/x86-linux-setup.c b/kexec/arch/i386/x86-linux-setup.c | ||
23 | index 9a281dc..73251b9 100644 | ||
24 | --- a/kexec/arch/i386/x86-linux-setup.c | ||
25 | +++ b/kexec/arch/i386/x86-linux-setup.c | ||
26 | @@ -14,6 +14,7 @@ | ||
27 | * | ||
28 | */ | ||
29 | #define _GNU_SOURCE | ||
30 | +#include <libgen.h> | ||
31 | #include <stdint.h> | ||
32 | #include <stdio.h> | ||
33 | #include <string.h> | ||
34 | @@ -329,12 +330,14 @@ static int add_edd_entry(struct x86_linux_param_header *real_mode, | ||
35 | memset(edd_info, 0, sizeof(struct edd_info)); | ||
36 | |||
37 | /* extract the device number */ | ||
38 | - if (sscanf(basename(sysfs_name), "int13_dev%hhx", &devnum) != 1) { | ||
39 | + char* sysfs_name_copy = strdup(sysfs_name); | ||
40 | + if (sscanf(basename(sysfs_name_copy), "int13_dev%hhx", &devnum) != 1) { | ||
41 | fprintf(stderr, "Invalid format of int13_dev dir " | ||
42 | - "entry: %s\n", basename(sysfs_name)); | ||
43 | + "entry: %s\n", basename(sysfs_name_copy)); | ||
44 | + free(sysfs_name_copy); | ||
45 | return -1; | ||
46 | } | ||
47 | - | ||
48 | + free(sysfs_name_copy); | ||
49 | /* if there's a MBR signature, then add it */ | ||
50 | if (file_scanf(sysfs_name, "mbr_signature", "0x%x", &mbr_sig) == 1) { | ||
51 | real_mode->edd_mbr_sig_buffer[*current_mbr] = mbr_sig; | ||
52 | -- | ||
53 | 2.45.1 | ||
54 | |||
diff --git a/meta/recipes-kernel/kexec/kexec-tools/0003-kexec-ARM-Fix-add_buffer_phys_virt-align-issue.patch b/meta/recipes-kernel/kexec/kexec-tools/0003-kexec-ARM-Fix-add_buffer_phys_virt-align-issue.patch index e874a8b4f1..489b109285 100644 --- a/meta/recipes-kernel/kexec/kexec-tools/0003-kexec-ARM-Fix-add_buffer_phys_virt-align-issue.patch +++ b/meta/recipes-kernel/kexec/kexec-tools/0003-kexec-ARM-Fix-add_buffer_phys_virt-align-issue.patch | |||
@@ -8,7 +8,7 @@ is used by MMU, the "SECTION_SIZE" is defined with | |||
8 | (1 << 21), but 'add_buffer_phys_virt()' hardcode this | 8 | (1 << 21), but 'add_buffer_phys_virt()' hardcode this |
9 | to (1 << 20). | 9 | to (1 << 20). |
10 | 10 | ||
11 | Upstream-Status: Pending | 11 | Upstream-Status: Submitted [via email to horms@kernel.org,http://lists.infradead.org/pipermail/kexec/2024-April/029903.html] |
12 | 12 | ||
13 | Suggested-By:fredrik.markstrom@gmail.com | 13 | Suggested-By:fredrik.markstrom@gmail.com |
14 | Signed-off-by: Haiqing Bai <Haiqing.Bai@windriver.com> | 14 | Signed-off-by: Haiqing Bai <Haiqing.Bai@windriver.com> |
diff --git a/meta/recipes-kernel/kexec/kexec-tools_2.0.28.bb b/meta/recipes-kernel/kexec/kexec-tools_2.0.28.bb index c39fff834d..b60c51df4a 100644 --- a/meta/recipes-kernel/kexec/kexec-tools_2.0.28.bb +++ b/meta/recipes-kernel/kexec/kexec-tools_2.0.28.bb | |||
@@ -18,6 +18,7 @@ SRC_URI = "${KERNELORG_MIRROR}/linux/utils/kernel/kexec/kexec-tools-${PV}.tar.gz | |||
18 | file://0005-Disable-PIE-during-link.patch \ | 18 | file://0005-Disable-PIE-during-link.patch \ |
19 | file://0001-arm64-kexec-disabled-check-if-kaslr-seed-dtb-propert.patch \ | 19 | file://0001-arm64-kexec-disabled-check-if-kaslr-seed-dtb-propert.patch \ |
20 | file://Fix-building-on-x86_64-with-binutils-2.41.patch \ | 20 | file://Fix-building-on-x86_64-with-binutils-2.41.patch \ |
21 | file://0001-x86-linux-setup.c-Use-POSIX-basename-API.patch \ | ||
21 | " | 22 | " |
22 | 23 | ||
23 | SRC_URI[sha256sum] = "f33d2660b3e38d25a127e87097978e0f7a9a73ab5151a29eb80974d169ff6a29" | 24 | SRC_URI[sha256sum] = "f33d2660b3e38d25a127e87097978e0f7a9a73ab5151a29eb80974d169ff6a29" |
diff --git a/meta/recipes-kernel/kmod/depmodwrapper-cross_1.0.bb b/meta/recipes-kernel/kmod/depmodwrapper-cross_1.0.bb index 6c0739d64f..29f34d7f36 100644 --- a/meta/recipes-kernel/kmod/depmodwrapper-cross_1.0.bb +++ b/meta/recipes-kernel/kmod/depmodwrapper-cross_1.0.bb | |||
@@ -1,7 +1,8 @@ | |||
1 | SUMMARY = "Wrapper script for the Linux kernel module dependency indexer" | 1 | SUMMARY = "Wrapper script for the Linux kernel module dependency indexer" |
2 | LICENSE = "MIT" | 2 | LICENSE = "MIT" |
3 | 3 | ||
4 | S = "${WORKDIR}" | 4 | S = "${WORKDIR}/sources" |
5 | UNPACKDIR = "${S}" | ||
5 | 6 | ||
6 | INHIBIT_DEFAULT_DEPS = "1" | 7 | INHIBIT_DEFAULT_DEPS = "1" |
7 | # The kernel and the staging dir for it is machine specific | 8 | # The kernel and the staging dir for it is machine specific |
diff --git a/meta/recipes-kernel/kmod/kmod_31.bb b/meta/recipes-kernel/kmod/kmod_32.bb index 718a5565b4..1c4e5a94db 100644 --- a/meta/recipes-kernel/kmod/kmod_31.bb +++ b/meta/recipes-kernel/kmod/kmod_32.bb | |||
@@ -15,7 +15,7 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=a6f89e2100d9b6cdffcea4f398e37343 \ | |||
15 | " | 15 | " |
16 | inherit autotools bash-completion gtk-doc pkgconfig manpages update-alternatives | 16 | inherit autotools bash-completion gtk-doc pkgconfig manpages update-alternatives |
17 | 17 | ||
18 | SRCREV = "aff617ea871d0568cc491bd116c0be1e857463bb" | 18 | SRCREV = "41faa59711742c1476d59985011ee0f27ed91d30" |
19 | 19 | ||
20 | SRC_URI = "git://git.kernel.org/pub/scm/utils/kernel/kmod/kmod.git;branch=master;protocol=https \ | 20 | SRC_URI = "git://git.kernel.org/pub/scm/utils/kernel/kmod/kmod.git;branch=master;protocol=https \ |
21 | file://depmod-search.conf \ | 21 | file://depmod-search.conf \ |
@@ -52,8 +52,9 @@ do_install:append () { | |||
52 | install -dm755 ${D}${base_bindir} | 52 | install -dm755 ${D}${base_bindir} |
53 | install -dm755 ${D}${base_sbindir} | 53 | install -dm755 ${D}${base_sbindir} |
54 | # add symlinks to kmod | 54 | # add symlinks to kmod |
55 | ln -rs ${D}${base_bindir}/kmod ${D}${base_bindir}/lsmod | 55 | [ -e ${D}${base_bindir}/lsmod ] || ln -rs ${D}${base_bindir}/kmod ${D}${base_bindir}/lsmod |
56 | for tool in insmod rmmod depmod modinfo modprobe; do | 56 | for tool in insmod rmmod depmod modinfo modprobe; do |
57 | rm -f ${D}${base_bindir}/${tool} | ||
57 | ln -rs ${D}${base_bindir}/kmod ${D}${base_sbindir}/${tool} | 58 | ln -rs ${D}${base_bindir}/kmod ${D}${base_sbindir}/${tool} |
58 | done | 59 | done |
59 | # configuration directories | 60 | # configuration directories |
diff --git a/meta/recipes-kernel/linux-libc-headers/linux-libc-headers_6.6.bb b/meta/recipes-kernel/linux-libc-headers/linux-libc-headers_6.9.bb index d68de6ec7e..3dd4d547d5 100644 --- a/meta/recipes-kernel/linux-libc-headers/linux-libc-headers_6.6.bb +++ b/meta/recipes-kernel/linux-libc-headers/linux-libc-headers_6.9.bb | |||
@@ -12,6 +12,6 @@ SRC_URI += "\ | |||
12 | 12 | ||
13 | LIC_FILES_CHKSUM = "file://COPYING;md5=6bc538ed5bd9a7fc9398086aedcd7e46" | 13 | LIC_FILES_CHKSUM = "file://COPYING;md5=6bc538ed5bd9a7fc9398086aedcd7e46" |
14 | 14 | ||
15 | SRC_URI[sha256sum] = "d926a06c63dd8ac7df3f86ee1ffc2ce2a3b81a2d168484e76b5b389aba8e56d0" | 15 | SRC_URI[sha256sum] = "24fa01fb989c7a3e28453f117799168713766e119c5381dac30115f18f268149" |
16 | 16 | ||
17 | 17 | ||
diff --git a/meta/recipes-kernel/linux/linux-dummy.bb b/meta/recipes-kernel/linux/linux-dummy.bb index 2396f46202..3e9a563cad 100644 --- a/meta/recipes-kernel/linux/linux-dummy.bb +++ b/meta/recipes-kernel/linux/linux-dummy.bb | |||
@@ -30,7 +30,9 @@ COMPATIBLE_HOST = ".*-linux" | |||
30 | 30 | ||
31 | 31 | ||
32 | SRC_URI = "file://COPYING.GPL" | 32 | SRC_URI = "file://COPYING.GPL" |
33 | S = "${WORKDIR}" | 33 | |
34 | S = "${WORKDIR}/sources" | ||
35 | UNPACKDIR = "${S}" | ||
34 | 36 | ||
35 | do_configure() { | 37 | do_configure() { |
36 | : | 38 | : |
diff --git a/meta/recipes-kernel/linux/linux-yocto-rt_6.6.bb b/meta/recipes-kernel/linux/linux-yocto-rt_6.6.bb index 53f413407d..f8d47a9dba 100644 --- a/meta/recipes-kernel/linux/linux-yocto-rt_6.6.bb +++ b/meta/recipes-kernel/linux/linux-yocto-rt_6.6.bb | |||
@@ -14,13 +14,13 @@ python () { | |||
14 | raise bb.parse.SkipRecipe("Set PREFERRED_PROVIDER_virtual/kernel to linux-yocto-rt to enable it") | 14 | raise bb.parse.SkipRecipe("Set PREFERRED_PROVIDER_virtual/kernel to linux-yocto-rt to enable it") |
15 | } | 15 | } |
16 | 16 | ||
17 | SRCREV_machine ?= "9bdb085cb6fcfd49586b980ba27cb81e5c76689e" | 17 | SRCREV_machine ?= "59b2635b04e2ef8162e52f82e848b81073cea708" |
18 | SRCREV_meta ?= "a78c74a3510067017dda1926f88bd914f0a053b3" | 18 | SRCREV_meta ?= "66bebb6789d02e775d4c93d7ca4bf79c2ead4b28" |
19 | 19 | ||
20 | SRC_URI = "git://git.yoctoproject.org/linux-yocto.git;branch=${KBRANCH};name=machine;protocol=https \ | 20 | SRC_URI = "git://git.yoctoproject.org/linux-yocto.git;branch=${KBRANCH};name=machine;protocol=https \ |
21 | git://git.yoctoproject.org/yocto-kernel-cache;type=kmeta;name=meta;branch=yocto-6.6;destsuffix=${KMETA};protocol=https" | 21 | git://git.yoctoproject.org/yocto-kernel-cache;type=kmeta;name=meta;branch=yocto-6.6;destsuffix=${KMETA};protocol=https" |
22 | 22 | ||
23 | LINUX_VERSION ?= "6.6.29" | 23 | LINUX_VERSION ?= "6.6.32" |
24 | 24 | ||
25 | LIC_FILES_CHKSUM = "file://COPYING;md5=6bc538ed5bd9a7fc9398086aedcd7e46" | 25 | LIC_FILES_CHKSUM = "file://COPYING;md5=6bc538ed5bd9a7fc9398086aedcd7e46" |
26 | 26 | ||
diff --git a/meta/recipes-kernel/linux/linux-yocto-tiny_6.6.bb b/meta/recipes-kernel/linux/linux-yocto-tiny_6.6.bb index 44c9a7b1e0..7378a37521 100644 --- a/meta/recipes-kernel/linux/linux-yocto-tiny_6.6.bb +++ b/meta/recipes-kernel/linux/linux-yocto-tiny_6.6.bb | |||
@@ -8,7 +8,7 @@ require recipes-kernel/linux/linux-yocto.inc | |||
8 | # CVE exclusions | 8 | # CVE exclusions |
9 | include recipes-kernel/linux/cve-exclusion_6.6.inc | 9 | include recipes-kernel/linux/cve-exclusion_6.6.inc |
10 | 10 | ||
11 | LINUX_VERSION ?= "6.6.29" | 11 | LINUX_VERSION ?= "6.6.32" |
12 | LIC_FILES_CHKSUM = "file://COPYING;md5=6bc538ed5bd9a7fc9398086aedcd7e46" | 12 | LIC_FILES_CHKSUM = "file://COPYING;md5=6bc538ed5bd9a7fc9398086aedcd7e46" |
13 | 13 | ||
14 | DEPENDS += "${@bb.utils.contains('ARCH', 'x86', 'elfutils-native', '', d)}" | 14 | DEPENDS += "${@bb.utils.contains('ARCH', 'x86', 'elfutils-native', '', d)}" |
@@ -17,8 +17,8 @@ DEPENDS += "openssl-native util-linux-native" | |||
17 | KMETA = "kernel-meta" | 17 | KMETA = "kernel-meta" |
18 | KCONF_BSP_AUDIT_LEVEL = "2" | 18 | KCONF_BSP_AUDIT_LEVEL = "2" |
19 | 19 | ||
20 | SRCREV_machine ?= "7c76aad68f6d1247e72f024f1e0291067a404f8d" | 20 | SRCREV_machine ?= "9576b5b9f8e3c78e6c315f475def18e5c29e475a" |
21 | SRCREV_meta ?= "a78c74a3510067017dda1926f88bd914f0a053b3" | 21 | SRCREV_meta ?= "66bebb6789d02e775d4c93d7ca4bf79c2ead4b28" |
22 | 22 | ||
23 | PV = "${LINUX_VERSION}+git" | 23 | PV = "${LINUX_VERSION}+git" |
24 | 24 | ||
diff --git a/meta/recipes-kernel/linux/linux-yocto_6.6.bb b/meta/recipes-kernel/linux/linux-yocto_6.6.bb index 817639bce0..fdb7702594 100644 --- a/meta/recipes-kernel/linux/linux-yocto_6.6.bb +++ b/meta/recipes-kernel/linux/linux-yocto_6.6.bb | |||
@@ -18,25 +18,25 @@ KBRANCH:qemux86-64 ?= "v6.6/standard/base" | |||
18 | KBRANCH:qemuloongarch64 ?= "v6.6/standard/base" | 18 | KBRANCH:qemuloongarch64 ?= "v6.6/standard/base" |
19 | KBRANCH:qemumips64 ?= "v6.6/standard/mti-malta64" | 19 | KBRANCH:qemumips64 ?= "v6.6/standard/mti-malta64" |
20 | 20 | ||
21 | SRCREV_machine:qemuarm ?= "0b90bbe1359b3fd590780119f19dbd0a01e58560" | 21 | SRCREV_machine:qemuarm ?= "6e4ec0ec5052e3a107ec7e5977ea9282d3642ea7" |
22 | SRCREV_machine:qemuarm64 ?= "7c76aad68f6d1247e72f024f1e0291067a404f8d" | 22 | SRCREV_machine:qemuarm64 ?= "9576b5b9f8e3c78e6c315f475def18e5c29e475a" |
23 | SRCREV_machine:qemuloongarch64 ?= "7c76aad68f6d1247e72f024f1e0291067a404f8d" | 23 | SRCREV_machine:qemuloongarch64 ?= "9576b5b9f8e3c78e6c315f475def18e5c29e475a" |
24 | SRCREV_machine:qemumips ?= "ab44de80a320e30a49150ea74554a937affaf78d" | 24 | SRCREV_machine:qemumips ?= "cab976b23497344b74b7e4cbcb5df732f8630150" |
25 | SRCREV_machine:qemuppc ?= "7c76aad68f6d1247e72f024f1e0291067a404f8d" | 25 | SRCREV_machine:qemuppc ?= "9576b5b9f8e3c78e6c315f475def18e5c29e475a" |
26 | SRCREV_machine:qemuriscv64 ?= "7c76aad68f6d1247e72f024f1e0291067a404f8d" | 26 | SRCREV_machine:qemuriscv64 ?= "9576b5b9f8e3c78e6c315f475def18e5c29e475a" |
27 | SRCREV_machine:qemuriscv32 ?= "7c76aad68f6d1247e72f024f1e0291067a404f8d" | 27 | SRCREV_machine:qemuriscv32 ?= "9576b5b9f8e3c78e6c315f475def18e5c29e475a" |
28 | SRCREV_machine:qemux86 ?= "7c76aad68f6d1247e72f024f1e0291067a404f8d" | 28 | SRCREV_machine:qemux86 ?= "9576b5b9f8e3c78e6c315f475def18e5c29e475a" |
29 | SRCREV_machine:qemux86-64 ?= "7c76aad68f6d1247e72f024f1e0291067a404f8d" | 29 | SRCREV_machine:qemux86-64 ?= "9576b5b9f8e3c78e6c315f475def18e5c29e475a" |
30 | SRCREV_machine:qemumips64 ?= "75da5d44c2e54a85a1221ea7d0c04be9aefb7fa1" | 30 | SRCREV_machine:qemumips64 ?= "aa0c0197b3a0628992e959708a2ad015603e93ad" |
31 | SRCREV_machine ?= "7c76aad68f6d1247e72f024f1e0291067a404f8d" | 31 | SRCREV_machine ?= "9576b5b9f8e3c78e6c315f475def18e5c29e475a" |
32 | SRCREV_meta ?= "a78c74a3510067017dda1926f88bd914f0a053b3" | 32 | SRCREV_meta ?= "66bebb6789d02e775d4c93d7ca4bf79c2ead4b28" |
33 | 33 | ||
34 | # set your preferred provider of linux-yocto to 'linux-yocto-upstream', and you'll | 34 | # set your preferred provider of linux-yocto to 'linux-yocto-upstream', and you'll |
35 | # get the <version>/base branch, which is pure upstream -stable, and the same | 35 | # get the <version>/base branch, which is pure upstream -stable, and the same |
36 | # meta SRCREV as the linux-yocto-standard builds. Select your version using the | 36 | # meta SRCREV as the linux-yocto-standard builds. Select your version using the |
37 | # normal PREFERRED_VERSION settings. | 37 | # normal PREFERRED_VERSION settings. |
38 | BBCLASSEXTEND = "devupstream:target" | 38 | BBCLASSEXTEND = "devupstream:target" |
39 | SRCREV_machine:class-devupstream ?= "a3463f08104612fc979c41fa54733e925205d3d7" | 39 | SRCREV_machine:class-devupstream ?= "91de249b6804473d49984030836381c3b9b3cfb0" |
40 | PN:class-devupstream = "linux-yocto-upstream" | 40 | PN:class-devupstream = "linux-yocto-upstream" |
41 | KBRANCH:class-devupstream = "v6.6/base" | 41 | KBRANCH:class-devupstream = "v6.6/base" |
42 | 42 | ||
@@ -44,7 +44,7 @@ SRC_URI = "git://git.yoctoproject.org/linux-yocto.git;name=machine;branch=${KBRA | |||
44 | git://git.yoctoproject.org/yocto-kernel-cache;type=kmeta;name=meta;branch=yocto-6.6;destsuffix=${KMETA};protocol=https" | 44 | git://git.yoctoproject.org/yocto-kernel-cache;type=kmeta;name=meta;branch=yocto-6.6;destsuffix=${KMETA};protocol=https" |
45 | 45 | ||
46 | LIC_FILES_CHKSUM = "file://COPYING;md5=6bc538ed5bd9a7fc9398086aedcd7e46" | 46 | LIC_FILES_CHKSUM = "file://COPYING;md5=6bc538ed5bd9a7fc9398086aedcd7e46" |
47 | LINUX_VERSION ?= "6.6.29" | 47 | LINUX_VERSION ?= "6.6.32" |
48 | 48 | ||
49 | PV = "${LINUX_VERSION}+git" | 49 | PV = "${LINUX_VERSION}+git" |
50 | 50 | ||
@@ -64,6 +64,8 @@ KERNEL_FEATURES:append:qemux86-64=" cfg/sound.scc cfg/paravirt_kvm.scc" | |||
64 | KERNEL_FEATURES:append = " ${@bb.utils.contains("TUNE_FEATURES", "mx32", " cfg/x32.scc", "", d)}" | 64 | KERNEL_FEATURES:append = " ${@bb.utils.contains("TUNE_FEATURES", "mx32", " cfg/x32.scc", "", d)}" |
65 | KERNEL_FEATURES:append = " ${@bb.utils.contains("DISTRO_FEATURES", "ptest", " features/scsi/scsi-debug.scc features/nf_tables/nft_test.scc", "", d)}" | 65 | KERNEL_FEATURES:append = " ${@bb.utils.contains("DISTRO_FEATURES", "ptest", " features/scsi/scsi-debug.scc features/nf_tables/nft_test.scc", "", d)}" |
66 | KERNEL_FEATURES:append = " ${@bb.utils.contains("DISTRO_FEATURES", "ptest", " features/gpio/mockup.scc features/gpio/sim.scc", "", d)}" | 66 | KERNEL_FEATURES:append = " ${@bb.utils.contains("DISTRO_FEATURES", "ptest", " features/gpio/mockup.scc features/gpio/sim.scc", "", d)}" |
67 | # libteam ptests from meta-oe needs it | ||
68 | KERNEL_FEATURES:append = " ${@bb.utils.contains("DISTRO_FEATURES", "ptest", " features/net/team/team.scc", "", d)}" | ||
67 | KERNEL_FEATURES:append:powerpc =" arch/powerpc/powerpc-debug.scc" | 69 | KERNEL_FEATURES:append:powerpc =" arch/powerpc/powerpc-debug.scc" |
68 | KERNEL_FEATURES:append:powerpc64 =" arch/powerpc/powerpc-debug.scc" | 70 | KERNEL_FEATURES:append:powerpc64 =" arch/powerpc/powerpc-debug.scc" |
69 | KERNEL_FEATURES:append:powerpc64le =" arch/powerpc/powerpc-debug.scc" | 71 | KERNEL_FEATURES:append:powerpc64le =" arch/powerpc/powerpc-debug.scc" |
diff --git a/meta/recipes-kernel/make-mod-scripts/make-mod-scripts_1.0.bb b/meta/recipes-kernel/make-mod-scripts/make-mod-scripts_1.0.bb index a91680d497..581e90e9b4 100644 --- a/meta/recipes-kernel/make-mod-scripts/make-mod-scripts_1.0.bb +++ b/meta/recipes-kernel/make-mod-scripts/make-mod-scripts_1.0.bb | |||
@@ -8,7 +8,8 @@ inherit pkgconfig | |||
8 | 8 | ||
9 | PACKAGE_ARCH = "${MACHINE_ARCH}" | 9 | PACKAGE_ARCH = "${MACHINE_ARCH}" |
10 | 10 | ||
11 | S = "${WORKDIR}" | 11 | S = "${WORKDIR}/sources" |
12 | UNPACKDIR = "${S}" | ||
12 | 13 | ||
13 | do_configure[depends] += "virtual/kernel:do_shared_workdir openssl-native:do_populate_sysroot" | 14 | do_configure[depends] += "virtual/kernel:do_shared_workdir openssl-native:do_populate_sysroot" |
14 | do_compile[depends] += "virtual/kernel:do_compile_kernelmodules" | 15 | do_compile[depends] += "virtual/kernel:do_compile_kernelmodules" |
diff --git a/meta/recipes-kernel/modutils-initscripts/modutils-initscripts.bb b/meta/recipes-kernel/modutils-initscripts/modutils-initscripts.bb index 0a05770408..49268445a8 100644 --- a/meta/recipes-kernel/modutils-initscripts/modutils-initscripts.bb +++ b/meta/recipes-kernel/modutils-initscripts/modutils-initscripts.bb | |||
@@ -4,8 +4,8 @@ LICENSE = "MIT" | |||
4 | LIC_FILES_CHKSUM = "file://modutils.sh;beginline=3;endline=3;md5=b2dccaa94b3629a08bfb4f983cad6f89" | 4 | LIC_FILES_CHKSUM = "file://modutils.sh;beginline=3;endline=3;md5=b2dccaa94b3629a08bfb4f983cad6f89" |
5 | SRC_URI = "file://modutils.sh" | 5 | SRC_URI = "file://modutils.sh" |
6 | 6 | ||
7 | 7 | S = "${WORKDIR}/sources" | |
8 | S = "${WORKDIR}" | 8 | UNPACKDIR = "${S}" |
9 | 9 | ||
10 | INITSCRIPT_NAME = "modutils.sh" | 10 | INITSCRIPT_NAME = "modutils.sh" |
11 | INITSCRIPT_PARAMS = "start 06 S ." | 11 | INITSCRIPT_PARAMS = "start 06 S ." |
diff --git a/meta/recipes-kernel/systemtap/systemtap/0001-Makefile.am-remove-runtime-linux-uprobes-and-runtime.patch b/meta/recipes-kernel/systemtap/systemtap/0001-Makefile.am-remove-runtime-linux-uprobes-and-runtime.patch deleted file mode 100644 index 2d46a3962d..0000000000 --- a/meta/recipes-kernel/systemtap/systemtap/0001-Makefile.am-remove-runtime-linux-uprobes-and-runtime.patch +++ /dev/null | |||
@@ -1,40 +0,0 @@ | |||
1 | From 5a01e28bd806326b2143e3e6bb28d4780c5d879d Mon Sep 17 00:00:00 2001 | ||
2 | From: Victor Kamensky <victor.kamensky7@gmail.com> | ||
3 | Date: Sun, 3 Dec 2023 18:40:05 -0800 | ||
4 | Subject: [PATCH] Makefile.am: remove runtime/linux/uprobes and | ||
5 | runtime/linux/uprobes2 install | ||
6 | |||
7 | "PR30434 continuation: Removed old uprobes, uprobes2 implementation, | ||
8 | uprobes-inc.h & any mentions of CONFIG_UTRACE." commit removed uprobes, | ||
9 | and uprobes2 sources and directories, but Makefile.am still tries to | ||
10 | install them. In fact after failing to 'cd' into runtime/linux/uprobes | ||
11 | directory it copies top level *.[ch] files into | ||
12 | ${prefix}/share/systemtap/runtime/linux/uprobes directory. | ||
13 | |||
14 | The issue was caught by OpenEmbedded project do_package_qa checks. | ||
15 | |||
16 | Signed-off-by: Victor Kamensky <victor.kamensky7@gmail.com> | ||
17 | |||
18 | Upstream-Status: Submitted [https://sourceware.org/pipermail/systemtap/2023q4/027880.html] | ||
19 | --- | ||
20 | Makefile.am | 4 ---- | ||
21 | 1 file changed, 4 deletions(-) | ||
22 | |||
23 | diff --git a/Makefile.am b/Makefile.am | ||
24 | index 5737c6b20..2ba896088 100644 | ||
25 | --- a/Makefile.am | ||
26 | +++ b/Makefile.am | ||
27 | @@ -277,10 +277,6 @@ endif | ||
28 | do $(INSTALL_DATA) -D $$f $(DESTDIR)$(pkgdatadir)/runtime/softfloat/$$f; done) | ||
29 | (cd $(srcdir)/runtime/linux; for f in *.[ch]; \ | ||
30 | do $(INSTALL_DATA) -D $$f $(DESTDIR)$(pkgdatadir)/runtime/linux/$$f; done) | ||
31 | - (cd $(srcdir)/runtime/linux/uprobes; for f in Makefile *.[ch]; \ | ||
32 | - do $(INSTALL_DATA) -D $$f $(DESTDIR)$(pkgdatadir)/runtime/linux/uprobes/$$f; done) | ||
33 | - (cd $(srcdir)/runtime/linux/uprobes2; for f in *.[ch]; \ | ||
34 | - do $(INSTALL_DATA) -D $$f $(DESTDIR)$(pkgdatadir)/runtime/linux/uprobes2/$$f; done) | ||
35 | (cd $(srcdir)/tapset; find . \( -name '*.stp' -o -name '*.stpm' -o -name README \) -print \ | ||
36 | | while read f; do $(INSTALL_DATA) -D $$f $(DESTDIR)$(pkgdatadir)/tapset/$$f; done) | ||
37 | (cd $(srcdir)/testsuite/systemtap.examples; find . -type f -print \ | ||
38 | -- | ||
39 | 2.31.1 | ||
40 | |||
diff --git a/meta/recipes-kernel/systemtap/systemtap/0001-bpf-translate.cxx-fix-build-against-upcoming-gcc-14-.patch b/meta/recipes-kernel/systemtap/systemtap/0001-bpf-translate.cxx-fix-build-against-upcoming-gcc-14-.patch deleted file mode 100644 index e3d94d9312..0000000000 --- a/meta/recipes-kernel/systemtap/systemtap/0001-bpf-translate.cxx-fix-build-against-upcoming-gcc-14-.patch +++ /dev/null | |||
@@ -1,40 +0,0 @@ | |||
1 | From d42139cf9cd26d0c0363fcfe007716baeb8de517 Mon Sep 17 00:00:00 2001 | ||
2 | From: Sergei Trofimovich <slyich@gmail.com> | ||
3 | Date: Fri, 22 Dec 2023 19:42:38 +0000 | ||
4 | Subject: [PATCH] bpf-translate.cxx: fix build against upcoming `gcc-14` | ||
5 | (`-Werror=calloc-transposed-args`) | ||
6 | |||
7 | `gcc-14` added a new `-Wcalloc-transposed-args` warning recently. It | ||
8 | detected minor infelicity in `calloc()` API usage in `systemtap`: | ||
9 | |||
10 | bpf-translate.cxx: In function 'bpf::BPF_Section* bpf::output_probe(BPF_Output&, program&, const std::string&, unsigned int)': | ||
11 | bpf-translate.cxx:5044:39: error: 'void* calloc(size_t, size_t)' sizes specified with 'sizeof' in the earlier argument and not in the later argument [-Werror=calloc-transposed-args] | ||
12 | 5044 | bpf_insn *buf = (bpf_insn*) calloc (sizeof(bpf_insn), ninsns); | ||
13 | | ^~~~~~~~~~~~~~~~ | ||
14 | bpf-translate.cxx:5044:39: note: earlier argument should specify number of elements, later size of each element | ||
15 | |||
16 | Upstream-Status: Backport [https://sourceware.org/git/?p=systemtap.git;a=commit;h=d42139cf9cd26d0c0363fcfe007716baeb8de517] | ||
17 | Signed-off-by: Khem Raj <raj.khem@gmail.com> | ||
18 | --- | ||
19 | bpf-translate.cxx | 4 ++-- | ||
20 | 1 file changed, 2 insertions(+), 2 deletions(-) | ||
21 | |||
22 | diff --git a/bpf-translate.cxx b/bpf-translate.cxx | ||
23 | index 1a9302463..aa8ef65ce 100644 | ||
24 | --- a/bpf-translate.cxx | ||
25 | +++ b/bpf-translate.cxx | ||
26 | @@ -5041,9 +5041,9 @@ output_probe(BPF_Output &eo, program &prog, | ||
27 | } | ||
28 | } | ||
29 | |||
30 | - bpf_insn *buf = (bpf_insn*) calloc (sizeof(bpf_insn), ninsns); | ||
31 | + bpf_insn *buf = (bpf_insn*) calloc (ninsns, sizeof(bpf_insn)); | ||
32 | assert (buf); | ||
33 | - Elf64_Rel *rel = (Elf64_Rel*) calloc (sizeof(Elf64_Rel), nreloc); | ||
34 | + Elf64_Rel *rel = (Elf64_Rel*) calloc (nreloc, sizeof(Elf64_Rel)); | ||
35 | assert (rel); | ||
36 | |||
37 | unsigned i = 0, r = 0; | ||
38 | -- | ||
39 | 2.43.0 | ||
40 | |||
diff --git a/meta/recipes-kernel/systemtap/systemtap/0001-configure.ac-fix-broken-libdebuginfod-library-auto-d.patch b/meta/recipes-kernel/systemtap/systemtap/0001-configure.ac-fix-broken-libdebuginfod-library-auto-d.patch deleted file mode 100644 index 98641826f6..0000000000 --- a/meta/recipes-kernel/systemtap/systemtap/0001-configure.ac-fix-broken-libdebuginfod-library-auto-d.patch +++ /dev/null | |||
@@ -1,51 +0,0 @@ | |||
1 | From 3913ad3e28a19811e1b52338112344a487057e4f Mon Sep 17 00:00:00 2001 | ||
2 | From: Victor Kamensky <victor.kamensky7@gmail.com> | ||
3 | Date: Mon, 18 Dec 2023 03:13:38 +0000 | ||
4 | Subject: [PATCH 1/2] configure.ac: fix broken libdebuginfod library auto | ||
5 | detection | ||
6 | |||
7 | After 2e67b053e3796ee7cf29a39f9698729b52078406 "configury: rework debuginfod searches" | ||
8 | commit, libdebuginfod.so library auto detection is broken. It was reported by Martin Jansa | ||
9 | on openembedded-core mailing list [1]. | ||
10 | |||
11 | Currently configure.ac does "AC_DEFINE([HAVE_LIBDEBUGINFOD], [1] ..." as long as | ||
12 | no --without-debuginfod option is passed, regardless PKG_CHECK_MODULES check result. | ||
13 | It seems to be bad copy/paste. Address the issue by moving the AC_DEFINE back to | ||
14 | PKG_CHECK_MODULES action-if-found block. | ||
15 | |||
16 | To reproduce the issue on FC system, one can do the following | ||
17 | "sudo dnf remove elfutils-debuginfod-client-devel" and then try to build SystemTap | ||
18 | util.cxx will fail to compile because of missing elfutils/debuginfod.h because | ||
19 | config.h will have "#define HAVE_LIBDEBUGINFOD 1", while config.log and configure | ||
20 | output indicates that check for libdebuginfod library failed. | ||
21 | |||
22 | [1] https://lists.openembedded.org/g/openembedded-core/message/192109?p=%2C%2C%2C20%2C0%2C0%2C0%3A%3Acreated%2C0%2Csystemtap%2C20%2C2%2C0%2C102987514 | ||
23 | |||
24 | Upstream-Status: Submitted [https://sourceware.org/pipermail/systemtap/2023q4/027914.html] | ||
25 | Signed-off-by: Victor Kamensky <victor.kamensky7@gmail.com> | ||
26 | --- | ||
27 | configure.ac | 5 ++--- | ||
28 | 1 file changed, 2 insertions(+), 3 deletions(-) | ||
29 | |||
30 | diff --git a/configure.ac b/configure.ac | ||
31 | index d9559c5c3..18cd7f84a 100644 | ||
32 | --- a/configure.ac | ||
33 | +++ b/configure.ac | ||
34 | @@ -219,12 +219,11 @@ dnl take the user at his or her word | ||
35 | elif test "x$with_debuginfod" != xno; then | ||
36 | dnl check in the system pkgconfig | ||
37 | PKG_CHECK_MODULES([debuginfod], [libdebuginfod >= 0.179], | ||
38 | - [have_debuginfod=1], | ||
39 | + [have_debuginfod=1 | ||
40 | + AC_DEFINE([HAVE_LIBDEBUGINFOD], [1], [Define to 1 if debuginfod is enabled.])], | ||
41 | [if test "x$with_debuginfod" = xyes; then | ||
42 | AC_MSG_ERROR(["--with-debuginfod was given, but libdebuginfod is missing or unusable."]) | ||
43 | fi]) | ||
44 | - AC_DEFINE([HAVE_LIBDEBUGINFOD], [1], [Define to 1 if debuginfod is enabled.]) | ||
45 | - AC_MSG_RESULT([yes]) | ||
46 | else | ||
47 | AC_MSG_RESULT([no]) | ||
48 | fi | ||
49 | -- | ||
50 | 2.31.1 | ||
51 | |||
diff --git a/meta/recipes-kernel/systemtap/systemtap/0001-prerelease-datestamp-fixes.patch b/meta/recipes-kernel/systemtap/systemtap/0001-prerelease-datestamp-fixes.patch deleted file mode 100644 index afdc10a3fe..0000000000 --- a/meta/recipes-kernel/systemtap/systemtap/0001-prerelease-datestamp-fixes.patch +++ /dev/null | |||
@@ -1,23 +0,0 @@ | |||
1 | From cfc2c1d53924face11e3fab78ded61c359778eb9 Mon Sep 17 00:00:00 2001 | ||
2 | From: "Frank Ch. Eigler" <fche@redhat.com> | ||
3 | Date: Sat, 4 Nov 2023 12:19:59 -0400 | ||
4 | Subject: [PATCH] prerelease datestamp fixes | ||
5 | |||
6 | Upstream-Status: Backport | ||
7 | --- | ||
8 | NEWS | 2 +- | ||
9 | 1 file changed, 1 insertion(+), 1 deletion(-) | ||
10 | |||
11 | diff --git a/NEWS b/NEWS | ||
12 | index c7f31e8b7..f0f9056c4 100644 | ||
13 | --- a/NEWS | ||
14 | +++ b/NEWS | ||
15 | @@ -1,4 +1,4 @@ | ||
16 | -* What's new in version 5.0, 2023-11-03 | ||
17 | +* What's new in version 5.0, 2023-11-04 | ||
18 | |||
19 | - Performance improvements in uprobe registration and module startup. | ||
20 | |||
21 | -- | ||
22 | 2.31.1 | ||
23 | |||
diff --git a/meta/recipes-kernel/systemtap/systemtap/0001-staprun-fix-build-against-upcoming-gcc-14-Werror-cal.patch b/meta/recipes-kernel/systemtap/systemtap/0001-staprun-fix-build-against-upcoming-gcc-14-Werror-cal.patch deleted file mode 100644 index 22578fb3f6..0000000000 --- a/meta/recipes-kernel/systemtap/systemtap/0001-staprun-fix-build-against-upcoming-gcc-14-Werror-cal.patch +++ /dev/null | |||
@@ -1,36 +0,0 @@ | |||
1 | From 52596f023652114642faba5726c99488529029ce Mon Sep 17 00:00:00 2001 | ||
2 | From: Sergei Trofimovich <slyich@gmail.com> | ||
3 | Date: Thu, 21 Dec 2023 10:00:06 +0000 | ||
4 | Subject: [PATCH] staprun: fix build against upcoming `gcc-14` | ||
5 | (`-Werror=calloc-transposed-args`) | ||
6 | |||
7 | `gcc-14` added a new `-Wcalloc-transposed-args` warning recently. It | ||
8 | detected minor infelicity in `calloc()` API usage in `systemtap`: | ||
9 | |||
10 | staprun.c: In function 'main': | ||
11 | staprun.c:550:50: error: 'calloc' sizes specified with 'sizeof' in the earlier argument and not in the later argument [-Werror=calloc-transposed-args] | ||
12 | 550 | char ** new_argv = calloc(sizeof(char *),argc+2); | ||
13 | | ^~~~ | ||
14 | |||
15 | Upstream-Status: Backport [https://sourceware.org/git/?p=systemtap.git;a=commit;h=52596f023652114642faba5726c99488529029ce] | ||
16 | Signed-off-by: Khem Raj <raj.khem@gmail.com> | ||
17 | --- | ||
18 | staprun/staprun.c | 2 +- | ||
19 | 1 file changed, 1 insertion(+), 1 deletion(-) | ||
20 | |||
21 | diff --git a/staprun/staprun.c b/staprun/staprun.c | ||
22 | index 8437f3af6..d1b0b221b 100644 | ||
23 | --- a/staprun/staprun.c | ||
24 | +++ b/staprun/staprun.c | ||
25 | @@ -547,7 +547,7 @@ int main(int argc, char **argv) | ||
26 | us to extend argv[], with all the C fun that entails. */ | ||
27 | #ifdef HAVE_OPENAT | ||
28 | if (relay_basedir_fd >= 0) { | ||
29 | - char ** new_argv = calloc(sizeof(char *),argc+2); | ||
30 | + char ** new_argv = calloc(argc+2, sizeof(char *)); | ||
31 | const int new_Foption_size = 10; /* -FNNNNN */ | ||
32 | char * new_Foption = malloc(new_Foption_size); | ||
33 | int i; | ||
34 | -- | ||
35 | 2.43.0 | ||
36 | |||
diff --git a/meta/recipes-kernel/systemtap/systemtap_git.inc b/meta/recipes-kernel/systemtap/systemtap_git.inc index c574bcb2ba..cc9fc81430 100644 --- a/meta/recipes-kernel/systemtap/systemtap_git.inc +++ b/meta/recipes-kernel/systemtap/systemtap_git.inc | |||
@@ -1,17 +1,12 @@ | |||
1 | LICENSE = "GPL-2.0-only" | 1 | LICENSE = "GPL-2.0-only" |
2 | LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263" | 2 | LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263" |
3 | SRCREV = "e72dc118e563c645d93a1a2d771e8d90e8fec1ae" | 3 | SRCREV = "3a0c9c15163520dd0d9ab07177da62ce9cc2332f" |
4 | PV = "5.0" | 4 | PV = "5.1" |
5 | 5 | ||
6 | SRC_URI = "git://sourceware.org/git/systemtap.git;branch=master;protocol=https \ | 6 | SRC_URI = "git://sourceware.org/git/systemtap.git;branch=master;protocol=https \ |
7 | file://0001-Do-not-let-configure-write-a-python-location-into-th.patch \ | 7 | file://0001-Do-not-let-configure-write-a-python-location-into-th.patch \ |
8 | file://0001-Install-python-modules-to-correct-library-dir.patch \ | 8 | file://0001-Install-python-modules-to-correct-library-dir.patch \ |
9 | file://0001-staprun-stapbpf-don-t-support-installing-a-non-root.patch \ | 9 | file://0001-staprun-stapbpf-don-t-support-installing-a-non-root.patch \ |
10 | file://0001-Makefile.am-remove-runtime-linux-uprobes-and-runtime.patch \ | ||
11 | file://0001-prerelease-datestamp-fixes.patch \ | ||
12 | file://0001-configure.ac-fix-broken-libdebuginfod-library-auto-d.patch \ | ||
13 | file://0001-bpf-translate.cxx-fix-build-against-upcoming-gcc-14-.patch \ | ||
14 | file://0001-staprun-fix-build-against-upcoming-gcc-14-Werror-cal.patch \ | ||
15 | " | 10 | " |
16 | 11 | ||
17 | COMPATIBLE_HOST = '(x86_64|i.86|powerpc|arm|aarch64|microblazeel|mips|riscv64).*-linux' | 12 | COMPATIBLE_HOST = '(x86_64|i.86|powerpc|arm|aarch64|microblazeel|mips|riscv64).*-linux' |
diff --git a/meta/recipes-kernel/wireless-regdb/wireless-regdb_2024.01.23.bb b/meta/recipes-kernel/wireless-regdb/wireless-regdb_2024.05.08.bb index 8fde236ab4..95e33d9fb1 100644 --- a/meta/recipes-kernel/wireless-regdb/wireless-regdb_2024.01.23.bb +++ b/meta/recipes-kernel/wireless-regdb/wireless-regdb_2024.05.08.bb | |||
@@ -5,7 +5,7 @@ LICENSE = "ISC" | |||
5 | LIC_FILES_CHKSUM = "file://LICENSE;md5=07c4f6dea3845b02a18dc00c8c87699c" | 5 | LIC_FILES_CHKSUM = "file://LICENSE;md5=07c4f6dea3845b02a18dc00c8c87699c" |
6 | 6 | ||
7 | SRC_URI = "https://www.kernel.org/pub/software/network/${BPN}/${BP}.tar.xz" | 7 | SRC_URI = "https://www.kernel.org/pub/software/network/${BPN}/${BP}.tar.xz" |
8 | SRC_URI[sha256sum] = "c8a61c9acf76fa7eb4239e89f640dee3e87098d9f69b4d3518c9c60fc6d20c55" | 8 | SRC_URI[sha256sum] = "9aee1d86ebebb363b714bec941b2820f31e3b7f1a485ddc9fcbd9985c7d3e7c4" |
9 | 9 | ||
10 | inherit bin_package allarch | 10 | inherit bin_package allarch |
11 | 11 | ||
diff --git a/meta/recipes-multimedia/gstreamer/gst-devtools/0001-connect-has-a-different-signature-on-musl.patch b/meta/recipes-multimedia/gstreamer/gst-devtools/0001-connect-has-a-different-signature-on-musl.patch index 21e30d6d46..8520567f17 100644 --- a/meta/recipes-multimedia/gstreamer/gst-devtools/0001-connect-has-a-different-signature-on-musl.patch +++ b/meta/recipes-multimedia/gstreamer/gst-devtools/0001-connect-has-a-different-signature-on-musl.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 0c73b8131efba63c5cd37ea8c7551434c3b57304 Mon Sep 17 00:00:00 2001 | 1 | From c20894a5e6f52aa9fe3437e733a228b1a0eb6428 Mon Sep 17 00:00:00 2001 |
2 | From: Khem Raj <raj.khem@gmail.com> | 2 | From: Khem Raj <raj.khem@gmail.com> |
3 | Date: Sun, 9 Sep 2018 17:38:10 -0700 | 3 | Date: Sun, 9 Sep 2018 17:38:10 -0700 |
4 | Subject: [PATCH] connect has a different signature on musl | 4 | Subject: [PATCH] connect has a different signature on musl |
@@ -11,7 +11,6 @@ typcasted to struct sockaddr_in* type inside the function before use | |||
11 | Upstream-Status: Pending | 11 | Upstream-Status: Pending |
12 | 12 | ||
13 | Signed-off-by: Khem Raj <raj.khem@gmail.com> | 13 | Signed-off-by: Khem Raj <raj.khem@gmail.com> |
14 | |||
15 | --- | 14 | --- |
16 | validate/plugins/fault_injection/socket_interposer.c | 7 ++++++- | 15 | validate/plugins/fault_injection/socket_interposer.c | 7 ++++++- |
17 | 1 file changed, 6 insertions(+), 1 deletion(-) | 16 | 1 file changed, 6 insertions(+), 1 deletion(-) |
diff --git a/meta/recipes-multimedia/gstreamer/gst-devtools_1.22.11.bb b/meta/recipes-multimedia/gstreamer/gst-devtools_1.24.3.bb index 2be406192f..9df802afe9 100644 --- a/meta/recipes-multimedia/gstreamer/gst-devtools_1.22.11.bb +++ b/meta/recipes-multimedia/gstreamer/gst-devtools_1.24.3.bb | |||
@@ -12,7 +12,7 @@ SRC_URI = "https://gstreamer.freedesktop.org/src/gst-devtools/gst-devtools-${PV} | |||
12 | file://0001-connect-has-a-different-signature-on-musl.patch \ | 12 | file://0001-connect-has-a-different-signature-on-musl.patch \ |
13 | " | 13 | " |
14 | 14 | ||
15 | SRC_URI[sha256sum] = "07766425ecb5bf857ab5ad3962321c55cd89f9386b720843f9df71c0a455eb9b" | 15 | SRC_URI[sha256sum] = "b91114a2fd958f42acf441186e87e2bec93538ef35a9f8248111197360ffb237" |
16 | 16 | ||
17 | DEPENDS = "json-glib glib-2.0 glib-2.0-native gstreamer1.0 gstreamer1.0-plugins-base" | 17 | DEPENDS = "json-glib glib-2.0 glib-2.0-native gstreamer1.0 gstreamer1.0-plugins-base" |
18 | RRECOMMENDS:${PN} = "git" | 18 | RRECOMMENDS:${PN} = "git" |
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-libav_1.22.11.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-libav_1.24.3.bb index f3287efa96..d468f2983c 100644 --- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-libav_1.22.11.bb +++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-libav_1.24.3.bb | |||
@@ -12,7 +12,7 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=69333daa044cb77e486cc36129f7a770 \ | |||
12 | " | 12 | " |
13 | 13 | ||
14 | SRC_URI = "https://gstreamer.freedesktop.org/src/gst-libav/gst-libav-${PV}.tar.xz" | 14 | SRC_URI = "https://gstreamer.freedesktop.org/src/gst-libav/gst-libav-${PV}.tar.xz" |
15 | SRC_URI[sha256sum] = "6b13dcc9332ef27a7c1e7005c0196883874f91622f8aa6e52f218b05b15d2bf5" | 15 | SRC_URI[sha256sum] = "d9c5b152468a45c1fa8351410422090a7192707ad74d2e1a4367f5254e188d91" |
16 | 16 | ||
17 | S = "${WORKDIR}/gst-libav-${PV}" | 17 | S = "${WORKDIR}/gst-libav-${PV}" |
18 | 18 | ||
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-omx_1.22.11.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-omx_1.22.11.bb deleted file mode 100644 index 97348fb398..0000000000 --- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-omx_1.22.11.bb +++ /dev/null | |||
@@ -1,47 +0,0 @@ | |||
1 | SUMMARY = "OpenMAX IL plugins for GStreamer" | ||
2 | DESCRIPTION = "Wraps available OpenMAX IL components and makes them available as standard GStreamer elements." | ||
3 | HOMEPAGE = "http://gstreamer.freedesktop.org/" | ||
4 | SECTION = "multimedia" | ||
5 | |||
6 | LICENSE = "LGPL-2.1-or-later" | ||
7 | LICENSE_FLAGS = "commercial" | ||
8 | LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c \ | ||
9 | file://omx/gstomx.h;beginline=1;endline=21;md5=5c8e1fca32704488e76d2ba9ddfa935f" | ||
10 | |||
11 | SRC_URI = "https://gstreamer.freedesktop.org/src/gst-omx/gst-omx-${PV}.tar.xz" | ||
12 | |||
13 | SRC_URI[sha256sum] = "18dfdf5f6b773d67e62a315c6cf6247da320b83603a5819493f53c69ed2eeef6" | ||
14 | |||
15 | S = "${WORKDIR}/gst-omx-${PV}" | ||
16 | |||
17 | DEPENDS = "gstreamer1.0 gstreamer1.0-plugins-base gstreamer1.0-plugins-bad virtual/libomxil" | ||
18 | |||
19 | inherit meson pkgconfig upstream-version-is-even | ||
20 | |||
21 | GSTREAMER_1_0_OMX_TARGET ?= "bellagio" | ||
22 | GSTREAMER_1_0_OMX_CORE_NAME ?= "${libdir}/libomxil-bellagio.so.0" | ||
23 | |||
24 | EXTRA_OEMESON += "-Dtarget=${GSTREAMER_1_0_OMX_TARGET}" | ||
25 | |||
26 | python __anonymous () { | ||
27 | omx_target = d.getVar("GSTREAMER_1_0_OMX_TARGET") | ||
28 | if omx_target in ['generic', 'bellagio']: | ||
29 | # Bellagio headers are incomplete (they are missing the OMX_VERSION_MAJOR,# | ||
30 | # OMX_VERSION_MINOR, OMX_VERSION_REVISION, and OMX_VERSION_STEP macros); | ||
31 | # appending a directory path to gst-omx' internal OpenMAX IL headers fixes this | ||
32 | d.appendVar("CFLAGS", " -I${S}/omx/openmax") | ||
33 | elif omx_target == "rpi": | ||
34 | # Dedicated Raspberry Pi OpenMAX IL support makes this package machine specific | ||
35 | d.setVar("PACKAGE_ARCH", d.getVar("MACHINE_ARCH")) | ||
36 | } | ||
37 | |||
38 | set_omx_core_name() { | ||
39 | sed -i -e "s;^core-name=.*;core-name=${GSTREAMER_1_0_OMX_CORE_NAME};" "${D}${sysconfdir}/xdg/gstomx.conf" | ||
40 | } | ||
41 | do_install[postfuncs] += " set_omx_core_name " | ||
42 | |||
43 | FILES:${PN} += "${libdir}/gstreamer-1.0/*.so" | ||
44 | FILES:${PN}-staticdev += "${libdir}/gstreamer-1.0/*.a" | ||
45 | |||
46 | VIRTUAL-RUNTIME_libomxil ?= "libomxil" | ||
47 | RDEPENDS:${PN} = "${VIRTUAL-RUNTIME_libomxil}" | ||
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0001-fix-maybe-uninitialized-warnings-when-compiling-with.patch b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0001-fix-maybe-uninitialized-warnings-when-compiling-with.patch index a57fcd7d21..13a481485a 100644 --- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0001-fix-maybe-uninitialized-warnings-when-compiling-with.patch +++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0001-fix-maybe-uninitialized-warnings-when-compiling-with.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 8be0c6ac60c96e87d8aa443be9c431844dc3d92a Mon Sep 17 00:00:00 2001 | 1 | From bb7f964d51de6b888b3b7a6d0c3d260fcad24008 Mon Sep 17 00:00:00 2001 |
2 | From: Andre McCurdy <armccurdy@gmail.com> | 2 | From: Andre McCurdy <armccurdy@gmail.com> |
3 | Date: Tue, 26 Jan 2016 15:16:01 -0800 | 3 | Date: Tue, 26 Jan 2016 15:16:01 -0800 |
4 | Subject: [PATCH] fix maybe-uninitialized warnings when compiling with -Os | 4 | Subject: [PATCH] fix maybe-uninitialized warnings when compiling with -Os |
@@ -6,13 +6,12 @@ Subject: [PATCH] fix maybe-uninitialized warnings when compiling with -Os | |||
6 | Upstream-Status: Pending | 6 | Upstream-Status: Pending |
7 | 7 | ||
8 | Signed-off-by: Andre McCurdy <armccurdy@gmail.com> | 8 | Signed-off-by: Andre McCurdy <armccurdy@gmail.com> |
9 | |||
10 | --- | 9 | --- |
11 | gst-libs/gst/codecparsers/gstvc1parser.c | 2 +- | 10 | gst-libs/gst/codecparsers/gstvc1parser.c | 2 +- |
12 | 1 file changed, 1 insertion(+), 1 deletion(-) | 11 | 1 file changed, 1 insertion(+), 1 deletion(-) |
13 | 12 | ||
14 | diff --git a/gst-libs/gst/codecparsers/gstvc1parser.c b/gst-libs/gst/codecparsers/gstvc1parser.c | 13 | diff --git a/gst-libs/gst/codecparsers/gstvc1parser.c b/gst-libs/gst/codecparsers/gstvc1parser.c |
15 | index 2c60ced..e8226d8 100644 | 14 | index f9af175..6661e2e 100644 |
16 | --- a/gst-libs/gst/codecparsers/gstvc1parser.c | 15 | --- a/gst-libs/gst/codecparsers/gstvc1parser.c |
17 | +++ b/gst-libs/gst/codecparsers/gstvc1parser.c | 16 | +++ b/gst-libs/gst/codecparsers/gstvc1parser.c |
18 | @@ -1730,7 +1730,7 @@ gst_vc1_parse_sequence_layer (const guint8 * data, gsize size, | 17 | @@ -1730,7 +1730,7 @@ gst_vc1_parse_sequence_layer (const guint8 * data, gsize size, |
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0002-avoid-including-sys-poll.h-directly.patch b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0002-avoid-including-sys-poll.h-directly.patch index 6509a293b7..298337cb0a 100644 --- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0002-avoid-including-sys-poll.h-directly.patch +++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0002-avoid-including-sys-poll.h-directly.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 426ee79adeecc24605426030a486d7c5a755048c Mon Sep 17 00:00:00 2001 | 1 | From 25efc5b2d22aed4527470ef93970664b210bf8c5 Mon Sep 17 00:00:00 2001 |
2 | From: Andre McCurdy <armccurdy@gmail.com> | 2 | From: Andre McCurdy <armccurdy@gmail.com> |
3 | Date: Wed, 3 Feb 2016 18:05:41 -0800 | 3 | Date: Wed, 3 Feb 2016 18:05:41 -0800 |
4 | Subject: [PATCH] avoid including <sys/poll.h> directly | 4 | Subject: [PATCH] avoid including <sys/poll.h> directly |
@@ -8,13 +8,12 @@ musl libc generates warnings if <sys/poll.h> is included directly. | |||
8 | Upstream-Status: Pending | 8 | Upstream-Status: Pending |
9 | 9 | ||
10 | Signed-off-by: Andre McCurdy <armccurdy@gmail.com> | 10 | Signed-off-by: Andre McCurdy <armccurdy@gmail.com> |
11 | |||
12 | --- | 11 | --- |
13 | sys/dvb/gstdvbsrc.c | 2 +- | 12 | sys/dvb/gstdvbsrc.c | 2 +- |
14 | 1 file changed, 1 insertion(+), 1 deletion(-) | 13 | 1 file changed, 1 insertion(+), 1 deletion(-) |
15 | 14 | ||
16 | diff --git a/sys/dvb/gstdvbsrc.c b/sys/dvb/gstdvbsrc.c | 15 | diff --git a/sys/dvb/gstdvbsrc.c b/sys/dvb/gstdvbsrc.c |
17 | index b8e5b1a..5bca6e5 100644 | 16 | index 33ee3ff..b8ddea9 100644 |
18 | --- a/sys/dvb/gstdvbsrc.c | 17 | --- a/sys/dvb/gstdvbsrc.c |
19 | +++ b/sys/dvb/gstdvbsrc.c | 18 | +++ b/sys/dvb/gstdvbsrc.c |
20 | @@ -98,7 +98,7 @@ | 19 | @@ -98,7 +98,7 @@ |
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0004-opencv-resolve-missing-opencv-data-dir-in-yocto-buil.patch b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0004-opencv-resolve-missing-opencv-data-dir-in-yocto-buil.patch index 50a3143eca..965053348f 100644 --- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0004-opencv-resolve-missing-opencv-data-dir-in-yocto-buil.patch +++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0004-opencv-resolve-missing-opencv-data-dir-in-yocto-buil.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From e6eb7536fcfc2c75f7831f67b1f16f3f36ef2545 Mon Sep 17 00:00:00 2001 | 1 | From 39fbcdfc5383bdae95293a3109c847f64d193b54 Mon Sep 17 00:00:00 2001 |
2 | From: Andrey Zhizhikin <andrey.z@gmail.com> | 2 | From: Andrey Zhizhikin <andrey.z@gmail.com> |
3 | Date: Mon, 27 Jan 2020 10:22:35 +0000 | 3 | Date: Mon, 27 Jan 2020 10:22:35 +0000 |
4 | Subject: [PATCH] opencv: resolve missing opencv data dir in yocto build | 4 | Subject: [PATCH] opencv: resolve missing opencv data dir in yocto build |
@@ -12,18 +12,17 @@ Upstream-Status: Inappropriate [OE-specific] | |||
12 | 12 | ||
13 | Signed-off-by: Andrey Zhizhikin <andrey.z@gmail.com> | 13 | Signed-off-by: Andrey Zhizhikin <andrey.z@gmail.com> |
14 | Signed-off-by: Jose Quaresma <quaresma.jose@gmail.com> | 14 | Signed-off-by: Jose Quaresma <quaresma.jose@gmail.com> |
15 | |||
16 | --- | 15 | --- |
17 | ext/opencv/meson.build | 3 +++ | 16 | gst-libs/gst/opencv/meson.build | 3 +++ |
18 | 1 file changed, 3 insertions(+) | 17 | 1 file changed, 3 insertions(+) |
19 | 18 | ||
20 | diff --git a/ext/opencv/meson.build b/ext/opencv/meson.build | 19 | diff --git a/gst-libs/gst/opencv/meson.build b/gst-libs/gst/opencv/meson.build |
21 | index 37e2015..326f737 100644 | 20 | index 1d337f1..f47939a 100644 |
22 | --- a/ext/opencv/meson.build | 21 | --- a/gst-libs/gst/opencv/meson.build |
23 | +++ b/ext/opencv/meson.build | 22 | +++ b/gst-libs/gst/opencv/meson.build |
24 | @@ -87,6 +87,9 @@ if opencv_found | 23 | @@ -52,6 +52,9 @@ if opencv_found |
25 | opencv_prefix = opencv_dep.get_variable('prefix') | 24 | # /usr/include/opencv4/opencv2/flann/logger.h:83:36: error: format string is not a string literal [-Werror,-Wformat-nonliteral] |
26 | gstopencv_cargs += ['-DOPENCV_PREFIX="' + opencv_prefix + '"'] | 25 | gstopencv_cargs += cxx.get_supported_arguments(['-Wno-missing-include-dirs', '-Wno-format-nonliteral']) |
27 | 26 | ||
28 | + pkgconf_sysroot = run_command(python3, '-c', 'import os; print(os.environ.get("PKG_CONFIG_SYSROOT_DIR"))').stdout().strip() | 27 | + pkgconf_sysroot = run_command(python3, '-c', 'import os; print(os.environ.get("PKG_CONFIG_SYSROOT_DIR"))').stdout().strip() |
29 | + opencv_prefix = pkgconf_sysroot + opencv_prefix | 28 | + opencv_prefix = pkgconf_sysroot + opencv_prefix |
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad_1.22.11.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad_1.24.3.bb index 523ee7a5ae..693ee06669 100644 --- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad_1.22.11.bb +++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad_1.24.3.bb | |||
@@ -10,7 +10,7 @@ SRC_URI = "https://gstreamer.freedesktop.org/src/gst-plugins-bad/gst-plugins-bad | |||
10 | file://0002-avoid-including-sys-poll.h-directly.patch \ | 10 | file://0002-avoid-including-sys-poll.h-directly.patch \ |
11 | file://0004-opencv-resolve-missing-opencv-data-dir-in-yocto-buil.patch \ | 11 | file://0004-opencv-resolve-missing-opencv-data-dir-in-yocto-buil.patch \ |
12 | " | 12 | " |
13 | SRC_URI[sha256sum] = "808d3b33fc4c71aeb2561c364a87c2e8a3e2343319a83244c8391be4b09499c8" | 13 | SRC_URI[sha256sum] = "e90f26c7dc9c76f4aa599b758cfd6d8c10d6a0b9cb265ba2c3c9bdf3888558f8" |
14 | 14 | ||
15 | S = "${WORKDIR}/gst-plugins-bad-${PV}" | 15 | S = "${WORKDIR}/gst-plugins-bad-${PV}" |
16 | 16 | ||
@@ -60,7 +60,7 @@ PACKAGECONFIG[libde265] = "-Dlibde265=enabled,-Dlibde265=disabled,libde26 | |||
60 | PACKAGECONFIG[libssh2] = "-Dcurl-ssh2=enabled,-Dcurl-ssh2=disabled,libssh2" | 60 | PACKAGECONFIG[libssh2] = "-Dcurl-ssh2=enabled,-Dcurl-ssh2=disabled,libssh2" |
61 | PACKAGECONFIG[lcms2] = "-Dcolormanagement=enabled,-Dcolormanagement=disabled,lcms" | 61 | PACKAGECONFIG[lcms2] = "-Dcolormanagement=enabled,-Dcolormanagement=disabled,lcms" |
62 | PACKAGECONFIG[modplug] = "-Dmodplug=enabled,-Dmodplug=disabled,libmodplug" | 62 | PACKAGECONFIG[modplug] = "-Dmodplug=enabled,-Dmodplug=disabled,libmodplug" |
63 | PACKAGECONFIG[msdk] = "-Dmsdk=enabled -Dmfx_api=oneVPL,-Dmsdk=disabled,onevpl-intel-gpu" | 63 | PACKAGECONFIG[msdk] = "-Dmsdk=enabled -Dmfx_api=oneVPL,-Dmsdk=disabled,vpl-gpu-rt" |
64 | PACKAGECONFIG[neon] = "-Dneon=enabled,-Dneon=disabled,neon" | 64 | PACKAGECONFIG[neon] = "-Dneon=enabled,-Dneon=disabled,neon" |
65 | PACKAGECONFIG[openal] = "-Dopenal=enabled,-Dopenal=disabled,openal-soft" | 65 | PACKAGECONFIG[openal] = "-Dopenal=enabled,-Dopenal=disabled,openal-soft" |
66 | PACKAGECONFIG[opencv] = "-Dopencv=enabled,-Dopencv=disabled,opencv" | 66 | PACKAGECONFIG[opencv] = "-Dopencv=enabled,-Dopencv=disabled,opencv" |
@@ -124,7 +124,6 @@ EXTRA_OEMESON += " \ | |||
124 | -Dgs=disabled \ | 124 | -Dgs=disabled \ |
125 | -Dgsm=disabled \ | 125 | -Dgsm=disabled \ |
126 | -Diqa=disabled \ | 126 | -Diqa=disabled \ |
127 | -Dkate=disabled \ | ||
128 | -Dladspa=disabled \ | 127 | -Dladspa=disabled \ |
129 | -Dldac=disabled \ | 128 | -Dldac=disabled \ |
130 | -Dlv2=disabled \ | 129 | -Dlv2=disabled \ |
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base/0001-ENGR00312515-get-caps-from-src-pad-when-query-caps.patch b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base/0001-ENGR00312515-get-caps-from-src-pad-when-query-caps.patch index 34c99ded42..38d0fbf6bf 100644 --- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base/0001-ENGR00312515-get-caps-from-src-pad-when-query-caps.patch +++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base/0001-ENGR00312515-get-caps-from-src-pad-when-query-caps.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 63fe5a7b4ef70e2c490bad3b0838329935a8d77c Mon Sep 17 00:00:00 2001 | 1 | From 5a4d65e13a326103ac02f156c01b6fcbb0269902 Mon Sep 17 00:00:00 2001 |
2 | From: zhouming <b42586@freescale.com> | 2 | From: zhouming <b42586@freescale.com> |
3 | Date: Wed, 14 May 2014 10:16:20 +0800 | 3 | Date: Wed, 14 May 2014 10:16:20 +0800 |
4 | Subject: [PATCH] ENGR00312515: get caps from src pad when query caps | 4 | Subject: [PATCH] ENGR00312515: get caps from src pad when query caps |
@@ -8,7 +8,6 @@ https://bugzilla.gnome.org/show_bug.cgi?id=728312 | |||
8 | Upstream-Status: Pending | 8 | Upstream-Status: Pending |
9 | 9 | ||
10 | Signed-off-by: zhouming <b42586@freescale.com> | 10 | Signed-off-by: zhouming <b42586@freescale.com> |
11 | |||
12 | --- | 11 | --- |
13 | gst-libs/gst/tag/gsttagdemux.c | 13 +++++++++++++ | 12 | gst-libs/gst/tag/gsttagdemux.c | 13 +++++++++++++ |
14 | 1 file changed, 13 insertions(+) | 13 | 1 file changed, 13 insertions(+) |
@@ -17,7 +16,7 @@ Signed-off-by: zhouming <b42586@freescale.com> | |||
17 | diff --git a/gst-libs/gst/tag/gsttagdemux.c b/gst-libs/gst/tag/gsttagdemux.c | 16 | diff --git a/gst-libs/gst/tag/gsttagdemux.c b/gst-libs/gst/tag/gsttagdemux.c |
18 | old mode 100644 | 17 | old mode 100644 |
19 | new mode 100755 | 18 | new mode 100755 |
20 | index 173da37..2b7f34c | 19 | index ef0ff90..53f05c8 |
21 | --- a/gst-libs/gst/tag/gsttagdemux.c | 20 | --- a/gst-libs/gst/tag/gsttagdemux.c |
22 | +++ b/gst-libs/gst/tag/gsttagdemux.c | 21 | +++ b/gst-libs/gst/tag/gsttagdemux.c |
23 | @@ -1796,6 +1796,19 @@ gst_tag_demux_pad_query (GstPad * pad, GstObject * parent, GstQuery * query) | 22 | @@ -1796,6 +1796,19 @@ gst_tag_demux_pad_query (GstPad * pad, GstObject * parent, GstQuery * query) |
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base/0002-ssaparse-enhance-SSA-text-lines-parsing.patch b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base/0002-ssaparse-enhance-SSA-text-lines-parsing.patch index 2adeae93d6..4090b77c8d 100644 --- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base/0002-ssaparse-enhance-SSA-text-lines-parsing.patch +++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base/0002-ssaparse-enhance-SSA-text-lines-parsing.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 7bf9525528c8f4a47413d7f82214d76f95f0c5f6 Mon Sep 17 00:00:00 2001 | 1 | From 21b8339077f52ec9220e0d3d1f3872e23c7c2e88 Mon Sep 17 00:00:00 2001 |
2 | From: Mingke Wang <mingke.wang@freescale.com> | 2 | From: Mingke Wang <mingke.wang@freescale.com> |
3 | Date: Thu, 19 Mar 2015 14:17:10 +0800 | 3 | Date: Thu, 19 Mar 2015 14:17:10 +0800 |
4 | Subject: [PATCH] ssaparse: enhance SSA text lines parsing. | 4 | Subject: [PATCH] ssaparse: enhance SSA text lines parsing. |
@@ -9,7 +9,6 @@ and there's are maybe multiple Dialog lines in one input buffer. | |||
9 | Upstream-Status: Submitted [https://gitlab.freedesktop.org/gstreamer/gst-plugins-base/-/issues/178] | 9 | Upstream-Status: Submitted [https://gitlab.freedesktop.org/gstreamer/gst-plugins-base/-/issues/178] |
10 | 10 | ||
11 | Signed-off-by: Mingke Wang <mingke.wang@freescale.com> | 11 | Signed-off-by: Mingke Wang <mingke.wang@freescale.com> |
12 | |||
13 | --- | 12 | --- |
14 | gst/subparse/gstssaparse.c | 150 +++++++++++++++++++++++++++++++++---- | 13 | gst/subparse/gstssaparse.c | 150 +++++++++++++++++++++++++++++++++---- |
15 | 1 file changed, 134 insertions(+), 16 deletions(-) | 14 | 1 file changed, 134 insertions(+), 16 deletions(-) |
@@ -18,7 +17,7 @@ Signed-off-by: Mingke Wang <mingke.wang@freescale.com> | |||
18 | diff --git a/gst/subparse/gstssaparse.c b/gst/subparse/gstssaparse.c | 17 | diff --git a/gst/subparse/gstssaparse.c b/gst/subparse/gstssaparse.c |
19 | old mode 100644 | 18 | old mode 100644 |
20 | new mode 100755 | 19 | new mode 100755 |
21 | index d6fdb9c..5ebe678 | 20 | index 42fbb42..2dab51c |
22 | --- a/gst/subparse/gstssaparse.c | 21 | --- a/gst/subparse/gstssaparse.c |
23 | +++ b/gst/subparse/gstssaparse.c | 22 | +++ b/gst/subparse/gstssaparse.c |
24 | @@ -270,6 +270,7 @@ gst_ssa_parse_remove_override_codes (GstSsaParse * parse, gchar * txt) | 23 | @@ -270,6 +270,7 @@ gst_ssa_parse_remove_override_codes (GstSsaParse * parse, gchar * txt) |
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base/0003-viv-fb-Make-sure-config.h-is-included.patch b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base/0003-viv-fb-Make-sure-config.h-is-included.patch index a605533be8..ab46ecee5d 100644 --- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base/0003-viv-fb-Make-sure-config.h-is-included.patch +++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base/0003-viv-fb-Make-sure-config.h-is-included.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 2b07840122bc2e83bd23dad59aa80d9479f2e1e4 Mon Sep 17 00:00:00 2001 | 1 | From 246e6f5ed75696b2e9d9a9ee8cd59252724e1334 Mon Sep 17 00:00:00 2001 |
2 | From: Carlos Rafael Giani <crg7475@mailbox.org> | 2 | From: Carlos Rafael Giani <crg7475@mailbox.org> |
3 | Date: Tue, 21 May 2019 14:01:11 +0200 | 3 | Date: Tue, 21 May 2019 14:01:11 +0200 |
4 | Subject: [PATCH] viv-fb: Make sure config.h is included | 4 | Subject: [PATCH] viv-fb: Make sure config.h is included |
@@ -8,7 +8,6 @@ This prevents build errors due to missing GST_API_* symbols | |||
8 | Upstream-Status: Pending | 8 | Upstream-Status: Pending |
9 | 9 | ||
10 | Signed-off-by: Carlos Rafael Giani <crg7475@mailbox.org> | 10 | Signed-off-by: Carlos Rafael Giani <crg7475@mailbox.org> |
11 | |||
12 | --- | 11 | --- |
13 | gst-libs/gst/gl/gl-prelude.h | 4 ++++ | 12 | gst-libs/gst/gl/gl-prelude.h | 4 ++++ |
14 | 1 file changed, 4 insertions(+) | 13 | 1 file changed, 4 insertions(+) |
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base_1.22.11.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base_1.24.3.bb index 7aa10eb646..8a967cf6eb 100644 --- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base_1.22.11.bb +++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base_1.24.3.bb | |||
@@ -11,7 +11,7 @@ SRC_URI = "https://gstreamer.freedesktop.org/src/gst-plugins-base/gst-plugins-ba | |||
11 | file://0003-viv-fb-Make-sure-config.h-is-included.patch \ | 11 | file://0003-viv-fb-Make-sure-config.h-is-included.patch \ |
12 | file://0002-ssaparse-enhance-SSA-text-lines-parsing.patch \ | 12 | file://0002-ssaparse-enhance-SSA-text-lines-parsing.patch \ |
13 | " | 13 | " |
14 | SRC_URI[sha256sum] = "65eaf72296cc5edc985695a4d80affc931e64a79f4879d05615854f7a2cf5bd1" | 14 | SRC_URI[sha256sum] = "f1094397eaa7932f06e57ebbb075aa33aa2c76e4b75630a16b02c8d4af46832e" |
15 | 15 | ||
16 | S = "${WORKDIR}/gst-plugins-base-${PV}" | 16 | S = "${WORKDIR}/gst-plugins-base-${PV}" |
17 | 17 | ||
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good/0001-v4l2-Define-ioctl_req_t-for-posix-linux-case.patch b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good/0001-v4l2-Define-ioctl_req_t-for-posix-linux-case.patch deleted file mode 100644 index 33bd4200f6..0000000000 --- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good/0001-v4l2-Define-ioctl_req_t-for-posix-linux-case.patch +++ /dev/null | |||
@@ -1,38 +0,0 @@ | |||
1 | From b77d4806fd5de50d0b017a3e6a19c5bfdef7b3e4 Mon Sep 17 00:00:00 2001 | ||
2 | From: Khem Raj <raj.khem@gmail.com> | ||
3 | Date: Mon, 13 Feb 2023 12:47:31 -0800 | ||
4 | Subject: [PATCH] v4l2: Define ioctl_req_t for posix/linux case | ||
5 | |||
6 | this is an issue seen with musl based linux distros e.g. alpine [1] | ||
7 | musl is not going to change this since it breaks ABI/API interfaces | ||
8 | Newer compilers are stringent ( e.g. clang16 ) which can now detect | ||
9 | signature mismatches in function pointers too, existing code warned but | ||
10 | did not error with older clang | ||
11 | |||
12 | Fixes | ||
13 | gstv4l2object.c:544:23: error: incompatible function pointer types assigning to 'gint (*)(gint, ioctl_req_t, ...)' (aka 'int (*)(int, unsigned long, ...)') from 'int (int, int, ...)' [-Wincompatible-function-pointer-types] | ||
14 | v4l2object->ioctl = ioctl; | ||
15 | ^ ~~~~~ | ||
16 | |||
17 | [1] https://gitlab.alpinelinux.org/alpine/aports/-/issues/7580 | ||
18 | |||
19 | Upstream-Status: Submitted [https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/3950] | ||
20 | Signed-off-by: Khem Raj <raj.khem@gmail.com> | ||
21 | |||
22 | --- | ||
23 | sys/v4l2/gstv4l2object.h | 2 ++ | ||
24 | 1 file changed, 2 insertions(+) | ||
25 | |||
26 | diff --git a/sys/v4l2/gstv4l2object.h b/sys/v4l2/gstv4l2object.h | ||
27 | index d95b375..5223cbb 100644 | ||
28 | --- a/sys/v4l2/gstv4l2object.h | ||
29 | +++ b/sys/v4l2/gstv4l2object.h | ||
30 | @@ -76,6 +76,8 @@ typedef gboolean (*GstV4l2UpdateFpsFunction) (GstV4l2Object * v4l2object); | ||
31 | * 'unsigned long' for the 2nd parameter */ | ||
32 | #ifdef __ANDROID__ | ||
33 | typedef unsigned ioctl_req_t; | ||
34 | +#elif defined(__linux__) && !defined(__GLIBC__) /* musl/linux */ | ||
35 | +typedef int ioctl_req_t; | ||
36 | #else | ||
37 | typedef gulong ioctl_req_t; | ||
38 | #endif | ||
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good_1.22.11.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good_1.24.3.bb index 9ce3f73f7c..41a93e85a4 100644 --- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good_1.22.11.bb +++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good_1.24.3.bb | |||
@@ -4,10 +4,9 @@ SUMMARY = "'Good' GStreamer plugins" | |||
4 | HOMEPAGE = "https://gstreamer.freedesktop.org/" | 4 | HOMEPAGE = "https://gstreamer.freedesktop.org/" |
5 | BUGTRACKER = "https://gitlab.freedesktop.org/gstreamer/gst-plugins-good/-/issues" | 5 | BUGTRACKER = "https://gitlab.freedesktop.org/gstreamer/gst-plugins-good/-/issues" |
6 | 6 | ||
7 | SRC_URI = "https://gstreamer.freedesktop.org/src/gst-plugins-good/gst-plugins-good-${PV}.tar.xz \ | 7 | SRC_URI = "https://gstreamer.freedesktop.org/src/gst-plugins-good/gst-plugins-good-${PV}.tar.xz" |
8 | file://0001-v4l2-Define-ioctl_req_t-for-posix-linux-case.patch" | ||
9 | 8 | ||
10 | SRC_URI[sha256sum] = "6ddd032381827d31820540735f0004b429436b0bdac19aaeab44fa22faad52e2" | 9 | SRC_URI[sha256sum] = "150f914e61dc05600b68b88ca103c7cc227130158e389ea9ea159f4050a2ebb0" |
11 | 10 | ||
12 | S = "${WORKDIR}/gst-plugins-good-${PV}" | 11 | S = "${WORKDIR}/gst-plugins-good-${PV}" |
13 | 12 | ||
@@ -36,6 +35,8 @@ X11DISABLEOPTS = "-Dximagesrc=disabled -Dximagesrc-xshm=disabled -Dximagesrc-xfi | |||
36 | 35 | ||
37 | QT5WAYLANDDEPENDS = "${@bb.utils.contains("DISTRO_FEATURES", "wayland", "qtwayland", "", d)}" | 36 | QT5WAYLANDDEPENDS = "${@bb.utils.contains("DISTRO_FEATURES", "wayland", "qtwayland", "", d)}" |
38 | 37 | ||
38 | PACKAGECONFIG[amrnb] = "-Damrnb=enabled,-Damrnb=disabled,opencore-amr" | ||
39 | PACKAGECONFIG[amrwb] = "-Damrwbdec=enabled,-Damrwbdec=disabled,opencore-amr" | ||
39 | PACKAGECONFIG[asm] = "-Dasm=enabled,-Dasm=disabled,nasm-native" | 40 | PACKAGECONFIG[asm] = "-Dasm=enabled,-Dasm=disabled,nasm-native" |
40 | PACKAGECONFIG[bz2] = "-Dbz2=enabled,-Dbz2=disabled,bzip2" | 41 | PACKAGECONFIG[bz2] = "-Dbz2=enabled,-Dbz2=disabled,bzip2" |
41 | PACKAGECONFIG[cairo] = "-Dcairo=enabled,-Dcairo=disabled,cairo" | 42 | PACKAGECONFIG[cairo] = "-Dcairo=enabled,-Dcairo=disabled,cairo" |
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-ugly_1.22.11.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-ugly_1.24.3.bb index 99f41d4f3d..533de0234f 100644 --- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-ugly_1.22.11.bb +++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-ugly_1.24.3.bb | |||
@@ -15,7 +15,7 @@ SRC_URI = " \ | |||
15 | https://gstreamer.freedesktop.org/src/gst-plugins-ugly/gst-plugins-ugly-${PV}.tar.xz \ | 15 | https://gstreamer.freedesktop.org/src/gst-plugins-ugly/gst-plugins-ugly-${PV}.tar.xz \ |
16 | " | 16 | " |
17 | 17 | ||
18 | SRC_URI[sha256sum] = "7758b7decfd20c00cae5700822bcbbf03f98c723e33e17634db2e07ca1da60bf" | 18 | SRC_URI[sha256sum] = "4c951341c4c648630b6fe1234ec113d81dd2d248529bf2b5478e0ad077c80ed3" |
19 | 19 | ||
20 | S = "${WORKDIR}/gst-plugins-ugly-${PV}" | 20 | S = "${WORKDIR}/gst-plugins-ugly-${PV}" |
21 | 21 | ||
@@ -27,8 +27,6 @@ PACKAGECONFIG ??= " \ | |||
27 | ${GSTREAMER_ORC} \ | 27 | ${GSTREAMER_ORC} \ |
28 | " | 28 | " |
29 | 29 | ||
30 | PACKAGECONFIG[amrnb] = "-Damrnb=enabled,-Damrnb=disabled,opencore-amr" | ||
31 | PACKAGECONFIG[amrwb] = "-Damrwbdec=enabled,-Damrwbdec=disabled,opencore-amr" | ||
32 | PACKAGECONFIG[a52dec] = "-Da52dec=enabled,-Da52dec=disabled,liba52" | 30 | PACKAGECONFIG[a52dec] = "-Da52dec=enabled,-Da52dec=disabled,liba52" |
33 | PACKAGECONFIG[cdio] = "-Dcdio=enabled,-Dcdio=disabled,libcdio" | 31 | PACKAGECONFIG[cdio] = "-Dcdio=enabled,-Dcdio=disabled,libcdio" |
34 | PACKAGECONFIG[dvdread] = "-Ddvdread=enabled,-Ddvdread=disabled,libdvdread" | 32 | PACKAGECONFIG[dvdread] = "-Ddvdread=enabled,-Ddvdread=disabled,libdvdread" |
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-python_1.22.11.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-python_1.24.3.bb index 0fbb03f757..5fbf860741 100644 --- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-python_1.22.11.bb +++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-python_1.24.3.bb | |||
@@ -8,7 +8,7 @@ LICENSE = "LGPL-2.1-or-later" | |||
8 | LIC_FILES_CHKSUM = "file://COPYING;md5=c34deae4e395ca07e725ab0076a5f740" | 8 | LIC_FILES_CHKSUM = "file://COPYING;md5=c34deae4e395ca07e725ab0076a5f740" |
9 | 9 | ||
10 | SRC_URI = "https://gstreamer.freedesktop.org/src/${PNREAL}/${PNREAL}-${PV}.tar.xz" | 10 | SRC_URI = "https://gstreamer.freedesktop.org/src/${PNREAL}/${PNREAL}-${PV}.tar.xz" |
11 | SRC_URI[sha256sum] = "f7a5450d93fd81bf46060dca7f4a048d095b6717961fec211731a11a994c99a7" | 11 | SRC_URI[sha256sum] = "ecdb3e2ba94ea2c82b93a8c715d5a7e04f9726a8838c0a6b17694928fd1e8595" |
12 | 12 | ||
13 | DEPENDS = "gstreamer1.0 gstreamer1.0-plugins-base python3-pygobject" | 13 | DEPENDS = "gstreamer1.0 gstreamer1.0-plugins-base python3-pygobject" |
14 | RDEPENDS:${PN} += "gstreamer1.0 gstreamer1.0-plugins-base python3-pygobject" | 14 | RDEPENDS:${PN} += "gstreamer1.0 gstreamer1.0-plugins-base python3-pygobject" |
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-rtsp-server_1.22.11.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-rtsp-server_1.24.3.bb index 554ed9ec8f..f63d61b2c9 100644 --- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-rtsp-server_1.22.11.bb +++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-rtsp-server_1.24.3.bb | |||
@@ -10,7 +10,7 @@ PNREAL = "gst-rtsp-server" | |||
10 | 10 | ||
11 | SRC_URI = "https://gstreamer.freedesktop.org/src/${PNREAL}/${PNREAL}-${PV}.tar.xz" | 11 | SRC_URI = "https://gstreamer.freedesktop.org/src/${PNREAL}/${PNREAL}-${PV}.tar.xz" |
12 | 12 | ||
13 | SRC_URI[sha256sum] = "ec49d474750a6ff6729c85b448abc607fb6840b21717ad7abc967e2adbf07a24" | 13 | SRC_URI[sha256sum] = "62628ecca78b8f5b51c59a4d602c65e9205ffc50c3c83cd61fab1f6348b63565" |
14 | 14 | ||
15 | S = "${WORKDIR}/${PNREAL}-${PV}" | 15 | S = "${WORKDIR}/${PNREAL}-${PV}" |
16 | 16 | ||
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-vaapi_1.22.11.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-vaapi_1.24.3.bb index 87eb8484a1..8ddfe15de9 100644 --- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-vaapi_1.22.11.bb +++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-vaapi_1.24.3.bb | |||
@@ -11,7 +11,7 @@ LIC_FILES_CHKSUM = "file://COPYING.LIB;md5=4fbd65380cdd255951079008b364516c" | |||
11 | 11 | ||
12 | SRC_URI = "https://gstreamer.freedesktop.org/src/${REALPN}/${REALPN}-${PV}.tar.xz" | 12 | SRC_URI = "https://gstreamer.freedesktop.org/src/${REALPN}/${REALPN}-${PV}.tar.xz" |
13 | 13 | ||
14 | SRC_URI[sha256sum] = "6eae1360658302b9b512fa46b4d06f5b818dfce5f2f43d7d710ca8142719d8ad" | 14 | SRC_URI[sha256sum] = "845f2efe0dca8dab234dde8fb091da2cd06a9d2a683422b56dcb688954f9070e" |
15 | 15 | ||
16 | S = "${WORKDIR}/${REALPN}-${PV}" | 16 | S = "${WORKDIR}/${REALPN}-${PV}" |
17 | DEPENDS = "libva gstreamer1.0 gstreamer1.0-plugins-base gstreamer1.0-plugins-bad" | 17 | DEPENDS = "libva gstreamer1.0 gstreamer1.0-plugins-base gstreamer1.0-plugins-bad" |
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0/0001-tests-respect-the-idententaion-used-in-meson.patch b/meta/recipes-multimedia/gstreamer/gstreamer1.0/0001-tests-respect-the-idententaion-used-in-meson.patch index 0d839bd6c8..a190370c16 100644 --- a/meta/recipes-multimedia/gstreamer/gstreamer1.0/0001-tests-respect-the-idententaion-used-in-meson.patch +++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0/0001-tests-respect-the-idententaion-used-in-meson.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 5372cd5bf2a9dd247b9c5fc6e4fe248046dbb085 Mon Sep 17 00:00:00 2001 | 1 | From 381f50a27766c425af36a7203061716497e6603f Mon Sep 17 00:00:00 2001 |
2 | From: Jose Quaresma <quaresma.jose@gmail.com> | 2 | From: Jose Quaresma <quaresma.jose@gmail.com> |
3 | Date: Sun, 11 Apr 2021 19:48:13 +0100 | 3 | Date: Sun, 11 Apr 2021 19:48:13 +0100 |
4 | Subject: [PATCH] tests: respect the idententaion used in meson | 4 | Subject: [PATCH] tests: respect the idententaion used in meson |
@@ -6,16 +6,15 @@ Subject: [PATCH] tests: respect the idententaion used in meson | |||
6 | Upstream-Status: Submitted [https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/789] | 6 | Upstream-Status: Submitted [https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/789] |
7 | 7 | ||
8 | Signed-off-by: Jose Quaresma <quaresma.jose@gmail.com> | 8 | Signed-off-by: Jose Quaresma <quaresma.jose@gmail.com> |
9 | |||
10 | --- | 9 | --- |
11 | tests/check/meson.build | 10 +++++----- | 10 | tests/check/meson.build | 10 +++++----- |
12 | 1 file changed, 5 insertions(+), 5 deletions(-) | 11 | 1 file changed, 5 insertions(+), 5 deletions(-) |
13 | 12 | ||
14 | diff --git a/tests/check/meson.build b/tests/check/meson.build | 13 | diff --git a/tests/check/meson.build b/tests/check/meson.build |
15 | index 9787b0a..16caac7 100644 | 14 | index 6540279..e9501fe 100644 |
16 | --- a/tests/check/meson.build | 15 | --- a/tests/check/meson.build |
17 | +++ b/tests/check/meson.build | 16 | +++ b/tests/check/meson.build |
18 | @@ -145,11 +145,11 @@ foreach t : core_tests | 17 | @@ -146,11 +146,11 @@ foreach t : core_tests |
19 | 18 | ||
20 | if not skip_test | 19 | if not skip_test |
21 | exe = executable(test_name, fname, | 20 | exe = executable(test_name, fname, |
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0/0002-tests-add-support-for-install-the-tests.patch b/meta/recipes-multimedia/gstreamer/gstreamer1.0/0002-tests-add-support-for-install-the-tests.patch index 64717e66c3..a990940cc6 100644 --- a/meta/recipes-multimedia/gstreamer/gstreamer1.0/0002-tests-add-support-for-install-the-tests.patch +++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0/0002-tests-add-support-for-install-the-tests.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 27e977d03b0f7c1d0bf19621ef0cec0585410e7b Mon Sep 17 00:00:00 2001 | 1 | From 31dea17a1d5de0003719a875a1089df43a50219a Mon Sep 17 00:00:00 2001 |
2 | From: Jose Quaresma <quaresma.jose@gmail.com> | 2 | From: Jose Quaresma <quaresma.jose@gmail.com> |
3 | Date: Sun, 11 Apr 2021 19:48:13 +0100 | 3 | Date: Sun, 11 Apr 2021 19:48:13 +0100 |
4 | Subject: [PATCH] tests: add support for install the tests | 4 | Subject: [PATCH] tests: add support for install the tests |
@@ -10,7 +10,6 @@ This will provide to run the tests using the gnome-desktop-testing [1] | |||
10 | Upstream-Status: Submitted [https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/789] | 10 | Upstream-Status: Submitted [https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/789] |
11 | 11 | ||
12 | Signed-off-by: Jose Quaresma <quaresma.jose@gmail.com> | 12 | Signed-off-by: Jose Quaresma <quaresma.jose@gmail.com> |
13 | |||
14 | --- | 13 | --- |
15 | meson.build | 4 ++++ | 14 | meson.build | 4 ++++ |
16 | meson_options.txt | 1 + | 15 | meson_options.txt | 1 + |
@@ -20,10 +19,10 @@ Signed-off-by: Jose Quaresma <quaresma.jose@gmail.com> | |||
20 | create mode 100644 tests/check/template.test.in | 19 | create mode 100644 tests/check/template.test.in |
21 | 20 | ||
22 | diff --git a/meson.build b/meson.build | 21 | diff --git a/meson.build b/meson.build |
23 | index 60c7bec..f7650b1 100644 | 22 | index efcf189..c97d9a8 100644 |
24 | --- a/meson.build | 23 | --- a/meson.build |
25 | +++ b/meson.build | 24 | +++ b/meson.build |
26 | @@ -606,6 +606,10 @@ if bashcomp_dep.found() | 25 | @@ -624,6 +624,10 @@ if bashcomp_dep.found() |
27 | endif | 26 | endif |
28 | endif | 27 | endif |
29 | 28 | ||
@@ -35,10 +34,10 @@ index 60c7bec..f7650b1 100644 | |||
35 | 34 | ||
36 | pkgconfig = import('pkgconfig') | 35 | pkgconfig = import('pkgconfig') |
37 | diff --git a/meson_options.txt b/meson_options.txt | 36 | diff --git a/meson_options.txt b/meson_options.txt |
38 | index 7363bdb..a34ba37 100644 | 37 | index 340fb58..5b87f68 100644 |
39 | --- a/meson_options.txt | 38 | --- a/meson_options.txt |
40 | +++ b/meson_options.txt | 39 | +++ b/meson_options.txt |
41 | @@ -15,6 +15,7 @@ option('poisoning', type : 'boolean', value : false, description : 'Enable poiso | 40 | @@ -16,6 +16,7 @@ option('poisoning', type : 'boolean', value : false, description : 'Enable poiso |
42 | option('memory-alignment', type: 'combo', | 41 | option('memory-alignment', type: 'combo', |
43 | choices : ['1', '2', '4', '8', '16', '32', '64', '128', '256', '512', '1024', '2048', '4096', '8192', 'malloc', 'pagesize'], | 42 | choices : ['1', '2', '4', '8', '16', '32', '64', '128', '256', '512', '1024', '2048', '4096', '8192', 'malloc', 'pagesize'], |
44 | value: 'malloc') | 43 | value: 'malloc') |
@@ -47,14 +46,14 @@ index 7363bdb..a34ba37 100644 | |||
47 | # Feature options | 46 | # Feature options |
48 | option('check', type : 'feature', value : 'auto', description : 'Build unit test libraries') | 47 | option('check', type : 'feature', value : 'auto', description : 'Build unit test libraries') |
49 | diff --git a/tests/check/meson.build b/tests/check/meson.build | 48 | diff --git a/tests/check/meson.build b/tests/check/meson.build |
50 | index 16caac7..f2d400f 100644 | 49 | index e9501fe..08f8fd8 100644 |
51 | --- a/tests/check/meson.build | 50 | --- a/tests/check/meson.build |
52 | +++ b/tests/check/meson.build | 51 | +++ b/tests/check/meson.build |
53 | @@ -124,10 +124,16 @@ test_defines = [ | 52 | @@ -125,10 +125,16 @@ test_defines = [ |
54 | '-UG_DISABLE_ASSERT', | 53 | '-UG_DISABLE_ASSERT', |
55 | '-UG_DISABLE_CAST_CHECKS', | 54 | '-UG_DISABLE_CAST_CHECKS', |
56 | '-DGST_CHECK_TEST_ENVIRONMENT_BEACON="GST_STATE_IGNORE_ELEMENTS"', | 55 | '-DGST_CHECK_TEST_ENVIRONMENT_BEACON="GST_STATE_IGNORE_ELEMENTS"', |
57 | - '-DTESTFILE="' + meson.current_source_dir() + '/meson.build"', | 56 | - '-DTESTFILE="' + fsmod.as_posix(meson.current_source_dir()) + '/meson.build"', |
58 | '-DGST_DISABLE_DEPRECATED', | 57 | '-DGST_DISABLE_DEPRECATED', |
59 | ] | 58 | ] |
60 | 59 | ||
@@ -68,7 +67,7 @@ index 16caac7..f2d400f 100644 | |||
68 | # sanity checking | 67 | # sanity checking |
69 | if get_option('check').disabled() | 68 | if get_option('check').disabled() |
70 | if get_option('tests').enabled() | 69 | if get_option('tests').enabled() |
71 | @@ -150,6 +156,8 @@ foreach t : core_tests | 70 | @@ -151,6 +157,8 @@ foreach t : core_tests |
72 | include_directories : [configinc], | 71 | include_directories : [configinc], |
73 | link_with : link_with_libs, | 72 | link_with : link_with_libs, |
74 | dependencies : gst_deps + test_deps, | 73 | dependencies : gst_deps + test_deps, |
@@ -77,7 +76,7 @@ index 16caac7..f2d400f 100644 | |||
77 | ) | 76 | ) |
78 | 77 | ||
79 | env = environment() | 78 | env = environment() |
80 | @@ -161,6 +169,18 @@ foreach t : core_tests | 79 | @@ -162,6 +170,18 @@ foreach t : core_tests |
81 | env.set('GST_PLUGIN_SCANNER_1_0', gst_scanner_dir + '/gst-plugin-scanner') | 80 | env.set('GST_PLUGIN_SCANNER_1_0', gst_scanner_dir + '/gst-plugin-scanner') |
82 | env.set('GST_PLUGIN_LOADING_WHITELIST', 'gstreamer') | 81 | env.set('GST_PLUGIN_LOADING_WHITELIST', 'gstreamer') |
83 | 82 | ||
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0/0003-tests-use-a-dictionaries-for-environment.patch b/meta/recipes-multimedia/gstreamer/gstreamer1.0/0003-tests-use-a-dictionaries-for-environment.patch index 0fd830f150..27623bf96b 100644 --- a/meta/recipes-multimedia/gstreamer/gstreamer1.0/0003-tests-use-a-dictionaries-for-environment.patch +++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0/0003-tests-use-a-dictionaries-for-environment.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 7041bc5adf9501beb1428d8bbae6b351a6bf07f9 Mon Sep 17 00:00:00 2001 | 1 | From 453865e8938c5f50f14409b67246bbbd801fa44d Mon Sep 17 00:00:00 2001 |
2 | From: Jose Quaresma <quaresma.jose@gmail.com> | 2 | From: Jose Quaresma <quaresma.jose@gmail.com> |
3 | Date: Sat, 24 Apr 2021 10:34:47 +0100 | 3 | Date: Sat, 24 Apr 2021 10:34:47 +0100 |
4 | Subject: [PATCH] tests: use a dictionaries for environment | 4 | Subject: [PATCH] tests: use a dictionaries for environment |
@@ -9,16 +9,15 @@ use a dictionary as this is simplest solution to install the environment. | |||
9 | Upstream-Status: Submitted [https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/789] | 9 | Upstream-Status: Submitted [https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/789] |
10 | 10 | ||
11 | Signed-off-by: Jose Quaresma <quaresma.jose@gmail.com> | 11 | Signed-off-by: Jose Quaresma <quaresma.jose@gmail.com> |
12 | |||
13 | --- | 12 | --- |
14 | tests/check/meson.build | 21 +++++++++++++-------- | 13 | tests/check/meson.build | 21 +++++++++++++-------- |
15 | 1 file changed, 13 insertions(+), 8 deletions(-) | 14 | 1 file changed, 13 insertions(+), 8 deletions(-) |
16 | 15 | ||
17 | diff --git a/tests/check/meson.build b/tests/check/meson.build | 16 | diff --git a/tests/check/meson.build b/tests/check/meson.build |
18 | index f2d400f..50dff7f 100644 | 17 | index 08f8fd8..330abaa 100644 |
19 | --- a/tests/check/meson.build | 18 | --- a/tests/check/meson.build |
20 | +++ b/tests/check/meson.build | 19 | +++ b/tests/check/meson.build |
21 | @@ -160,14 +160,19 @@ foreach t : core_tests | 20 | @@ -161,14 +161,19 @@ foreach t : core_tests |
22 | install: installed_tests_enabled, | 21 | install: installed_tests_enabled, |
23 | ) | 22 | ) |
24 | 23 | ||
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0/0004-tests-add-helper-script-to-run-the-installed_tests.patch b/meta/recipes-multimedia/gstreamer/gstreamer1.0/0004-tests-add-helper-script-to-run-the-installed_tests.patch index 5689dc9fbb..97b056072d 100644 --- a/meta/recipes-multimedia/gstreamer/gstreamer1.0/0004-tests-add-helper-script-to-run-the-installed_tests.patch +++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0/0004-tests-add-helper-script-to-run-the-installed_tests.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 1b1d1ce4227b6bea7c7def5dac4a663486e070c2 Mon Sep 17 00:00:00 2001 | 1 | From 506ff13a044439a170f770b6663a64c942ae0a05 Mon Sep 17 00:00:00 2001 |
2 | From: Jose Quaresma <quaresma.jose@gmail.com> | 2 | From: Jose Quaresma <quaresma.jose@gmail.com> |
3 | Date: Sun, 2 May 2021 01:58:01 +0100 | 3 | Date: Sun, 2 May 2021 01:58:01 +0100 |
4 | Subject: [PATCH] tests: add helper script to run the installed_tests | 4 | Subject: [PATCH] tests: add helper script to run the installed_tests |
@@ -10,7 +10,6 @@ testing framework. | |||
10 | Upstream-Status: Submitted [https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/789] | 10 | Upstream-Status: Submitted [https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/789] |
11 | 11 | ||
12 | Signed-off-by: Jose Quaresma <quaresma.jose@gmail.com> | 12 | Signed-off-by: Jose Quaresma <quaresma.jose@gmail.com> |
13 | |||
14 | --- | 13 | --- |
15 | tests/check/meson.build | 17 +++++++++++++++++ | 14 | tests/check/meson.build | 17 +++++++++++++++++ |
16 | tests/check/template.sh.in | 9 +++++++++ | 15 | tests/check/template.sh.in | 9 +++++++++ |
@@ -19,10 +18,10 @@ Signed-off-by: Jose Quaresma <quaresma.jose@gmail.com> | |||
19 | create mode 100755 tests/check/template.sh.in | 18 | create mode 100755 tests/check/template.sh.in |
20 | 19 | ||
21 | diff --git a/tests/check/meson.build b/tests/check/meson.build | 20 | diff --git a/tests/check/meson.build b/tests/check/meson.build |
22 | index 50dff7f..2b9e979 100644 | 21 | index 330abaa..5d383b1 100644 |
23 | --- a/tests/check/meson.build | 22 | --- a/tests/check/meson.build |
24 | +++ b/tests/check/meson.build | 23 | +++ b/tests/check/meson.build |
25 | @@ -184,6 +184,23 @@ foreach t : core_tests | 24 | @@ -185,6 +185,23 @@ foreach t : core_tests |
26 | install_dir: installed_tests_metadir, | 25 | install_dir: installed_tests_metadir, |
27 | configuration: test_conf | 26 | configuration: test_conf |
28 | ) | 27 | ) |
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0/run-ptest b/meta/recipes-multimedia/gstreamer/gstreamer1.0/run-ptest index 7d0312005f..ab2df80dcc 100755 --- a/meta/recipes-multimedia/gstreamer/gstreamer1.0/run-ptest +++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0/run-ptest | |||
@@ -1,11 +1,13 @@ | |||
1 | #! /bin/sh | 1 | #! /bin/sh |
2 | 2 | ||
3 | # Multiply all timeouts by ten so they're more likely to work | 3 | # Multiply all timeouts by five so they're more likely to work |
4 | # on a loaded system. | 4 | # on a loaded system. The default timeout is 20s so this makes it |
5 | # one minute. | ||
5 | export CK_TIMEOUT_MULTIPLIER=5 | 6 | export CK_TIMEOUT_MULTIPLIER=5 |
6 | 7 | ||
7 | # Skip some tests that we know are problematic | 8 | # Skip some tests that we know are problematic |
8 | export GST_CHECKS_IGNORE="" | 9 | export GST_CHECKS_IGNORE="" |
10 | |||
9 | # gstnetclientclock.c:test_functioning is very sensitive to load | 11 | # gstnetclientclock.c:test_functioning is very sensitive to load |
10 | GST_CHECKS_IGNORE="$GST_CHECKS_IGNORE,test_functioning" | 12 | GST_CHECKS_IGNORE="$GST_CHECKS_IGNORE,test_functioning" |
11 | 13 | ||
@@ -13,4 +15,7 @@ GST_CHECKS_IGNORE="$GST_CHECKS_IGNORE,test_functioning" | |||
13 | # https://gitlab.freedesktop.org/gstreamer/gstreamer/-/issues/410 | 15 | # https://gitlab.freedesktop.org/gstreamer/gstreamer/-/issues/410 |
14 | GST_CHECKS_IGNORE="$GST_CHECKS_IGNORE,test_infinite_seek_50_src_live" | 16 | GST_CHECKS_IGNORE="$GST_CHECKS_IGNORE,test_infinite_seek_50_src_live" |
15 | 17 | ||
18 | # Known unreliable tests as per subprojects/gst-devtools/validate/launcher/testsuites/check.py: | ||
19 | GST_CHECKS_IGNORE="$GST_CHECKS_IGNORE:parser_pull_short_read" | ||
20 | |||
16 | gnome-desktop-testing-runner gstreamer | 21 | gnome-desktop-testing-runner gstreamer |
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0_1.22.11.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0_1.24.3.bb index 8965497d01..cd04ade602 100644 --- a/meta/recipes-multimedia/gstreamer/gstreamer1.0_1.22.11.bb +++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0_1.24.3.bb | |||
@@ -22,7 +22,7 @@ SRC_URI = "https://gstreamer.freedesktop.org/src/gstreamer/gstreamer-${PV}.tar.x | |||
22 | file://0003-tests-use-a-dictionaries-for-environment.patch \ | 22 | file://0003-tests-use-a-dictionaries-for-environment.patch \ |
23 | file://0004-tests-add-helper-script-to-run-the-installed_tests.patch \ | 23 | file://0004-tests-add-helper-script-to-run-the-installed_tests.patch \ |
24 | " | 24 | " |
25 | SRC_URI[sha256sum] = "3d16259e9dab8b002c57ce208a09b350d8282f5b0197306c0cdba9a0d0799744" | 25 | SRC_URI[sha256sum] = "1225ef4a329fae1cadc5ec727dab249ad567e8072879493561ceb91ed34aa414" |
26 | 26 | ||
27 | PACKAGECONFIG ??= "${@bb.utils.contains('PTEST_ENABLED', '1', 'tests', '', d)} \ | 27 | PACKAGECONFIG ??= "${@bb.utils.contains('PTEST_ENABLED', '1', 'tests', '', d)} \ |
28 | check \ | 28 | check \ |
diff --git a/meta/recipes-multimedia/libomxil/libomxil-0.9.3/configure-fix.patch b/meta/recipes-multimedia/libomxil/libomxil-0.9.3/configure-fix.patch deleted file mode 100644 index 876e80ef76..0000000000 --- a/meta/recipes-multimedia/libomxil/libomxil-0.9.3/configure-fix.patch +++ /dev/null | |||
@@ -1,58 +0,0 @@ | |||
1 | To enable --disable-Werror for libomxil to avoid some compilers which check code strictly. | ||
2 | |||
3 | For example, at least the following errors happened to some compilers: | ||
4 | |||
5 | 1) OMX_INDEXTYPE in include/OMX_Index.h IS NOT OMX_INDEXVENDORTYPE in src/base/omx_base_component.h | ||
6 | | i586-poky-linux-libtool: compile: i586-poky-linux-gcc -m32 -march=i586 --sysroot=/home/yocto-build5/poky/build/tmp/sysroots/qemux86 -DHAVE_Climinate-unused-debug-types -Wall -Werror -DCONFIG_DEBUG_LEVEL=0 -c OMXComponentRMExt.c -fPIC -DPIC -o .libs/libomxbase_la-OMXComponentRMExt.o | ||
7 | | omx_base_component.c: In function 'omx_base_component_GetParameter': | ||
8 | | omx_base_component.c:991:3: error: case value '2130706435' not in enumerated type 'OMX_INDEXTYPE' [-Werror=switch] | ||
9 | | omx_base_component.c:918:3: error: case value '2130706436' not in enumerated type 'OMX_INDEXTYPE' [-Werror=switch] | ||
10 | |||
11 | 2) | ||
12 | | i586-poky-linux-gcc -m32 -march=i586 --sysroot=/home/yocto-build5/poky/build/tmp/sysroots/qemux86 -DHAVE_CONFIG_H -I. -I.. -DOMXILCOMPOminate-unused-debug-types -Wall -Werror -DCONFIG_DEBUG_LEVEL=0 -c -o omxregister_bellagio-omxregister.o `test -f 'omxregister.c' || echo './'`o | ||
13 | | omxregister.c: In function 'buildComponentsList': | ||
14 | | omxregister.c:175:7: error: variable 'err' set but not used [-Werror=unused-but-set-variable] | ||
15 | | cc1: all warnings being treated as errors | ||
16 | |||
17 | Upstream-Status: Inappropriate [configuration] | ||
18 | |||
19 | Signed-off-by: Shane Wang <shane.wang@intel.com> | ||
20 | |||
21 | diff -r 82d742d3ea90 configure.ac | ||
22 | --- a/configure.ac Tue Dec 27 15:30:35 2011 +0800 | ||
23 | +++ b/configure.ac Tue Dec 27 16:26:03 2011 +0800 | ||
24 | @@ -5,7 +5,7 @@ | ||
25 | AC_PREREQ([2.59]) | ||
26 | |||
27 | AC_CONFIG_HEADERS([config.h]) | ||
28 | -CFLAGS="${CFLAGS} -Wall -Werror" | ||
29 | +CFLAGS="${CFLAGS} -Wall" | ||
30 | |||
31 | ################################################################################ | ||
32 | # Set the shared versioning info, according to section 6.3 of the libtool info # | ||
33 | @@ -122,6 +122,14 @@ | ||
34 | [with_android=$enableval], | ||
35 | [with_android=no]) | ||
36 | |||
37 | +AC_ARG_ENABLE( | ||
38 | + [Werror], | ||
39 | + [AC_HELP_STRING( | ||
40 | + [--disable-Werror], | ||
41 | + [whether to diable treating gcc warnings as errors])], | ||
42 | + [with_Werror=$enableval], | ||
43 | + [with_Werror=yes]) | ||
44 | + | ||
45 | ################################################################################ | ||
46 | # Check for programs # | ||
47 | ################################################################################ | ||
48 | @@ -193,6 +201,10 @@ | ||
49 | CFG_DEBUG_LEVEL=255 | ||
50 | fi | ||
51 | |||
52 | +if test "x$with_Werror" = "xyes"; then | ||
53 | + CFLAGS="${CFLAGS} -Werror" | ||
54 | +fi | ||
55 | + | ||
56 | AC_SUBST(CFG_DEBUG_LEVEL) | ||
57 | CFLAGS="${CFLAGS} -DCONFIG_DEBUG_LEVEL=$CFG_DEBUG_LEVEL" | ||
58 | |||
diff --git a/meta/recipes-multimedia/libomxil/libomxil-0.9.3/disable-so-versioning.patch b/meta/recipes-multimedia/libomxil/libomxil-0.9.3/disable-so-versioning.patch deleted file mode 100644 index f408e4a621..0000000000 --- a/meta/recipes-multimedia/libomxil/libomxil-0.9.3/disable-so-versioning.patch +++ /dev/null | |||
@@ -1,36 +0,0 @@ | |||
1 | Disable so versioning since they are really not a versioned shared lib. | ||
2 | |||
3 | Upstream-Status: Submitted @ https://sourceforge.net/p/omxil/bugs/59/ | ||
4 | |||
5 | Signed-off-by: Drew Moseley <drew_moseley@mentor.com> | ||
6 | |||
7 | diff -rub libomxil-bellagio-0.9.3-orig/src/components/audio_effects/Makefile.am libomxil-bellagio-0.9.3/src/components/audio_effects/Makefile.am | ||
8 | --- libomxil-bellagio-0.9.3-orig/src/components/audio_effects/Makefile.am 2014-07-20 15:22:00.858425234 -0400 | ||
9 | +++ libomxil-bellagio-0.9.3/src/components/audio_effects/Makefile.am 2014-07-20 15:25:42.687525225 -0400 | ||
10 | @@ -10,4 +10,5 @@ | ||
11 | libomxaudio_effects_la_CFLAGS = -I$(top_srcdir)/include \ | ||
12 | -I$(top_srcdir)/src \ | ||
13 | -I$(top_srcdir)/src/base | ||
14 | +libomxaudio_effects_la_LDFLAGS = -avoid-version | ||
15 | |||
16 | diff -rub libomxil-bellagio-0.9.3-orig/src/components/clocksrc/Makefile.am libomxil-bellagio-0.9.3/src/components/clocksrc/Makefile.am | ||
17 | --- libomxil-bellagio-0.9.3-orig/src/components/clocksrc/Makefile.am 2014-07-20 15:22:00.858425234 -0400 | ||
18 | +++ libomxil-bellagio-0.9.3/src/components/clocksrc/Makefile.am 2014-07-20 15:24:49.151259753 -0400 | ||
19 | @@ -10,4 +10,4 @@ | ||
20 | -I$(top_srcdir)/include \ | ||
21 | -I$(top_srcdir)/src \ | ||
22 | -I$(top_srcdir)/src/base | ||
23 | - | ||
24 | +libomxclocksrc_la_LDFLAGS = -avoid-version | ||
25 | diff -rub libomxil-bellagio-0.9.3-orig/src/components/videoscheduler/Makefile.am libomxil-bellagio-0.9.3/src/components/videoscheduler/Makefile.am | ||
26 | --- libomxil-bellagio-0.9.3-orig/src/components/videoscheduler/Makefile.am 2014-07-20 15:22:00.862425254 -0400 | ||
27 | +++ libomxil-bellagio-0.9.3/src/components/videoscheduler/Makefile.am 2014-07-20 15:22:36.462601786 -0400 | ||
28 | @@ -6,7 +6,7 @@ | ||
29 | library_entry_point.c | ||
30 | |||
31 | libomxvideosched_la_LIBADD = $(top_builddir)/src/libomxil-bellagio.la | ||
32 | -libomxvideosched_la_LDFLAGS = | ||
33 | +libomxvideosched_la_LDFLAGS = -avoid-version | ||
34 | libomxvideosched_la_CFLAGS = -I$(top_srcdir)/include \ | ||
35 | -I$(top_srcdir)/src \ | ||
36 | -I$(top_srcdir)/src/base | ||
diff --git a/meta/recipes-multimedia/libomxil/libomxil-0.9.3/dynamicloader-linking.patch b/meta/recipes-multimedia/libomxil/libomxil-0.9.3/dynamicloader-linking.patch deleted file mode 100644 index 787953a99b..0000000000 --- a/meta/recipes-multimedia/libomxil/libomxil-0.9.3/dynamicloader-linking.patch +++ /dev/null | |||
@@ -1,20 +0,0 @@ | |||
1 | This patch fixes link issue when libomxdynamicloader.so is loaded : | ||
2 | Missing symbol RM_Deinit. | ||
3 | |||
4 | This patch comes from "http://pkgs.fedoraproject.org/cgit/libomxil-bellagio.git/commit/?h=f17&id=4996a95828943d345e51ded6876c3103653eecf8" | ||
5 | |||
6 | Upstream-Status: Pending | ||
7 | |||
8 | Signed-off-by: Sébastien Mennetrier <s.mennetrier@innotis.org> | ||
9 | |||
10 | --- a/src/dynamic_loader/Makefile.am 2014-02-25 15:29:10.128549636 +0100 | ||
11 | +++ b/src/dynamic_loader/Makefile.am 2014-02-25 15:30:15.756548808 +0100 | ||
12 | @@ -3,7 +3,7 @@ | ||
13 | omxdynamicloader_LTLIBRARIES = libomxdynamicloader.la | ||
14 | libomxdynamicloader_la_SOURCES = ste_dynamic_component_loader.c ste_dynamic_component_loader.h | ||
15 | |||
16 | -libomxdynamicloader_la_LDFLAGS = | ||
17 | +libomxdynamicloader_la_LDFLAGS = -lomxil-bellagio -L$(top_builddir)/src/.libs | ||
18 | libomxdynamicloader_la_CFLAGS = -I$(top_srcdir)/include \ | ||
19 | -I$(top_srcdir)/src \ | ||
20 | -I$(top_srcdir)/src/base \ | ||
diff --git a/meta/recipes-multimedia/libomxil/libomxil-0.9.3/makefile-docdir-fix.patch b/meta/recipes-multimedia/libomxil/libomxil-0.9.3/makefile-docdir-fix.patch deleted file mode 100644 index dbe8c41f74..0000000000 --- a/meta/recipes-multimedia/libomxil/libomxil-0.9.3/makefile-docdir-fix.patch +++ /dev/null | |||
@@ -1,19 +0,0 @@ | |||
1 | This patch is to remove DESTDIR in docdir. | ||
2 | Otherwise, when users install by running `make install DESTDIR=/alternate/directory' specified in the file INSTALL, the doc will go into /alternate/directory/alternate/directory, which is not expected. | ||
3 | |||
4 | Upstream-Status: Pending | ||
5 | |||
6 | Signed-off-by: Shane Wang <shane.wang@intel.com> | ||
7 | |||
8 | diff -r 30b597e4e70d Makefile.am | ||
9 | --- a/Makefile.am Wed Dec 28 15:38:35 2011 +0800 | ||
10 | +++ b/Makefile.am Wed Dec 28 15:39:25 2011 +0800 | ||
11 | @@ -7,7 +7,7 @@ | ||
12 | pkgconfigdir = $(libdir)/pkgconfig | ||
13 | pkgconfig_DATA = libomxil-bellagio.pc | ||
14 | |||
15 | -docdir = $(DESTDIR)$(prefix)/share/doc/@PACKAGE@ | ||
16 | +docdir = $(prefix)/share/doc/@PACKAGE@ | ||
17 | doc_DATA = README \ | ||
18 | ChangeLog \ | ||
19 | TODO | ||
diff --git a/meta/recipes-multimedia/libomxil/libomxil-0.9.3/parallel-make.patch b/meta/recipes-multimedia/libomxil/libomxil-0.9.3/parallel-make.patch deleted file mode 100644 index 483ca1328b..0000000000 --- a/meta/recipes-multimedia/libomxil/libomxil-0.9.3/parallel-make.patch +++ /dev/null | |||
@@ -1,18 +0,0 @@ | |||
1 | This patch is to make libomxil Makefile support "make -jN". | ||
2 | The omxregister_bellagio stuffs depend on libomxil_bellagio library. | ||
3 | |||
4 | Upstream-Status: Pending | ||
5 | |||
6 | Signed-off-by: Shane Wang <shane.wang@intel.com> | ||
7 | |||
8 | diff -r f59d077d3dd5 Makefile.am | ||
9 | --- a/src/Makefile.am Wed Dec 28 10:54:36 2011 +0800 | ||
10 | +++ b/src/Makefile.am Wed Dec 28 10:55:46 2011 +0800 | ||
11 | @@ -7,6 +7,7 @@ | ||
12 | omxregister_bellagio_SOURCES = omxregister.c common.c common.h | ||
13 | omxregister_bellagio_CFLAGS = -DOMXILCOMPONENTSPATH=\"$(plugindir)/\" \ | ||
14 | -I$(top_srcdir)/include | ||
15 | +omxregister_bellagio_LDADD = $(lib_LTLIBRARIES) | ||
16 | omxregister_bellagio_LDFLAGS = -lomxil-bellagio -L$(builddir) | ||
17 | |||
18 | lib_LTLIBRARIES = libomxil-bellagio.la | ||
diff --git a/meta/recipes-multimedia/libomxil/libomxil_0.9.3.bb b/meta/recipes-multimedia/libomxil/libomxil_0.9.3.bb deleted file mode 100644 index 4564d619ae..0000000000 --- a/meta/recipes-multimedia/libomxil/libomxil_0.9.3.bb +++ /dev/null | |||
@@ -1,45 +0,0 @@ | |||
1 | SUMMARY = "Bellagio OpenMAX Integration Layer (IL)" | ||
2 | DESCRIPTION = "Bellagio is an opensource implementation of the Khronos OpenMAX \ | ||
3 | Integration Layer API to access multimedia components." | ||
4 | HOMEPAGE = "http://omxil.sourceforge.net/" | ||
5 | |||
6 | LICENSE = "LGPL-2.1-or-later" | ||
7 | LICENSE_FLAGS = "${@bb.utils.contains('PACKAGECONFIG', 'amr', 'commercial', '', d)}" | ||
8 | LIC_FILES_CHKSUM = "file://COPYING;md5=ae6f0f4dbc7ac193b50f323a6ae191cb \ | ||
9 | file://src/omxcore.h;beginline=1;endline=27;md5=806b1e5566c06486fe8e42b461e03a90" | ||
10 | |||
11 | SRC_URI = "${SOURCEFORGE_MIRROR}/omxil/libomxil-bellagio-${PV}.tar.gz \ | ||
12 | file://configure-fix.patch \ | ||
13 | file://parallel-make.patch \ | ||
14 | file://makefile-docdir-fix.patch \ | ||
15 | file://dynamicloader-linking.patch \ | ||
16 | file://disable-so-versioning.patch" | ||
17 | |||
18 | SRC_URI[md5sum] = "a1de827fdb75c02c84e55f740ca27cb8" | ||
19 | SRC_URI[sha256sum] = "593c0729c8ef8c1467b3bfefcf355ec19a46dd92e31bfc280e17d96b0934d74c" | ||
20 | |||
21 | S = "${WORKDIR}/${BPN}-bellagio-${PV}" | ||
22 | |||
23 | inherit autotools | ||
24 | |||
25 | EXTRA_OECONF += "--disable-doc --disable-Werror" | ||
26 | |||
27 | PROVIDES += "virtual/libomxil" | ||
28 | |||
29 | CFLAGS += "-fcommon" | ||
30 | |||
31 | PACKAGECONFIG ??= "" | ||
32 | |||
33 | PACKAGECONFIG[amr] = "--enable-amr,," | ||
34 | |||
35 | # | ||
36 | # The .so files under ${libdir}/bellagio are not intended to be versioned and symlinked. | ||
37 | # Make sure they get packaged in the main package. | ||
38 | # | ||
39 | FILES:${PN} += "${libdir}/bellagio/*.so \ | ||
40 | ${libdir}/omxloaders/*${SOLIBS}" | ||
41 | FILES:${PN}-staticdev += "${libdir}/bellagio/*.a \ | ||
42 | ${libdir}/omxloaders/*.a" | ||
43 | FILES:${PN}-dev += "${libdir}/bellagio/*.la \ | ||
44 | ${libdir}/omxloaders/*.la \ | ||
45 | ${libdir}/omxloaders/*${SOLIBSDEV}" | ||
diff --git a/meta/recipes-multimedia/libvorbis/libvorbis/0001-configure-Check-for-clang.patch b/meta/recipes-multimedia/libvorbis/libvorbis/0001-configure-Check-for-clang.patch index b06029b98b..d4fac605b6 100644 --- a/meta/recipes-multimedia/libvorbis/libvorbis/0001-configure-Check-for-clang.patch +++ b/meta/recipes-multimedia/libvorbis/libvorbis/0001-configure-Check-for-clang.patch | |||
@@ -5,9 +5,9 @@ Subject: [PATCH] configure: Check for clang | |||
5 | 5 | ||
6 | Disable gcc specific options if using clang | 6 | Disable gcc specific options if using clang |
7 | 7 | ||
8 | Upstream-Status: Inactive-Upstream [https://gitlab.xiph.org/xiph/vorbis,https://github.com/xiph/vorbis] | ||
8 | Signed-off-by: Khem Raj <raj.khem@gmail.com> | 9 | Signed-off-by: Khem Raj <raj.khem@gmail.com> |
9 | --- | 10 | --- |
10 | Upstream-Status: Pending | ||
11 | 11 | ||
12 | configure.ac | 19 +++++++++++++++++-- | 12 | configure.ac | 19 +++++++++++++++++-- |
13 | 1 file changed, 17 insertions(+), 2 deletions(-) | 13 | 1 file changed, 17 insertions(+), 2 deletions(-) |
diff --git a/meta/recipes-multimedia/x264/x264/Fix-X32-build-by-disabling-asm.patch b/meta/recipes-multimedia/x264/x264/Fix-X32-build-by-disabling-asm.patch deleted file mode 100644 index cb771fb0bf..0000000000 --- a/meta/recipes-multimedia/x264/x264/Fix-X32-build-by-disabling-asm.patch +++ /dev/null | |||
@@ -1,51 +0,0 @@ | |||
1 | From 7bc25f4d1aaa5186d2eff3e2326c7245fcd7e7f3 Mon Sep 17 00:00:00 2001 | ||
2 | From: Christopher Larson <chris_larson@mentor.com> | ||
3 | Date: Tue, 13 Dec 2016 14:22:32 -0700 | ||
4 | Subject: [PATCH] Fix X32 build by disabling asm | ||
5 | |||
6 | This applies gentoo's x32 patch, adjusted slightly, which disables asm support | ||
7 | for x32 as well as correcting -m. | ||
8 | |||
9 | Debian has a different patch which does the same, and there's a superior yet | ||
10 | out of date patch series on the x264 list which keeps asm support enabled, but | ||
11 | doesn't successfully build at this time, and my assembly is very rusty. | ||
12 | |||
13 | Upstream-Status: Pending | ||
14 | Signed-off-by: Christopher Larson <chris_larson@mentor.com> | ||
15 | |||
16 | --- | ||
17 | configure | 14 ++++++++++++-- | ||
18 | 1 file changed, 12 insertions(+), 2 deletions(-) | ||
19 | |||
20 | diff --git a/configure b/configure | ||
21 | index 51b128d..6ea9469 100755 | ||
22 | --- a/configure | ||
23 | +++ b/configure | ||
24 | @@ -754,7 +754,13 @@ case $host_cpu in | ||
25 | AS_EXT=".asm" | ||
26 | ASFLAGS="$ASFLAGS -DARCH_X86_64=1 -I\$(SRCPATH)/common/x86/" | ||
27 | stack_alignment=16 | ||
28 | - [ $compiler = GNU ] && CFLAGS="-m64 $CFLAGS" && LDFLAGS="-m64 $LDFLAGS" | ||
29 | + if [ $compiler = GNU ]; then | ||
30 | + if cpp_check "" "" "__ILP32__" ; then | ||
31 | + CFLAGS="-mx32 $CFLAGS" && LDFLAGS="-mx32 $LDFLAGS" | ||
32 | + else | ||
33 | + CFLAGS="-m64 $CFLAGS" && LDFLAGS="-m64 $LDFLAGS" | ||
34 | + fi | ||
35 | + fi | ||
36 | if [ "$SYS" = MACOSX ]; then | ||
37 | ASFLAGS="$ASFLAGS -f macho64 -DPREFIX" | ||
38 | if cc_check '' "-arch x86_64"; then | ||
39 | @@ -773,7 +779,11 @@ case $host_cpu in | ||
40 | RCFLAGS="--target=pe-x86-64 $RCFLAGS" | ||
41 | fi | ||
42 | else | ||
43 | - ASFLAGS="$ASFLAGS -f elf64" | ||
44 | + if cpp_check "" "" "__ILP32__" ; then | ||
45 | + asm=no | ||
46 | + else | ||
47 | + ASFLAGS="$ASFLAGS -f elf64" | ||
48 | + fi | ||
49 | fi | ||
50 | ;; | ||
51 | powerpc*) | ||
diff --git a/meta/recipes-multimedia/x264/x264/don-t-default-to-cortex-a9-with-neon.patch b/meta/recipes-multimedia/x264/x264/don-t-default-to-cortex-a9-with-neon.patch deleted file mode 100644 index 065e3b35b7..0000000000 --- a/meta/recipes-multimedia/x264/x264/don-t-default-to-cortex-a9-with-neon.patch +++ /dev/null | |||
@@ -1,33 +0,0 @@ | |||
1 | From a72bf499a0674fc75eedf15008b424e28f67e4bd Mon Sep 17 00:00:00 2001 | ||
2 | From: Andrei Gherzan <andrei@gherzan.ro> | ||
3 | Date: Fri, 2 Feb 2018 15:10:08 +0200 | ||
4 | Subject: [PATCH] dont default to cortex-a9 with neon | ||
5 | |||
6 | -march flag is not in CFLAGS so this will always default to | ||
7 | -mcpu=cortex-a8 -mfpu=neon. | ||
8 | |||
9 | Upstream-Status: Pending | ||
10 | |||
11 | Signed-off-by: Andrei Gherzan <andrei@gherzan.ro> | ||
12 | Signed-off-by: Maxin B. John <maxin.john@intel.com> | ||
13 | --- | ||
14 | configure | 3 --- | ||
15 | 1 file changed, 3 deletions(-) | ||
16 | |||
17 | diff --git a/configure b/configure | ||
18 | index 0e3ef23..955b993 100755 | ||
19 | --- a/configure | ||
20 | +++ b/configure | ||
21 | @@ -911,9 +911,6 @@ if [ $asm = auto -a \( $ARCH = X86 -o $ARCH = X86_64 \) ] ; then | ||
22 | fi | ||
23 | |||
24 | if [ $asm = auto -a $ARCH = ARM ] ; then | ||
25 | - # set flags so neon is built by default | ||
26 | - [ $compiler == CL ] || echo $CFLAGS | grep -Eq '(-mcpu|-march|-mfpu)' || CFLAGS="$CFLAGS -mcpu=cortex-a8 -mfpu=neon" | ||
27 | - | ||
28 | cc_check '' '' '__asm__("add r0, r1, r2");' && define HAVE_ARM_INLINE_ASM | ||
29 | if [ $compiler = CL ] && cpp_check '' '' 'defined(_M_ARM) && _M_ARM >= 7' ; then | ||
30 | define HAVE_ARMV6 | ||
31 | -- | ||
32 | 2.4.0 | ||
33 | |||
diff --git a/meta/recipes-multimedia/x264/x264_git.bb b/meta/recipes-multimedia/x264/x264_git.bb index e7d9e75e8d..fae88d24d1 100644 --- a/meta/recipes-multimedia/x264/x264_git.bb +++ b/meta/recipes-multimedia/x264/x264_git.bb | |||
@@ -8,13 +8,11 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f" | |||
8 | 8 | ||
9 | DEPENDS = "nasm-native" | 9 | DEPENDS = "nasm-native" |
10 | 10 | ||
11 | SRC_URI = "git://github.com/mirror/x264;branch=stable;protocol=https \ | 11 | SRC_URI = "git://code.videolan.org/videolan/x264.git;branch=stable;protocol=https \ |
12 | file://don-t-default-to-cortex-a9-with-neon.patch \ | ||
13 | file://Fix-X32-build-by-disabling-asm.patch \ | ||
14 | " | 12 | " |
15 | UPSTREAM_CHECK_COMMITS = "1" | 13 | UPSTREAM_CHECK_COMMITS = "1" |
16 | 14 | ||
17 | SRCREV = "baee400fa9ced6f5481a728138fed6e867b0ff7f" | 15 | SRCREV = "31e19f92f00c7003fa115047ce50978bc98c3a0d" |
18 | 16 | ||
19 | PV = "r3039+git" | 17 | PV = "r3039+git" |
20 | 18 | ||
@@ -41,6 +39,7 @@ EXTRA_OECONF = '--prefix=${prefix} \ | |||
41 | --disable-opencl \ | 39 | --disable-opencl \ |
42 | --enable-pic \ | 40 | --enable-pic \ |
43 | ${X264_DISABLE_ASM} \ | 41 | ${X264_DISABLE_ASM} \ |
42 | --extra-cflags="${TUNE_CCARGS}" \ | ||
44 | ' | 43 | ' |
45 | 44 | ||
46 | do_configure() { | 45 | do_configure() { |
@@ -54,3 +53,5 @@ do_install() { | |||
54 | } | 53 | } |
55 | 54 | ||
56 | AS[unexport] = "1" | 55 | AS[unexport] = "1" |
56 | |||
57 | COMPATIBLE_HOST:x86-x32 = "null" | ||
diff --git a/meta/recipes-rt/rt-tests/rt-tests_git.bb b/meta/recipes-rt/rt-tests/rt-tests_git.bb index ef082131e2..1f48e143d4 100644 --- a/meta/recipes-rt/rt-tests/rt-tests_git.bb +++ b/meta/recipes-rt/rt-tests/rt-tests_git.bb | |||
@@ -27,7 +27,7 @@ do_install() { | |||
27 | } | 27 | } |
28 | 28 | ||
29 | do_install_ptest() { | 29 | do_install_ptest() { |
30 | cp ${WORKDIR}/rt_bmark.py ${D}${PTEST_PATH} | 30 | cp ${UNPACKDIR}/rt_bmark.py ${D}${PTEST_PATH} |
31 | } | 31 | } |
32 | 32 | ||
33 | RDEPENDS:${PN}-ptest += " stress-ng python3 python3-multiprocessing python3-datetime python3-misc" | 33 | RDEPENDS:${PN}-ptest += " stress-ng python3 python3-multiprocessing python3-datetime python3-misc" |
diff --git a/meta/recipes-sato/matchbox-sato/matchbox-session-sato_0.1.bb b/meta/recipes-sato/matchbox-sato/matchbox-session-sato_0.1.bb index 90734b6bd0..bb7ddbc7f4 100644 --- a/meta/recipes-sato/matchbox-sato/matchbox-session-sato_0.1.bb +++ b/meta/recipes-sato/matchbox-sato/matchbox-session-sato_0.1.bb | |||
@@ -19,7 +19,9 @@ REQUIRED_DISTRO_FEATURES = "x11" | |||
19 | 19 | ||
20 | SRC_URI = "file://session \ | 20 | SRC_URI = "file://session \ |
21 | file://index.theme" | 21 | file://index.theme" |
22 | S = "${WORKDIR}" | 22 | |
23 | S = "${WORKDIR}/sources" | ||
24 | UNPACKDIR = "${S}" | ||
23 | 25 | ||
24 | FILES:${PN} += "${datadir}/themes/Sato/index.theme" | 26 | FILES:${PN} += "${datadir}/themes/Sato/index.theme" |
25 | 27 | ||
diff --git a/meta/recipes-sato/pulseaudio-sato/pulseaudio-client-conf-sato_1.bb b/meta/recipes-sato/pulseaudio-sato/pulseaudio-client-conf-sato_1.bb index ec187ed9bd..20368703c4 100644 --- a/meta/recipes-sato/pulseaudio-sato/pulseaudio-client-conf-sato_1.bb +++ b/meta/recipes-sato/pulseaudio-sato/pulseaudio-client-conf-sato_1.bb | |||
@@ -4,7 +4,9 @@ LICENSE = "MIT" | |||
4 | LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" | 4 | LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" |
5 | 5 | ||
6 | SRC_URI = "file://50-sato.conf" | 6 | SRC_URI = "file://50-sato.conf" |
7 | S = "${WORKDIR}" | 7 | |
8 | S = "${WORKDIR}/sources" | ||
9 | UNPACKDIR = "${S}" | ||
8 | 10 | ||
9 | inherit allarch | 11 | inherit allarch |
10 | 12 | ||
diff --git a/meta/recipes-sato/shutdown-desktop/shutdown-desktop.bb b/meta/recipes-sato/shutdown-desktop/shutdown-desktop.bb index 766f574b25..8c6c0edf10 100644 --- a/meta/recipes-sato/shutdown-desktop/shutdown-desktop.bb +++ b/meta/recipes-sato/shutdown-desktop/shutdown-desktop.bb | |||
@@ -5,7 +5,8 @@ LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384 | |||
5 | SRC_URI = "file://shutdown.desktop" | 5 | SRC_URI = "file://shutdown.desktop" |
6 | 6 | ||
7 | 7 | ||
8 | S = "${WORKDIR}" | 8 | S = "${WORKDIR}/sources" |
9 | UNPACKDIR = "${S}" | ||
9 | 10 | ||
10 | do_install() { | 11 | do_install() { |
11 | install -d ${D}${datadir}/applications | 12 | install -d ${D}${datadir}/applications |
diff --git a/meta/recipes-support/appstream/appstream_1.0.2.bb b/meta/recipes-support/appstream/appstream_1.0.3.bb index 7eb12a04c5..625e85a0ae 100644 --- a/meta/recipes-support/appstream/appstream_1.0.2.bb +++ b/meta/recipes-support/appstream/appstream_1.0.3.bb | |||
@@ -28,7 +28,7 @@ SRC_URI = " \ | |||
28 | https://www.freedesktop.org/software/appstream/releases/AppStream-${PV}.tar.xz \ | 28 | https://www.freedesktop.org/software/appstream/releases/AppStream-${PV}.tar.xz \ |
29 | file://0001-remove-hardcoded-path.patch \ | 29 | file://0001-remove-hardcoded-path.patch \ |
30 | " | 30 | " |
31 | SRC_URI[sha256sum] = "1a5148ca97dcbf5eb6e9c380278bb0d20938569292ea8652df1b3cac8bd2736b" | 31 | SRC_URI[sha256sum] = "5ab6f6cf644e7875a9508593962e56bb430f4e59ae0bf03be6be7029deb6baa4" |
32 | 32 | ||
33 | S = "${WORKDIR}/AppStream-${PV}" | 33 | S = "${WORKDIR}/AppStream-${PV}" |
34 | 34 | ||
diff --git a/meta/recipes-support/bash-completion/bash-completion_2.13.0.bb b/meta/recipes-support/bash-completion/bash-completion_2.14.0.bb index f75d61e219..06c81beaa0 100644 --- a/meta/recipes-support/bash-completion/bash-completion_2.13.0.bb +++ b/meta/recipes-support/bash-completion/bash-completion_2.14.0.bb | |||
@@ -14,7 +14,7 @@ SECTION = "console/utils" | |||
14 | 14 | ||
15 | SRC_URI = "${GITHUB_BASE_URI}/download/${PV}/${BPN}-${PV}.tar.xz" | 15 | SRC_URI = "${GITHUB_BASE_URI}/download/${PV}/${BPN}-${PV}.tar.xz" |
16 | 16 | ||
17 | SRC_URI[sha256sum] = "c5f99a39e40f0d154c03ff15438e87ece1f5ac666336a4459899e2ff4bedf3d1" | 17 | SRC_URI[sha256sum] = "5c7494f968280832d6adb5aa19f745a56f1a79df311e59338c5efa6f7285e168" |
18 | GITHUB_BASE_URI = "https://github.com/scop/bash-completion/releases" | 18 | GITHUB_BASE_URI = "https://github.com/scop/bash-completion/releases" |
19 | 19 | ||
20 | PARALLEL_MAKE = "" | 20 | PARALLEL_MAKE = "" |
diff --git a/meta/recipes-support/boost/boost-1.84.0.inc b/meta/recipes-support/boost/boost-1.85.0.inc index 5bbea2ba5b..c2380bba6e 100644 --- a/meta/recipes-support/boost/boost-1.84.0.inc +++ b/meta/recipes-support/boost/boost-1.85.0.inc | |||
@@ -12,7 +12,7 @@ BOOST_MAJ = "${@"_".join(d.getVar("PV").split(".")[0:2])}" | |||
12 | BOOST_P = "boost_${BOOST_VER}" | 12 | BOOST_P = "boost_${BOOST_VER}" |
13 | 13 | ||
14 | SRC_URI = "https://boostorg.jfrog.io/artifactory/main/release/${PV}/source/${BOOST_P}.tar.bz2" | 14 | SRC_URI = "https://boostorg.jfrog.io/artifactory/main/release/${PV}/source/${BOOST_P}.tar.bz2" |
15 | SRC_URI[sha256sum] = "cc4b893acf645c9d4b698e9a0f08ca8846aa5d6c68275c14c3e7949c24109454" | 15 | SRC_URI[sha256sum] = "7009fe1faa1697476bdc7027703a2badb84e849b7b0baad5086b087b971f8617" |
16 | 16 | ||
17 | UPSTREAM_CHECK_URI = "http://www.boost.org/users/download/" | 17 | UPSTREAM_CHECK_URI = "http://www.boost.org/users/download/" |
18 | UPSTREAM_CHECK_REGEX = "release/(?P<pver>.*)/source/" | 18 | UPSTREAM_CHECK_REGEX = "release/(?P<pver>.*)/source/" |
diff --git a/meta/recipes-support/boost/boost-build-native_1.84.0.bb b/meta/recipes-support/boost/boost-build-native_1.85.0.bb index a345bac499..b333c7c81a 100644 --- a/meta/recipes-support/boost/boost-build-native_1.84.0.bb +++ b/meta/recipes-support/boost/boost-build-native_1.85.0.bb | |||
@@ -7,7 +7,7 @@ LICENSE = "BSL-1.0" | |||
7 | LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=e4224ccaecb14d942c71d31bef20d78c" | 7 | LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=e4224ccaecb14d942c71d31bef20d78c" |
8 | 8 | ||
9 | SRC_URI = "git://github.com/boostorg/build;protocol=https;branch=master" | 9 | SRC_URI = "git://github.com/boostorg/build;protocol=https;branch=master" |
10 | SRCREV = "8d86b9a85407d73d6e8c631771f18c2a237d2d71" | 10 | SRCREV = "b0311a0d4f3bb0f93d06d3c6faca510c7f5a2012" |
11 | PE = "1" | 11 | PE = "1" |
12 | 12 | ||
13 | UPSTREAM_CHECK_GITTAGREGEX = "boost-(?P<pver>(\d+(\.\d+)+))" | 13 | UPSTREAM_CHECK_GITTAGREGEX = "boost-(?P<pver>(\d+(\.\d+)+))" |
diff --git a/meta/recipes-support/boost/boost/0001-Don-t-set-up-arch-instruction-set-flags-we-do-that-o.patch b/meta/recipes-support/boost/boost/0001-Don-t-set-up-arch-instruction-set-flags-we-do-that-o.patch index 9ba5d5cc33..fffc675b08 100644 --- a/meta/recipes-support/boost/boost/0001-Don-t-set-up-arch-instruction-set-flags-we-do-that-o.patch +++ b/meta/recipes-support/boost/boost/0001-Don-t-set-up-arch-instruction-set-flags-we-do-that-o.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 18185e939ca121335a6172c8e50872af8122a759 Mon Sep 17 00:00:00 2001 | 1 | From 0130a1c0d8ba3c0d59258cb79e8b4a04fe6d8eec Mon Sep 17 00:00:00 2001 |
2 | From: Alexander Kanavin <alex.kanavin@gmail.com> | 2 | From: Alexander Kanavin <alex.kanavin@gmail.com> |
3 | Date: Tue, 18 Dec 2018 15:42:57 +0100 | 3 | Date: Tue, 18 Dec 2018 15:42:57 +0100 |
4 | Subject: [PATCH] Don't set up arch/instruction-set flags, we do that | 4 | Subject: [PATCH] Don't set up arch/instruction-set flags, we do that |
@@ -13,10 +13,10 @@ Signed-off-by: Alexander Kanavin <alex.kanavin@gmail.com> | |||
13 | 1 file changed, 153 deletions(-) | 13 | 1 file changed, 153 deletions(-) |
14 | 14 | ||
15 | diff --git a/tools/build/src/tools/gcc.jam b/tools/build/src/tools/gcc.jam | 15 | diff --git a/tools/build/src/tools/gcc.jam b/tools/build/src/tools/gcc.jam |
16 | index 834f5e1bf..493a43e6d 100644 | 16 | index e5f8f53a4..0186545e8 100644 |
17 | --- a/tools/build/src/tools/gcc.jam | 17 | --- a/tools/build/src/tools/gcc.jam |
18 | +++ b/tools/build/src/tools/gcc.jam | 18 | +++ b/tools/build/src/tools/gcc.jam |
19 | @@ -1113,156 +1113,3 @@ local rule cpu-flags ( toolset variable : architecture : instruction-set + : | 19 | @@ -1111,156 +1111,3 @@ local rule cpu-flags ( toolset variable : architecture : instruction-set + : |
20 | <architecture>$(architecture)/<instruction-set>$(instruction-set) | 20 | <architecture>$(architecture)/<instruction-set>$(instruction-set) |
21 | : $(values) ; | 21 | : $(values) ; |
22 | } | 22 | } |
diff --git a/meta/recipes-support/boost/boost/0001-dont-setup-compiler-flags-m32-m64.patch b/meta/recipes-support/boost/boost/0001-dont-setup-compiler-flags-m32-m64.patch index 913810a340..1b01a04c6d 100644 --- a/meta/recipes-support/boost/boost/0001-dont-setup-compiler-flags-m32-m64.patch +++ b/meta/recipes-support/boost/boost/0001-dont-setup-compiler-flags-m32-m64.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From bbb0845c0a79238fb2e84cca41826a0944b6ce7e Mon Sep 17 00:00:00 2001 | 1 | From 9c3d6fa3544ca7bd6d8c4f4edbb62282cbc35861 Mon Sep 17 00:00:00 2001 |
2 | From: Anuj Mittal <anuj.mittal@intel.com> | 2 | From: Anuj Mittal <anuj.mittal@intel.com> |
3 | Date: Thu, 14 Nov 2019 10:13:53 +0800 | 3 | Date: Thu, 14 Nov 2019 10:13:53 +0800 |
4 | Subject: [PATCH] dont setup compiler flags -m32/-m64 | 4 | Subject: [PATCH] dont setup compiler flags -m32/-m64 |
@@ -13,7 +13,7 @@ Signed-off-by: Anuj Mittal <anuj.mittal@intel.com> | |||
13 | 1 file changed, 14 deletions(-) | 13 | 1 file changed, 14 deletions(-) |
14 | 14 | ||
15 | diff --git a/tools/build/src/tools/gcc.jam b/tools/build/src/tools/gcc.jam | 15 | diff --git a/tools/build/src/tools/gcc.jam b/tools/build/src/tools/gcc.jam |
16 | index 493a43e6d..42dccbdfe 100644 | 16 | index 0186545e8..ba33c4086 100644 |
17 | --- a/tools/build/src/tools/gcc.jam | 17 | --- a/tools/build/src/tools/gcc.jam |
18 | +++ b/tools/build/src/tools/gcc.jam | 18 | +++ b/tools/build/src/tools/gcc.jam |
19 | @@ -360,20 +360,6 @@ local rule compile-link-flags ( * ) | 19 | @@ -360,20 +360,6 @@ local rule compile-link-flags ( * ) |
diff --git a/meta/recipes-support/boost/boost/boost-math-disable-pch-for-gcc.patch b/meta/recipes-support/boost/boost/boost-math-disable-pch-for-gcc.patch index 9b6fcc6358..9cb671e013 100644 --- a/meta/recipes-support/boost/boost/boost-math-disable-pch-for-gcc.patch +++ b/meta/recipes-support/boost/boost/boost-math-disable-pch-for-gcc.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From d6864bd78c01c5e8578805a7b67555c70a0e99a2 Mon Sep 17 00:00:00 2001 | 1 | From a61dd2b1336c06d232df58212ece88067e8a80bb Mon Sep 17 00:00:00 2001 |
2 | From: Jackie Huang <jackie.huang@windriver.com> | 2 | From: Jackie Huang <jackie.huang@windriver.com> |
3 | Date: Fri, 23 Sep 2016 01:04:50 -0700 | 3 | Date: Fri, 23 Sep 2016 01:04:50 -0700 |
4 | Subject: [PATCH] boost-math: disable pch for gcc | 4 | Subject: [PATCH] boost-math: disable pch for gcc |
@@ -14,13 +14,13 @@ Signed-off-by: Jackie Huang <jackie.huang@windriver.com> | |||
14 | 1 file changed, 1 insertion(+), 1 deletion(-) | 14 | 1 file changed, 1 insertion(+), 1 deletion(-) |
15 | 15 | ||
16 | diff --git a/libs/math/build/Jamfile.v2 b/libs/math/build/Jamfile.v2 | 16 | diff --git a/libs/math/build/Jamfile.v2 b/libs/math/build/Jamfile.v2 |
17 | index fd840287e..5aff7d724 100644 | 17 | index 6549e06b7..342cbf0b1 100644 |
18 | --- a/libs/math/build/Jamfile.v2 | 18 | --- a/libs/math/build/Jamfile.v2 |
19 | +++ b/libs/math/build/Jamfile.v2 | 19 | +++ b/libs/math/build/Jamfile.v2 |
20 | @@ -14,7 +14,7 @@ project | 20 | @@ -13,7 +13,7 @@ project |
21 | <toolset>intel-win:<linkflags>-nologo | ||
21 | #<toolset>intel-linux:<pch>off | 22 | #<toolset>intel-linux:<pch>off |
22 | <toolset>intel-darwin:<pch>off | 23 | <toolset>intel-darwin:<pch>off |
23 | <toolset>msvc-7.1:<pch>off | ||
24 | - <toolset>gcc,<target-os>windows:<pch>off | 24 | - <toolset>gcc,<target-os>windows:<pch>off |
25 | + <toolset>gcc:<pch>off | 25 | + <toolset>gcc:<pch>off |
26 | #<toolset>gcc:<cxxflags>-fvisibility=hidden | 26 | #<toolset>gcc:<cxxflags>-fvisibility=hidden |
diff --git a/meta/recipes-support/boost/boost_1.84.0.bb b/meta/recipes-support/boost/boost_1.85.0.bb index 4b580d078b..4b580d078b 100644 --- a/meta/recipes-support/boost/boost_1.84.0.bb +++ b/meta/recipes-support/boost/boost_1.85.0.bb | |||
diff --git a/meta/recipes-support/curl/curl/721941aadf4adf4f6aeb3f4c0ab489bb89610c36.patch b/meta/recipes-support/curl/curl/721941aadf4adf4f6aeb3f4c0ab489bb89610c36.patch deleted file mode 100644 index 98f7db93e8..0000000000 --- a/meta/recipes-support/curl/curl/721941aadf4adf4f6aeb3f4c0ab489bb89610c36.patch +++ /dev/null | |||
@@ -1,64 +0,0 @@ | |||
1 | From 721941aadf4adf4f6aeb3f4c0ab489bb89610c36 Mon Sep 17 00:00:00 2001 | ||
2 | From: Stefan Eissing <stefan@eissing.org> | ||
3 | Date: Mon, 1 Apr 2024 15:41:18 +0200 | ||
4 | Subject: [PATCH] http: with chunked POST forced, disable length check on read | ||
5 | callback | ||
6 | |||
7 | - when an application forces HTTP/1.1 chunked transfer encoding | ||
8 | by setting the corresponding header and instructs curl to use | ||
9 | the CURLOPT_READFUNCTION, disregard any POST length information. | ||
10 | - this establishes backward compatibility with previous curl versions | ||
11 | |||
12 | Applications are encouraged to not force "chunked", but rather | ||
13 | set length information for a POST. By setting -1, curl will | ||
14 | auto-select chunked on HTTP/1.1 and work properly on other HTTP | ||
15 | versions. | ||
16 | |||
17 | Reported-by: Jeff King | ||
18 | Fixes #13229 | ||
19 | Closes #13257 | ||
20 | Upstream-Status: Backport | ||
21 | --- | ||
22 | lib/http.c | 22 ++++++++++++++++++++-- | ||
23 | 1 file changed, 20 insertions(+), 2 deletions(-) | ||
24 | |||
25 | diff --git a/lib/http.c b/lib/http.c | ||
26 | index 92c04e69cd8373..a764d3c4403c39 100644 | ||
27 | --- a/lib/http.c | ||
28 | +++ b/lib/http.c | ||
29 | @@ -2046,8 +2046,19 @@ static CURLcode set_reader(struct Curl_easy *data, Curl_HttpReq httpreq) | ||
30 | else | ||
31 | result = Curl_creader_set_null(data); | ||
32 | } | ||
33 | - else { /* we read the bytes from the callback */ | ||
34 | - result = Curl_creader_set_fread(data, postsize); | ||
35 | + else { | ||
36 | + /* we read the bytes from the callback. In case "chunked" encoding | ||
37 | + * is forced by the application, we disregard `postsize`. This is | ||
38 | + * a backward compatibility decision to earlier versions where | ||
39 | + * chunking disregarded this. See issue #13229. */ | ||
40 | + bool chunked = FALSE; | ||
41 | + char *ptr = Curl_checkheaders(data, STRCONST("Transfer-Encoding")); | ||
42 | + if(ptr) { | ||
43 | + /* Some kind of TE is requested, check if 'chunked' is chosen */ | ||
44 | + chunked = Curl_compareheader(ptr, STRCONST("Transfer-Encoding:"), | ||
45 | + STRCONST("chunked")); | ||
46 | + } | ||
47 | + result = Curl_creader_set_fread(data, chunked? -1 : postsize); | ||
48 | } | ||
49 | return result; | ||
50 | |||
51 | @@ -2115,6 +2126,13 @@ CURLcode Curl_http_req_set_reader(struct Curl_easy *data, | ||
52 | data->req.upload_chunky = | ||
53 | Curl_compareheader(ptr, | ||
54 | STRCONST("Transfer-Encoding:"), STRCONST("chunked")); | ||
55 | + if(data->req.upload_chunky && | ||
56 | + Curl_use_http_1_1plus(data, data->conn) && | ||
57 | + (data->conn->httpversion >= 20)) { | ||
58 | + infof(data, "suppressing chunked transfer encoding on connection " | ||
59 | + "using HTTP version 2 or higher"); | ||
60 | + data->req.upload_chunky = FALSE; | ||
61 | + } | ||
62 | } | ||
63 | else { | ||
64 | curl_off_t req_clen = Curl_creader_total_length(data); | ||
diff --git a/meta/recipes-support/curl/curl/disable-tests b/meta/recipes-support/curl/curl/disable-tests index 259576fd01..c4aa7783a1 100644 --- a/meta/recipes-support/curl/curl/disable-tests +++ b/meta/recipes-support/curl/curl/disable-tests | |||
@@ -39,3 +39,4 @@ | |||
39 | 1404 | 39 | 1404 |
40 | 1405 | 40 | 1405 |
41 | 1465 | 41 | 1465 |
42 | 1481 | ||
diff --git a/meta/recipes-support/curl/curl/run-ptest b/meta/recipes-support/curl/curl/run-ptest index 3d25f3d90b..579b3f4587 100644 --- a/meta/recipes-support/curl/curl/run-ptest +++ b/meta/recipes-support/curl/curl/run-ptest | |||
@@ -7,5 +7,7 @@ cd tests | |||
7 | # Use automake-style output | 7 | # Use automake-style output |
8 | # Run four tests in parallel | 8 | # Run four tests in parallel |
9 | # Print log output on failure | 9 | # Print log output on failure |
10 | |||
10 | # Don't run the flaky or timing dependent tests | 11 | # Don't run the flaky or timing dependent tests |
11 | ./runtests.pl -a -n -am -j4 -p !flaky !timing-dependent | 12 | # Until https://github.com/curl/curl/issues/13350 is resolved, don't run FTP tests |
13 | ./runtests.pl -a -n -am -j4 -p !flaky !timing-dependent !FTP | ||
diff --git a/meta/recipes-support/curl/curl_8.7.1.bb b/meta/recipes-support/curl/curl_8.8.0.bb index c74416d7e9..b932594159 100644 --- a/meta/recipes-support/curl/curl_8.7.1.bb +++ b/meta/recipes-support/curl/curl_8.8.0.bb | |||
@@ -11,12 +11,11 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=eed2e5088e1ac619c9a1c747da291d75" | |||
11 | 11 | ||
12 | SRC_URI = " \ | 12 | SRC_URI = " \ |
13 | https://curl.se/download/${BP}.tar.xz \ | 13 | https://curl.se/download/${BP}.tar.xz \ |
14 | file://721941aadf4adf4f6aeb3f4c0ab489bb89610c36.patch \ | ||
15 | file://run-ptest \ | 14 | file://run-ptest \ |
16 | file://disable-tests \ | 15 | file://disable-tests \ |
17 | file://no-test-timeout.patch \ | 16 | file://no-test-timeout.patch \ |
18 | " | 17 | " |
19 | SRC_URI[sha256sum] = "6fea2aac6a4610fbd0400afb0bcddbe7258a64c63f1f68e5855ebc0c659710cd" | 18 | SRC_URI[sha256sum] = "0f58bb95fc330c8a46eeb3df5701b0d90c9d9bfcc42bd1cd08791d12551d4400" |
20 | 19 | ||
21 | # Curl has used many names over the years... | 20 | # Curl has used many names over the years... |
22 | CVE_PRODUCT = "haxx:curl haxx:libcurl curl:curl curl:libcurl libcurl:libcurl daniel_stenberg:curl" | 21 | CVE_PRODUCT = "haxx:curl haxx:libcurl curl:curl curl:libcurl libcurl:libcurl daniel_stenberg:curl" |
@@ -73,7 +72,6 @@ PACKAGECONFIG[zstd] = "--with-zstd,--without-zstd,zstd" | |||
73 | 72 | ||
74 | EXTRA_OECONF = " \ | 73 | EXTRA_OECONF = " \ |
75 | --disable-libcurl-option \ | 74 | --disable-libcurl-option \ |
76 | --disable-ntlm-wb \ | ||
77 | --with-ca-bundle=${sysconfdir}/ssl/certs/ca-certificates.crt \ | 75 | --with-ca-bundle=${sysconfdir}/ssl/certs/ca-certificates.crt \ |
78 | --without-libpsl \ | 76 | --without-libpsl \ |
79 | --enable-optimize \ | 77 | --enable-optimize \ |
@@ -103,7 +101,7 @@ do_compile_ptest() { | |||
103 | } | 101 | } |
104 | 102 | ||
105 | do_install_ptest() { | 103 | do_install_ptest() { |
106 | cat ${WORKDIR}/disable-tests >> ${S}/tests/data/DISABLED | 104 | cat ${UNPACKDIR}/disable-tests >> ${S}/tests/data/DISABLED |
107 | rm -f ${B}/tests/configurehelp.pm | 105 | rm -f ${B}/tests/configurehelp.pm |
108 | cp -rf ${B}/tests ${D}${PTEST_PATH} | 106 | cp -rf ${B}/tests ${D}${PTEST_PATH} |
109 | rm -f ${D}${PTEST_PATH}/tests/libtest/.libs/libhostname.la | 107 | rm -f ${D}${PTEST_PATH}/tests/libtest/.libs/libhostname.la |
@@ -120,6 +118,7 @@ do_install_ptest() { | |||
120 | 118 | ||
121 | RDEPENDS:${PN}-ptest += " \ | 119 | RDEPENDS:${PN}-ptest += " \ |
122 | bash \ | 120 | bash \ |
121 | locale-base-en-us \ | ||
123 | perl-module-b \ | 122 | perl-module-b \ |
124 | perl-module-base \ | 123 | perl-module-base \ |
125 | perl-module-cwd \ | 124 | perl-module-cwd \ |
@@ -135,7 +134,6 @@ RDEPENDS:${PN}-ptest += " \ | |||
135 | perl-module-storable \ | 134 | perl-module-storable \ |
136 | perl-module-time-hires \ | 135 | perl-module-time-hires \ |
137 | " | 136 | " |
138 | RDEPENDS:${PN}-ptest:append:libc-glibc = " locale-base-en-us" | ||
139 | 137 | ||
140 | PACKAGES =+ "lib${BPN}" | 138 | PACKAGES =+ "lib${BPN}" |
141 | 139 | ||
diff --git a/meta/recipes-support/db/db_5.3.28.bb b/meta/recipes-support/db/db_5.3.28.bb index a99d5cea62..a7d061e0da 100644 --- a/meta/recipes-support/db/db_5.3.28.bb +++ b/meta/recipes-support/db/db_5.3.28.bb | |||
@@ -116,3 +116,7 @@ INSANE_SKIP:${PN} = "dev-so" | |||
116 | INSANE_SKIP:${PN}-cxx = "dev-so" | 116 | INSANE_SKIP:${PN}-cxx = "dev-so" |
117 | 117 | ||
118 | BBCLASSEXTEND = "native nativesdk" | 118 | BBCLASSEXTEND = "native nativesdk" |
119 | |||
120 | # many configure tests are failing with gcc-14 | ||
121 | CFLAGS += "-Wno-error=implicit-int -Wno-error=implicit-function-declaration" | ||
122 | BUILD_CFLAGS += "-Wno-error=implicit-int -Wno-error=implicit-function-declaration" | ||
diff --git a/meta/recipes-support/diffoscope/diffoscope_265.bb b/meta/recipes-support/diffoscope/diffoscope_267.bb index 6af5abc974..69bad508cd 100644 --- a/meta/recipes-support/diffoscope/diffoscope_265.bb +++ b/meta/recipes-support/diffoscope/diffoscope_267.bb | |||
@@ -12,7 +12,7 @@ PYPI_PACKAGE = "diffoscope" | |||
12 | 12 | ||
13 | inherit pypi setuptools3 | 13 | inherit pypi setuptools3 |
14 | 14 | ||
15 | SRC_URI[sha256sum] = "7bdcbd7fc5bc4c821bf6ab5ffbbeb265103b04e6908ea4bb12144d7e5ca002ff" | 15 | SRC_URI[sha256sum] = "c0a807aa66e18eae88c1adca28988675c9749d3ad1a8db3d2eb7e2afb8763568" |
16 | 16 | ||
17 | RDEPENDS:${PN} += "\ | 17 | RDEPENDS:${PN} += "\ |
18 | binutils \ | 18 | binutils \ |
diff --git a/meta/recipes-support/enchant/enchant2_2.7.2.bb b/meta/recipes-support/enchant/enchant2_2.7.3.bb index 08998dd48f..d5073b6f31 100644 --- a/meta/recipes-support/enchant/enchant2_2.7.2.bb +++ b/meta/recipes-support/enchant/enchant2_2.7.3.bb | |||
@@ -12,7 +12,7 @@ DEPENDS = "glib-2.0 groff-native" | |||
12 | inherit autotools pkgconfig github-releases | 12 | inherit autotools pkgconfig github-releases |
13 | 13 | ||
14 | SRC_URI = "${GITHUB_BASE_URI}/download/v${PV}/enchant-${PV}.tar.gz" | 14 | SRC_URI = "${GITHUB_BASE_URI}/download/v${PV}/enchant-${PV}.tar.gz" |
15 | SRC_URI[sha256sum] = "7cc3400a6657974a740b6e3c2568e2935c70e5302f07fadb2095366b75ecad6f" | 15 | SRC_URI[sha256sum] = "fe6ad4cbe8c71b9384ffdef962be52d4d2bd5ebfb6351435bb390543d4f78b1e" |
16 | 16 | ||
17 | GITHUB_BASE_URI = "https://github.com/AbiWord/enchant/releases" | 17 | GITHUB_BASE_URI = "https://github.com/AbiWord/enchant/releases" |
18 | 18 | ||
diff --git a/meta/recipes-support/fribidi/fribidi_1.0.13.bb b/meta/recipes-support/fribidi/fribidi_1.0.14.bb index 5d0476a375..51752096de 100644 --- a/meta/recipes-support/fribidi/fribidi_1.0.13.bb +++ b/meta/recipes-support/fribidi/fribidi_1.0.14.bb | |||
@@ -11,7 +11,7 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=a916467b91076e631dd8edb7424769c7" | |||
11 | 11 | ||
12 | SRC_URI = "${GITHUB_BASE_URI}/download/v${PV}/${BP}.tar.xz \ | 12 | SRC_URI = "${GITHUB_BASE_URI}/download/v${PV}/${BP}.tar.xz \ |
13 | " | 13 | " |
14 | SRC_URI[sha256sum] = "7fa16c80c81bd622f7b198d31356da139cc318a63fc7761217af4130903f54a2" | 14 | SRC_URI[sha256sum] = "76ae204a7027652ac3981b9fa5817c083ba23114340284c58e756b259cd2259a" |
15 | 15 | ||
16 | inherit meson lib_package pkgconfig github-releases | 16 | inherit meson lib_package pkgconfig github-releases |
17 | 17 | ||
diff --git a/meta/recipes-support/icu/icu_74-2.bb b/meta/recipes-support/icu/icu_75-1.bb index 8352bf2a5b..14fbf31de5 100644 --- a/meta/recipes-support/icu/icu_74-2.bb +++ b/meta/recipes-support/icu/icu_75-1.bb | |||
@@ -78,7 +78,7 @@ FILES:libicuio = "${libdir}/libicuio.so.*" | |||
78 | 78 | ||
79 | BBCLASSEXTEND = "native nativesdk" | 79 | BBCLASSEXTEND = "native nativesdk" |
80 | 80 | ||
81 | LIC_FILES_CHKSUM = "file://../LICENSE;md5=08dc3852df8fffa807301902ad899ff8" | 81 | LIC_FILES_CHKSUM = "file://../LICENSE;md5=9cf7c317c3f2a0962437465a9022dbe9" |
82 | 82 | ||
83 | def icu_download_version(d): | 83 | def icu_download_version(d): |
84 | pvsplit = d.getVar('PV').split('-') | 84 | pvsplit = d.getVar('PV').split('-') |
@@ -111,8 +111,8 @@ SRC_URI = "${BASE_SRC_URI};name=code \ | |||
111 | SRC_URI:append:class-target = "\ | 111 | SRC_URI:append:class-target = "\ |
112 | file://0001-Disable-LDFLAGSICUDT-for-Linux.patch \ | 112 | file://0001-Disable-LDFLAGSICUDT-for-Linux.patch \ |
113 | " | 113 | " |
114 | SRC_URI[code.sha256sum] = "68db082212a96d6f53e35d60f47d38b962e9f9d207a74cfac78029ae8ff5e08c" | 114 | SRC_URI[code.sha256sum] = "cb968df3e4d2e87e8b11c49a5d01c787bd13b9545280fc6642f826527618caef" |
115 | SRC_URI[data.sha256sum] = "c28c3ca5f4ba3384781797138a294ca360988d4322674ad4d51e52f5d9b0a2b6" | 115 | SRC_URI[data.sha256sum] = "a5104212dc317a64f9b035723ea706f2f4fd5a0f37b7923fae7aeb9d1d0061b1" |
116 | 116 | ||
117 | UPSTREAM_CHECK_REGEX = "releases/tag/release-(?P<pver>(?!.+rc).+)" | 117 | UPSTREAM_CHECK_REGEX = "releases/tag/release-(?P<pver>(?!.+rc).+)" |
118 | GITHUB_BASE_URI = "https://github.com/unicode-org/icu/releases" | 118 | GITHUB_BASE_URI = "https://github.com/unicode-org/icu/releases" |
diff --git a/meta/recipes-support/libcap-ng/files/0001-Fix-python-path-when-invoking-py-compile-54.patch b/meta/recipes-support/libcap-ng/files/0001-Fix-python-path-when-invoking-py-compile-54.patch new file mode 100644 index 0000000000..a0452ad53d --- /dev/null +++ b/meta/recipes-support/libcap-ng/files/0001-Fix-python-path-when-invoking-py-compile-54.patch | |||
@@ -0,0 +1,34 @@ | |||
1 | From 1fe7c1cfeea00ba4eb903fbb39b74361594d4835 Mon Sep 17 00:00:00 2001 | ||
2 | From: Jan Palus <jpalus@fastmail.com> | ||
3 | Date: Wed, 10 Apr 2024 21:30:51 +0200 | ||
4 | Subject: [PATCH] Fix python path when invoking py-compile (#54) | ||
5 | |||
6 | 48eebb2 replaced custom PYTHON3 variable with PYTHON by using standard | ||
7 | AM_PATH_PYTHON macro. Makefile however still referred to old one. | ||
8 | There's no need to set PYTHON explicitly anymore so drop it. | ||
9 | |||
10 | Fixes #53 | ||
11 | |||
12 | Upstream-Status: Backport | ||
13 | [https://github.com/stevegrubb/libcap-ng/commit/1fe7c1cfeea00ba4eb903fbb39b74361594d4835] | ||
14 | |||
15 | Signed-off-by: Yi Zhao <yi.zhao@windriver.com> | ||
16 | --- | ||
17 | bindings/python3/Makefile.am | 1 - | ||
18 | 1 file changed, 1 deletion(-) | ||
19 | |||
20 | diff --git a/bindings/python3/Makefile.am b/bindings/python3/Makefile.am | ||
21 | index 70a1dd8..6072fc2 100644 | ||
22 | --- a/bindings/python3/Makefile.am | ||
23 | +++ b/bindings/python3/Makefile.am | ||
24 | @@ -27,7 +27,6 @@ AM_CPPFLAGS = -I. -I$(top_builddir) $(PYTHON3_INCLUDES) | ||
25 | LIBS = ${top_builddir}/src/libcap-ng.la | ||
26 | SWIG_FLAGS = -python | ||
27 | SWIG_INCLUDES = ${AM_CPPFLAGS} | ||
28 | -PYTHON = $(PYTHON3) | ||
29 | pyexec_PYTHON = capng.py | ||
30 | pyexec_LTLIBRARIES = _capng.la | ||
31 | pyexec_SOLIBRARIES = _capng.so | ||
32 | -- | ||
33 | 2.25.1 | ||
34 | |||
diff --git a/meta/recipes-support/libcap-ng/files/fix-issues-with-swig-4-2.patch b/meta/recipes-support/libcap-ng/files/fix-issues-with-swig-4-2.patch deleted file mode 100644 index fb424fe725..0000000000 --- a/meta/recipes-support/libcap-ng/files/fix-issues-with-swig-4-2.patch +++ /dev/null | |||
@@ -1,32 +0,0 @@ | |||
1 | From 355eada2d20886287cffc16e304087dd6f66ae37 Mon Sep 17 00:00:00 2001 | ||
2 | From: Steve Grubb <ausearch.1@gmail.com> | ||
3 | Date: Thu, 4 Jan 2024 15:06:29 -0500 | ||
4 | Subject: [PATCH] Remove python global exception handler since its deprecated | ||
5 | |||
6 | Upstream-Status: Backport [https://github.com/stevegrubb/libcap-ng/commit/30453b6553948cd05c438f9f509013e3bb84f25b] | ||
7 | Signed-off-by: Anuj Mittal <anuj.mittal@intel.com> | ||
8 | --- | ||
9 | bindings/src/capng_swig.i | 7 ------- | ||
10 | 1 file changed, 7 deletions(-) | ||
11 | |||
12 | diff --git a/bindings/src/capng_swig.i b/bindings/src/capng_swig.i | ||
13 | index fcdaf18..fa85e13 100644 | ||
14 | --- a/bindings/src/capng_swig.i | ||
15 | +++ b/bindings/src/capng_swig.i | ||
16 | @@ -30,13 +30,6 @@ | ||
17 | |||
18 | %varargs(16, signed capability = 0) capng_updatev; | ||
19 | |||
20 | -%except(python) { | ||
21 | - $action | ||
22 | - if (result < 0) { | ||
23 | - PyErr_SetFromErrno(PyExc_OSError); | ||
24 | - return NULL; | ||
25 | - } | ||
26 | -} | ||
27 | #endif | ||
28 | |||
29 | %define __signed__ | ||
30 | -- | ||
31 | 2.43.2 | ||
32 | |||
diff --git a/meta/recipes-support/libcap-ng/libcap-ng-python_0.8.4.bb b/meta/recipes-support/libcap-ng/libcap-ng-python_0.8.5.bb index 4790134ae9..4790134ae9 100644 --- a/meta/recipes-support/libcap-ng/libcap-ng-python_0.8.4.bb +++ b/meta/recipes-support/libcap-ng/libcap-ng-python_0.8.5.bb | |||
diff --git a/meta/recipes-support/libcap-ng/libcap-ng.inc b/meta/recipes-support/libcap-ng/libcap-ng.inc index 845b7c2f0a..12b4002d11 100644 --- a/meta/recipes-support/libcap-ng/libcap-ng.inc +++ b/meta/recipes-support/libcap-ng/libcap-ng.inc | |||
@@ -8,10 +8,10 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f \ | |||
8 | file://COPYING.LIB;md5=e3eda01d9815f8d24aae2dbd89b68b06" | 8 | file://COPYING.LIB;md5=e3eda01d9815f8d24aae2dbd89b68b06" |
9 | 9 | ||
10 | SRC_URI = "https://people.redhat.com/sgrubb/libcap-ng/libcap-ng-${PV}.tar.gz \ | 10 | SRC_URI = "https://people.redhat.com/sgrubb/libcap-ng/libcap-ng-${PV}.tar.gz \ |
11 | file://fix-issues-with-swig-4-2.patch \ | 11 | file://0001-Fix-python-path-when-invoking-py-compile-54.patch \ |
12 | " | 12 | " |
13 | 13 | ||
14 | SRC_URI[sha256sum] = "68581d3b38e7553cb6f6ddf7813b1fc99e52856f21421f7b477ce5abd2605a8a" | 14 | SRC_URI[sha256sum] = "3ba5294d1cbdfa98afaacfbc00b6af9ed2b83e8a21817185dfd844cc8c7ac6ff" |
15 | 15 | ||
16 | EXTRA_OECONF:append:class-target = " --with-capability_header=${STAGING_INCDIR}/linux/capability.h" | 16 | EXTRA_OECONF:append:class-target = " --with-capability_header=${STAGING_INCDIR}/linux/capability.h" |
17 | EXTRA_OECONF:append:class-nativesdk = " --with-capability_header=${STAGING_INCDIR}/linux/capability.h" | 17 | EXTRA_OECONF:append:class-nativesdk = " --with-capability_header=${STAGING_INCDIR}/linux/capability.h" |
diff --git a/meta/recipes-support/libcap-ng/libcap-ng_0.8.4.bb b/meta/recipes-support/libcap-ng/libcap-ng_0.8.5.bb index 3dbe3e2ffd..3dbe3e2ffd 100644 --- a/meta/recipes-support/libcap-ng/libcap-ng_0.8.4.bb +++ b/meta/recipes-support/libcap-ng/libcap-ng_0.8.5.bb | |||
diff --git a/meta/recipes-support/libfm/libfm_1.3.2.bb b/meta/recipes-support/libfm/libfm_1.3.2.bb index 057c737029..1d7609165b 100644 --- a/meta/recipes-support/libfm/libfm_1.3.2.bb +++ b/meta/recipes-support/libfm/libfm_1.3.2.bb | |||
@@ -53,3 +53,7 @@ do_install:append () { | |||
53 | rm -f ${D}${libdir}/libfm-extra.a | 53 | rm -f ${D}${libdir}/libfm-extra.a |
54 | rm -f ${D}${libdir}/libfm-extra.la | 54 | rm -f ${D}${libdir}/libfm-extra.la |
55 | } | 55 | } |
56 | |||
57 | # http://errors.yoctoproject.org/Errors/Details/766924/ | ||
58 | # libfm-1.3.2/src/actions/action.c:2050:25: error: assignment to 'gchar **' {aka 'char **'} from incompatible pointer type 'const gchar * const*' {aka 'const char * const*'} [-Wincompatible-pointer-types] | ||
59 | CFLAGS += "-Wno-error=incompatible-pointer-types" | ||
diff --git a/meta/recipes-support/libgit2/libgit2_1.7.2.bb b/meta/recipes-support/libgit2/libgit2_1.8.1.bb index d1629c0c0b..618a4ea6ea 100644 --- a/meta/recipes-support/libgit2/libgit2_1.7.2.bb +++ b/meta/recipes-support/libgit2/libgit2_1.8.1.bb | |||
@@ -1,12 +1,12 @@ | |||
1 | SUMMARY = "the Git linkable library" | 1 | SUMMARY = "the Git linkable library" |
2 | HOMEPAGE = "http://libgit2.github.com/" | 2 | HOMEPAGE = "http://libgit2.github.com/" |
3 | LICENSE = "GPL-2.0-with-GCC-exception & MIT & OpenSSL & BSD-3-Clause & Zlib & ISC & LGPL-2.1-or-later & CC0-1.0 & BSD-2-Clause" | 3 | LICENSE = "GPL-2.0-with-GCC-exception & MIT & OpenSSL & BSD-3-Clause & Zlib & ISC & LGPL-2.1-or-later & CC0-1.0 & BSD-2-Clause" |
4 | LIC_FILES_CHKSUM = "file://COPYING;md5=5bdf47bbc9a39dc6ce076d59e322dc17" | 4 | LIC_FILES_CHKSUM = "file://COPYING;md5=8eacfdc17c8f4d219e131a073973b97d" |
5 | 5 | ||
6 | DEPENDS = "curl openssl zlib libssh2 libgcrypt libpcre2" | 6 | DEPENDS = "curl openssl zlib libssh2 libgcrypt libpcre2" |
7 | 7 | ||
8 | SRC_URI = "git://github.com/libgit2/libgit2.git;branch=maint/v1.7;protocol=https" | 8 | SRC_URI = "git://github.com/libgit2/libgit2.git;branch=main;protocol=https" |
9 | SRCREV = "a418d9d4ab87bae16b87d8f37143a4687ae0e4b2" | 9 | SRCREV = "36f7e21ad757a3dacc58cf7944329da6bc1d6e96" |
10 | 10 | ||
11 | S = "${WORKDIR}/git" | 11 | S = "${WORKDIR}/git" |
12 | 12 | ||
diff --git a/meta/recipes-support/libgpg-error/libgpg-error_1.48.bb b/meta/recipes-support/libgpg-error/libgpg-error_1.49.bb index 9f2b49209d..b7d41ab489 100644 --- a/meta/recipes-support/libgpg-error/libgpg-error_1.48.bb +++ b/meta/recipes-support/libgpg-error/libgpg-error_1.49.bb | |||
@@ -20,7 +20,7 @@ SRC_URI = "${GNUPG_MIRROR}/libgpg-error/libgpg-error-${PV}.tar.bz2 \ | |||
20 | file://run-ptest \ | 20 | file://run-ptest \ |
21 | " | 21 | " |
22 | 22 | ||
23 | SRC_URI[sha256sum] = "89ce1ae893e122924b858de84dc4f67aae29ffa610ebf668d5aa539045663d6f" | 23 | SRC_URI[sha256sum] = "8b79d54639dbf4abc08b5406fb2f37e669a2dec091dd024fb87dd367131c63a9" |
24 | 24 | ||
25 | BINCONFIG = "${bindir}/gpg-error-config" | 25 | BINCONFIG = "${bindir}/gpg-error-config" |
26 | 26 | ||
diff --git a/meta/recipes-support/libnl/libnl_3.9.0.bb b/meta/recipes-support/libnl/libnl_3.9.0.bb index db9d93e8cb..b2825374cf 100644 --- a/meta/recipes-support/libnl/libnl_3.9.0.bb +++ b/meta/recipes-support/libnl/libnl_3.9.0.bb | |||
@@ -4,7 +4,7 @@ APIs to netlink protocol based Linux kernel interfaces. libnl is the core \ | |||
4 | library implementing the fundamentals required to use the netlink protocol \ | 4 | library implementing the fundamentals required to use the netlink protocol \ |
5 | such as socket handling, message construction and parsing, and sending \ | 5 | such as socket handling, message construction and parsing, and sending \ |
6 | and receiving of data." | 6 | and receiving of data." |
7 | HOMEPAGE = "http://www.infradead.org/~tgr/libnl/" | 7 | HOMEPAGE = "https://github.com/thom311/libnl" |
8 | SECTION = "libs/network" | 8 | SECTION = "libs/network" |
9 | 9 | ||
10 | PE = "1" | 10 | PE = "1" |
diff --git a/meta/recipes-support/libpcre/libpcre_8.45.bb b/meta/recipes-support/libpcre/libpcre_8.45.bb index 46fedbae48..17fb6846a5 100644 --- a/meta/recipes-support/libpcre/libpcre_8.45.bb +++ b/meta/recipes-support/libpcre/libpcre_8.45.bb | |||
@@ -56,7 +56,7 @@ BBCLASSEXTEND = "native nativesdk" | |||
56 | 56 | ||
57 | do_install_ptest() { | 57 | do_install_ptest() { |
58 | t=${D}${PTEST_PATH} | 58 | t=${D}${PTEST_PATH} |
59 | cp ${WORKDIR}/Makefile $t | 59 | cp ${UNPACKDIR}/Makefile $t |
60 | cp -r ${S}/testdata $t | 60 | cp -r ${S}/testdata $t |
61 | for i in pcre_stringpiece_unittest pcregrep pcretest; \ | 61 | for i in pcre_stringpiece_unittest pcregrep pcretest; \ |
62 | do cp ${B}/.libs/$i $t; \ | 62 | do cp ${B}/.libs/$i $t; \ |
diff --git a/meta/recipes-support/libusb/libusb1_1.0.27.bb b/meta/recipes-support/libusb/libusb1_1.0.27.bb index f2431d75c8..5bf854f95d 100644 --- a/meta/recipes-support/libusb/libusb1_1.0.27.bb +++ b/meta/recipes-support/libusb/libusb1_1.0.27.bb | |||
@@ -8,6 +8,8 @@ SECTION = "libs" | |||
8 | LICENSE = "LGPL-2.1-or-later" | 8 | LICENSE = "LGPL-2.1-or-later" |
9 | LIC_FILES_CHKSUM = "file://COPYING;md5=fbc093901857fcd118f065f900982c24" | 9 | LIC_FILES_CHKSUM = "file://COPYING;md5=fbc093901857fcd118f065f900982c24" |
10 | 10 | ||
11 | CVE_PRODUCT = "libusb" | ||
12 | |||
11 | BBCLASSEXTEND = "native nativesdk" | 13 | BBCLASSEXTEND = "native nativesdk" |
12 | 14 | ||
13 | SRC_URI = "${GITHUB_BASE_URI}/download/v${PV}/libusb-${PV}.tar.bz2 \ | 15 | SRC_URI = "${GITHUB_BASE_URI}/download/v${PV}/libusb-${PV}.tar.bz2 \ |
diff --git a/meta/recipes-support/lzop/lzop_1.04.bb b/meta/recipes-support/lzop/lzop_1.04.bb index d9b3524b67..2b83aa06d9 100644 --- a/meta/recipes-support/lzop/lzop_1.04.bb +++ b/meta/recipes-support/lzop/lzop_1.04.bb | |||
@@ -21,7 +21,7 @@ SRC_URI[sha256sum] = "7e72b62a8a60aff5200a047eea0773a8fb205caf7acbe1774d95147f30 | |||
21 | inherit autotools | 21 | inherit autotools |
22 | 22 | ||
23 | do_configure:prepend () { | 23 | do_configure:prepend () { |
24 | install -Dm 0644 ${WORKDIR}/acinclude.m4 ${S}/acinclude.m4 | 24 | install -Dm 0644 ${UNPACKDIR}/acinclude.m4 ${S}/acinclude.m4 |
25 | } | 25 | } |
26 | 26 | ||
27 | BBCLASSEXTEND = "native nativesdk" | 27 | BBCLASSEXTEND = "native nativesdk" |
diff --git a/meta/recipes-support/nghttp2/nghttp2_1.61.0.bb b/meta/recipes-support/nghttp2/nghttp2_1.62.0.bb index ad85576dcb..5a820d8cd5 100644 --- a/meta/recipes-support/nghttp2/nghttp2_1.61.0.bb +++ b/meta/recipes-support/nghttp2/nghttp2_1.62.0.bb | |||
@@ -5,7 +5,7 @@ LICENSE = "MIT" | |||
5 | LIC_FILES_CHKSUM = "file://COPYING;md5=764abdf30b2eadd37ce47dcbce0ea1ec" | 5 | LIC_FILES_CHKSUM = "file://COPYING;md5=764abdf30b2eadd37ce47dcbce0ea1ec" |
6 | 6 | ||
7 | SRC_URI = "${GITHUB_BASE_URI}/download/v${PV}/nghttp2-${PV}.tar.xz" | 7 | SRC_URI = "${GITHUB_BASE_URI}/download/v${PV}/nghttp2-${PV}.tar.xz" |
8 | SRC_URI[sha256sum] = "c0e660175b9dc429f11d25b9507a834fb752eea9135ab420bb7cb7e9dbcc9654" | 8 | SRC_URI[sha256sum] = "26798308fa0a12dabdb7ba8c77f74383019d3a0f1f36d25958b836af22474958" |
9 | 9 | ||
10 | inherit cmake manpages python3native github-releases | 10 | inherit cmake manpages python3native github-releases |
11 | PACKAGECONFIG[manpages] = "" | 11 | PACKAGECONFIG[manpages] = "" |
diff --git a/meta/recipes-support/numactl/numactl_git.bb b/meta/recipes-support/numactl/numactl_git.bb index bd16df91ec..6c853f1966 100644 --- a/meta/recipes-support/numactl/numactl_git.bb +++ b/meta/recipes-support/numactl/numactl_git.bb | |||
@@ -52,7 +52,7 @@ do_install_ptest() { | |||
52 | install -m 0755 ${B}/test/$i ${D}${PTEST_PATH}/test | 52 | install -m 0755 ${B}/test/$i ${D}${PTEST_PATH}/test |
53 | done | 53 | done |
54 | 54 | ||
55 | install -m 0755 ${WORKDIR}/Makefile ${D}${PTEST_PATH}/ | 55 | install -m 0755 ${UNPACKDIR}/Makefile ${D}${PTEST_PATH}/ |
56 | install -m 0755 ${B}/.libs/numactl ${D}${PTEST_PATH}/ | 56 | install -m 0755 ${B}/.libs/numactl ${D}${PTEST_PATH}/ |
57 | } | 57 | } |
58 | 58 | ||
diff --git a/meta/recipes-support/pinentry/pinentry-1.2.1/gpg-error_pkconf.patch b/meta/recipes-support/pinentry/pinentry-1.3.0/gpg-error_pkconf.patch index bb7e43b1e7..863ba9e34b 100644 --- a/meta/recipes-support/pinentry/pinentry-1.2.1/gpg-error_pkconf.patch +++ b/meta/recipes-support/pinentry/pinentry-1.3.0/gpg-error_pkconf.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 785777dc0bc6b69ff68c91547ec6b6634049662f Mon Sep 17 00:00:00 2001 | 1 | From ccc3c6a8d469bbfa6717b970cfe70816c1fd545e Mon Sep 17 00:00:00 2001 |
2 | From: Armin Kuster <akuster@mvista.com> | 2 | From: Armin Kuster <akuster@mvista.com> |
3 | Date: Fri, 2 Sep 2005 11:50:01 +0000 | 3 | Date: Fri, 2 Sep 2005 11:50:01 +0000 |
4 | Subject: [PATCH] Add gtk+, avahi, dbus-0.34 (.36 coming soon) and | 4 | Subject: [PATCH] Add gtk+, avahi, dbus-0.34 (.36 coming soon) and |
@@ -9,20 +9,20 @@ the gpg-error recipe for gpg-error.pc generation. | |||
9 | Upstream-Status: Inappropriate [OE specific] | 9 | Upstream-Status: Inappropriate [OE specific] |
10 | 10 | ||
11 | Signed-off-by: Armin Kuster <akuster@mvista.com> | 11 | Signed-off-by: Armin Kuster <akuster@mvista.com> |
12 | |||
13 | --- | 12 | --- |
14 | m4/gpg-error.m4 | 160 ++---------------------------------------------- | 13 | m4/gpg-error.m4 | 184 ++---------------------------------------------- |
15 | 1 file changed, 4 insertions(+), 156 deletions(-) | 14 | 1 file changed, 5 insertions(+), 179 deletions(-) |
16 | 15 | ||
17 | diff --git a/m4/gpg-error.m4 b/m4/gpg-error.m4 | 16 | diff --git a/m4/gpg-error.m4 b/m4/gpg-error.m4 |
18 | index 4b5cd40..7dfbb83 100644 | 17 | index 7fa52b1..c0784ed 100644 |
19 | --- a/m4/gpg-error.m4 | 18 | --- a/m4/gpg-error.m4 |
20 | +++ b/m4/gpg-error.m4 | 19 | +++ b/m4/gpg-error.m4 |
21 | @@ -26,160 +26,12 @@ dnl config script does not match the host specification the script | 20 | @@ -15,188 +15,18 @@ |
22 | dnl is added to the gpg_config_script_warn variable. | 21 | dnl |
22 | dnl Find gpg-error-config, for backward compatibility | ||
23 | dnl | 23 | dnl |
24 | AC_DEFUN([AM_PATH_GPG_ERROR], | 24 | -dnl _AM_PATH_POSSIBLE_GPG_ERROR_CONFIG |
25 | -[ AC_REQUIRE([AC_CANONICAL_HOST]) | 25 | -AC_DEFUN([_AM_PATH_POSSIBLE_GPG_ERROR_CONFIG],[dnl |
26 | - gpg_error_config_prefix="" | 26 | - gpg_error_config_prefix="" |
27 | - dnl --with-libgpg-error-prefix=PFX is the preferred name for this option, | 27 | - dnl --with-libgpg-error-prefix=PFX is the preferred name for this option, |
28 | - dnl since that is consistent with how our three siblings use the directory/ | 28 | - dnl since that is consistent with how our three siblings use the directory/ |
@@ -58,9 +58,14 @@ index 4b5cd40..7dfbb83 100644 | |||
58 | - fi | 58 | - fi |
59 | - | 59 | - |
60 | - AC_PATH_PROG(GPG_ERROR_CONFIG, gpg-error-config, no) | 60 | - AC_PATH_PROG(GPG_ERROR_CONFIG, gpg-error-config, no) |
61 | - min_gpg_error_version=ifelse([$1], ,1.33,$1) | 61 | -]) |
62 | - ok=no | ||
63 | - | 62 | - |
63 | -dnl | ||
64 | -dnl Find gpgrt-config, which uses .pc file | ||
65 | -dnl (minimum pkg-config functionality, supporting cross build) | ||
66 | -dnl | ||
67 | -dnl _AM_PATH_GPGRT_CONFIG | ||
68 | -AC_DEFUN([_AM_PATH_GPGRT_CONFIG],[dnl | ||
64 | - AC_PATH_PROG(GPGRT_CONFIG, gpgrt-config, no, [$prefix/bin:$PATH]) | 69 | - AC_PATH_PROG(GPGRT_CONFIG, gpgrt-config, no, [$prefix/bin:$PATH]) |
65 | - if test "$GPGRT_CONFIG" != "no"; then | 70 | - if test "$GPGRT_CONFIG" != "no"; then |
66 | - # Determine gpgrt_libdir | 71 | - # Determine gpgrt_libdir |
@@ -116,8 +121,9 @@ index 4b5cd40..7dfbb83 100644 | |||
116 | - fi | 121 | - fi |
117 | - if test -n "$gpgrt_libdir"; then break; fi | 122 | - if test -n "$gpgrt_libdir"; then break; fi |
118 | - done | 123 | - done |
119 | - else | 124 | - fi |
120 | - # When we cannot determine system libdir-format, use this: | 125 | - if test -z "$gpgrt_libdir"; then |
126 | - # No valid pkgconfig dir in any of the system directories, fallback | ||
121 | - gpgrt_libdir=${possible_libdir1} | 127 | - gpgrt_libdir=${possible_libdir1} |
122 | - fi | 128 | - fi |
123 | - else | 129 | - else |
@@ -131,12 +137,33 @@ index 4b5cd40..7dfbb83 100644 | |||
131 | - AC_MSG_NOTICE([Use gpgrt-config with $gpgrt_libdir as gpg-error-config]) | 137 | - AC_MSG_NOTICE([Use gpgrt-config with $gpgrt_libdir as gpg-error-config]) |
132 | - gpg_error_config_version=`$GPG_ERROR_CONFIG --modversion` | 138 | - gpg_error_config_version=`$GPG_ERROR_CONFIG --modversion` |
133 | - else | 139 | - else |
140 | - gpg_error_config_version=`$GPG_ERROR_CONFIG --version` | ||
134 | - unset GPGRT_CONFIG | 141 | - unset GPGRT_CONFIG |
135 | - fi | 142 | - fi |
136 | - elif test "$GPG_ERROR_CONFIG" != "no"; then | 143 | - elif test "$GPG_ERROR_CONFIG" != "no"; then |
137 | - gpg_error_config_version=`$GPG_ERROR_CONFIG --version` | 144 | - gpg_error_config_version=`$GPG_ERROR_CONFIG --version` |
138 | - unset GPGRT_CONFIG | 145 | - unset GPGRT_CONFIG |
139 | - fi | 146 | - fi |
147 | -]) | ||
148 | - | ||
149 | -dnl AM_PATH_GPG_ERROR([MINIMUM-VERSION, | ||
150 | -dnl [ACTION-IF-FOUND [, ACTION-IF-NOT-FOUND ]]]) | ||
151 | -dnl | ||
152 | -dnl Test for libgpg-error and define GPG_ERROR_CFLAGS, GPG_ERROR_LIBS, | ||
153 | -dnl GPG_ERROR_MT_CFLAGS, and GPG_ERROR_MT_LIBS. The _MT_ variants are | ||
154 | -dnl used for programs requireing real multi thread support. | ||
155 | -dnl | ||
156 | dnl If a prefix option is not used, the config script is first | ||
157 | dnl searched in $SYSROOT/bin and then along $PATH. If the used | ||
158 | dnl config script does not match the host specification the script | ||
159 | dnl is added to the gpg_config_script_warn variable. | ||
160 | dnl | ||
161 | -AC_DEFUN([AM_PATH_GPG_ERROR],[dnl | ||
162 | -AC_REQUIRE([AC_CANONICAL_HOST])dnl | ||
163 | -AC_REQUIRE([_AM_PATH_POSSIBLE_GPG_ERROR_CONFIG])dnl | ||
164 | -AC_REQUIRE([_AM_PATH_GPGRT_CONFIG])dnl | ||
165 | - min_gpg_error_version=ifelse([$1], ,1.33,$1) | ||
166 | - ok=no | ||
140 | - if test "$GPG_ERROR_CONFIG" != "no"; then | 167 | - if test "$GPG_ERROR_CONFIG" != "no"; then |
141 | - req_major=`echo $min_gpg_error_version | \ | 168 | - req_major=`echo $min_gpg_error_version | \ |
142 | - sed 's/\([[0-9]]*\)\.\([[0-9]]*\)/\1/'` | 169 | - sed 's/\([[0-9]]*\)\.\([[0-9]]*\)/\1/'` |
@@ -157,6 +184,7 @@ index 4b5cd40..7dfbb83 100644 | |||
157 | - fi | 184 | - fi |
158 | - fi | 185 | - fi |
159 | - AC_MSG_CHECKING(for GPG Error - version >= $min_gpg_error_version) | 186 | - AC_MSG_CHECKING(for GPG Error - version >= $min_gpg_error_version) |
187 | +AC_DEFUN([AM_PATH_GPG_ERROR], | ||
160 | +[ | 188 | +[ |
161 | + min_gpg_error_version=ifelse([$1], ,0.0,$1) | 189 | + min_gpg_error_version=ifelse([$1], ,0.0,$1) |
162 | + PKG_CHECK_MODULES(GPG_ERROR, [gpg-error >= $min_gpg_error_version gpg-error], [ok=yes], [ok=no]) | 190 | + PKG_CHECK_MODULES(GPG_ERROR, [gpg-error >= $min_gpg_error_version gpg-error], [ok=yes], [ok=no]) |
@@ -183,7 +211,7 @@ index 4b5cd40..7dfbb83 100644 | |||
183 | if test x"$gpg_error_config_host" != xnone ; then | 211 | if test x"$gpg_error_config_host" != xnone ; then |
184 | if test x"$gpg_error_config_host" != x"$host" ; then | 212 | if test x"$gpg_error_config_host" != x"$host" ; then |
185 | AC_MSG_WARN([[ | 213 | AC_MSG_WARN([[ |
186 | @@ -194,10 +46,6 @@ AC_DEFUN([AM_PATH_GPG_ERROR], | 214 | @@ -211,10 +41,6 @@ AC_REQUIRE([_AM_PATH_GPGRT_CONFIG])dnl |
187 | fi | 215 | fi |
188 | fi | 216 | fi |
189 | else | 217 | else |
diff --git a/meta/recipes-support/pinentry/pinentry-1.2.1/libassuan_pkgconf.patch b/meta/recipes-support/pinentry/pinentry-1.3.0/libassuan_pkgconf.patch index f4aec2d1c3..bee9acd081 100644 --- a/meta/recipes-support/pinentry/pinentry-1.2.1/libassuan_pkgconf.patch +++ b/meta/recipes-support/pinentry/pinentry-1.3.0/libassuan_pkgconf.patch | |||
@@ -1,4 +1,4 @@ | |||
1 | From 26fb6c3faa27180c8ed9ada1728c3d8683a65f3a Mon Sep 17 00:00:00 2001 | 1 | From fb6179fabb5e793a83cbb0d0d45814d99b97807a Mon Sep 17 00:00:00 2001 |
2 | From: Armin Kuster <akuster@mvista.com> | 2 | From: Armin Kuster <akuster@mvista.com> |
3 | Date: Thu, 22 May 2014 10:50:00 +0100 | 3 | Date: Thu, 22 May 2014 10:50:00 +0100 |
4 | Subject: [PATCH] libassuan: Improve pkgconfig support | 4 | Subject: [PATCH] libassuan: Improve pkgconfig support |
@@ -9,17 +9,16 @@ the libassuan recipe for libassuan.pc generation. | |||
9 | Upstream-Status: Inappropriate [OE specific] | 9 | Upstream-Status: Inappropriate [OE specific] |
10 | 10 | ||
11 | Signed-off-by: Armin Kuster <akuster@mvista.com> | 11 | Signed-off-by: Armin Kuster <akuster@mvista.com> |
12 | |||
13 | --- | 12 | --- |
14 | configure.ac | 4 +-- | 13 | configure.ac | 4 +-- |
15 | m4/libassuan.m4 | 93 ++++--------------------------------------------- | 14 | m4/libassuan.m4 | 94 ++++--------------------------------------------- |
16 | 2 files changed, 9 insertions(+), 88 deletions(-) | 15 | 2 files changed, 9 insertions(+), 89 deletions(-) |
17 | 16 | ||
18 | diff --git a/configure.ac b/configure.ac | 17 | diff --git a/configure.ac b/configure.ac |
19 | index d8dcc0c..ca51766 100644 | 18 | index f04c00d..8e6da5c 100644 |
20 | --- a/configure.ac | 19 | --- a/configure.ac |
21 | +++ b/configure.ac | 20 | +++ b/configure.ac |
22 | @@ -267,8 +267,8 @@ if test "$have_libassuan" = "yes"; then | 21 | @@ -263,8 +263,8 @@ if test "$have_libassuan" = "yes"; then |
23 | [version of the libassuan library]) | 22 | [version of the libassuan library]) |
24 | fi | 23 | fi |
25 | 24 | ||
@@ -31,7 +30,7 @@ index d8dcc0c..ca51766 100644 | |||
31 | 30 | ||
32 | dnl Checks for libsecmem. | 31 | dnl Checks for libsecmem. |
33 | diff --git a/m4/libassuan.m4 b/m4/libassuan.m4 | 32 | diff --git a/m4/libassuan.m4 b/m4/libassuan.m4 |
34 | index df50484..2057412 100644 | 33 | index a2eb5d9..897f407 100644 |
35 | --- a/m4/libassuan.m4 | 34 | --- a/m4/libassuan.m4 |
36 | +++ b/m4/libassuan.m4 | 35 | +++ b/m4/libassuan.m4 |
37 | @@ -15,30 +15,8 @@ dnl | 36 | @@ -15,30 +15,8 @@ dnl |
@@ -51,7 +50,7 @@ index df50484..2057412 100644 | |||
51 | - fi | 50 | - fi |
52 | - | 51 | - |
53 | - use_gpgrt_config="" | 52 | - use_gpgrt_config="" |
54 | - if test x"${LIBASSUAN_CONFIG}" = x -a x"$GPGRT_CONFIG" != x -a "$GPGRT_CONFIG" != "no"; then | 53 | - if test x"$GPGRT_CONFIG" != x -a "$GPGRT_CONFIG" != "no"; then |
55 | - if $GPGRT_CONFIG libassuan --exists; then | 54 | - if $GPGRT_CONFIG libassuan --exists; then |
56 | - LIBASSUAN_CONFIG="$GPGRT_CONFIG libassuan" | 55 | - LIBASSUAN_CONFIG="$GPGRT_CONFIG libassuan" |
57 | - AC_MSG_NOTICE([Use gpgrt-config as libassuan-config]) | 56 | - AC_MSG_NOTICE([Use gpgrt-config as libassuan-config]) |
@@ -67,7 +66,7 @@ index df50484..2057412 100644 | |||
67 | tmp=ifelse([$1], ,1:0.9.2,$1) | 66 | tmp=ifelse([$1], ,1:0.9.2,$1) |
68 | if echo "$tmp" | grep ':' >/dev/null 2>/dev/null ; then | 67 | if echo "$tmp" | grep ':' >/dev/null 2>/dev/null ; then |
69 | req_libassuan_api=`echo "$tmp" | sed 's/\(.*\):\(.*\)/\1/'` | 68 | req_libassuan_api=`echo "$tmp" | sed 's/\(.*\):\(.*\)/\1/'` |
70 | @@ -48,58 +26,11 @@ AC_DEFUN([_AM_PATH_LIBASSUAN_COMMON], | 69 | @@ -48,59 +26,11 @@ AC_DEFUN([_AM_PATH_LIBASSUAN_COMMON], |
71 | min_libassuan_version="$tmp" | 70 | min_libassuan_version="$tmp" |
72 | fi | 71 | fi |
73 | 72 | ||
@@ -112,6 +111,7 @@ index df50484..2057412 100644 | |||
112 | - | 111 | - |
113 | - if test $ok = yes; then | 112 | - if test $ok = yes; then |
114 | - AC_MSG_RESULT([yes ($libassuan_config_version)]) | 113 | - AC_MSG_RESULT([yes ($libassuan_config_version)]) |
114 | - AC_DEFINE(LIBASSUAN_API_REQUESTED, $req_libassuan_api, Requested API version for libassuan) | ||
115 | - else | 115 | - else |
116 | - AC_MSG_RESULT(no) | 116 | - AC_MSG_RESULT(no) |
117 | - fi | 117 | - fi |
@@ -128,7 +128,7 @@ index df50484..2057412 100644 | |||
128 | if test "$tmp" -gt 0 ; then | 128 | if test "$tmp" -gt 0 ; then |
129 | AC_MSG_CHECKING([LIBASSUAN API version]) | 129 | AC_MSG_CHECKING([LIBASSUAN API version]) |
130 | if test "$req_libassuan_api" -eq "$tmp" ; then | 130 | if test "$req_libassuan_api" -eq "$tmp" ; then |
131 | @@ -114,11 +45,7 @@ AC_DEFUN([_AM_PATH_LIBASSUAN_COMMON], | 131 | @@ -117,11 +47,7 @@ AC_DEFUN([_AM_PATH_LIBASSUAN_COMMON], |
132 | 132 | ||
133 | if test $ok = yes; then | 133 | if test $ok = yes; then |
134 | if test x"$host" != x ; then | 134 | if test x"$host" != x ; then |
@@ -141,7 +141,7 @@ index df50484..2057412 100644 | |||
141 | if test x"$libassuan_config_host" != xnone ; then | 141 | if test x"$libassuan_config_host" != xnone ; then |
142 | if test x"$libassuan_config_host" != x"$host" ; then | 142 | if test x"$libassuan_config_host" != x"$host" ; then |
143 | AC_MSG_WARN([[ | 143 | AC_MSG_WARN([[ |
144 | @@ -141,7 +68,7 @@ dnl Test whether libassuan has at least MINIMUM-VERSION. This is | 144 | @@ -144,7 +70,7 @@ dnl Test whether libassuan has at least MINIMUM-VERSION. This is |
145 | dnl used to test for features only available in newer versions. | 145 | dnl used to test for features only available in newer versions. |
146 | dnl | 146 | dnl |
147 | AC_DEFUN([AM_CHECK_LIBASSUAN], | 147 | AC_DEFUN([AM_CHECK_LIBASSUAN], |
@@ -150,7 +150,7 @@ index df50484..2057412 100644 | |||
150 | if test $ok = yes; then | 150 | if test $ok = yes; then |
151 | ifelse([$2], , :, [$2]) | 151 | ifelse([$2], , :, [$2]) |
152 | else | 152 | else |
153 | @@ -157,16 +84,10 @@ dnl [ACTION-IF-FOUND [, ACTION-IF-NOT-FOUND ]]]) | 153 | @@ -160,16 +86,10 @@ dnl [ACTION-IF-FOUND [, ACTION-IF-NOT-FOUND ]]]) |
154 | dnl Test for libassuan and define LIBASSUAN_CFLAGS and LIBASSUAN_LIBS | 154 | dnl Test for libassuan and define LIBASSUAN_CFLAGS and LIBASSUAN_LIBS |
155 | dnl | 155 | dnl |
156 | AC_DEFUN([AM_PATH_LIBASSUAN], | 156 | AC_DEFUN([AM_PATH_LIBASSUAN], |
diff --git a/meta/recipes-support/pinentry/pinentry_1.2.1.bb b/meta/recipes-support/pinentry/pinentry_1.3.0.bb index 7daf80f36e..6ce873871b 100644 --- a/meta/recipes-support/pinentry/pinentry_1.2.1.bb +++ b/meta/recipes-support/pinentry/pinentry_1.3.0.bb | |||
@@ -6,7 +6,7 @@ DESCRIPTION = "\ | |||
6 | 6 | ||
7 | HOMEPAGE = "http://www.gnupg.org/related_software/pinentry/index.en.html" | 7 | HOMEPAGE = "http://www.gnupg.org/related_software/pinentry/index.en.html" |
8 | LICENSE = "GPL-2.0-only" | 8 | LICENSE = "GPL-2.0-only" |
9 | LIC_FILES_CHKSUM = "file://COPYING;md5=cbbd794e2a0a289b9dfcc9f513d1996e" | 9 | LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263" |
10 | 10 | ||
11 | DEPENDS = "gettext-native libassuan libgpg-error" | 11 | DEPENDS = "gettext-native libassuan libgpg-error" |
12 | 12 | ||
@@ -16,7 +16,7 @@ SRC_URI = "${GNUPG_MIRROR}/${BPN}/${BPN}-${PV}.tar.bz2 \ | |||
16 | file://gpg-error_pkconf.patch \ | 16 | file://gpg-error_pkconf.patch \ |
17 | " | 17 | " |
18 | 18 | ||
19 | SRC_URI[sha256sum] = "457a185e5a85238fb945a955dc6352ab962dc8b48720b62fc9fa48c7540a4067" | 19 | SRC_URI[sha256sum] = "9b3cd5226e7597f2fded399a3bc659923351536559e9db0826981bca316494de" |
20 | 20 | ||
21 | inherit autotools pkgconfig | 21 | inherit autotools pkgconfig |
22 | 22 | ||
diff --git a/meta/recipes-support/ptest-runner/ptest-runner_2.4.3.bb b/meta/recipes-support/ptest-runner/ptest-runner_2.4.4.bb index e6668da01f..2263e07280 100644 --- a/meta/recipes-support/ptest-runner/ptest-runner_2.4.3.bb +++ b/meta/recipes-support/ptest-runner/ptest-runner_2.4.4.bb | |||
@@ -7,7 +7,7 @@ HOMEPAGE = "http://git.yoctoproject.org/cgit/cgit.cgi/ptest-runner2/about/" | |||
7 | LICENSE = "GPL-2.0-or-later" | 7 | LICENSE = "GPL-2.0-or-later" |
8 | LIC_FILES_CHKSUM = "file://LICENSE;md5=751419260aa954499f7abaabaa882bbe" | 8 | LIC_FILES_CHKSUM = "file://LICENSE;md5=751419260aa954499f7abaabaa882bbe" |
9 | 9 | ||
10 | SRCREV = "92c1b97bfdb4a94acc1cabcaf97eef52dc29144c" | 10 | SRCREV = "95f528cff0bc52903b98c292d4a322fcffa74471" |
11 | PV .= "+git" | 11 | PV .= "+git" |
12 | 12 | ||
13 | SRC_URI = "git://git.yoctoproject.org/ptest-runner2;branch=master;protocol=https \ | 13 | SRC_URI = "git://git.yoctoproject.org/ptest-runner2;branch=master;protocol=https \ |
diff --git a/meta/recipes-support/user-creation/xuser-account_0.1.bb b/meta/recipes-support/user-creation/xuser-account_0.1.bb index 2bd3699e74..80a429c6d3 100644 --- a/meta/recipes-support/user-creation/xuser-account_0.1.bb +++ b/meta/recipes-support/user-creation/xuser-account_0.1.bb | |||
@@ -6,6 +6,9 @@ SRC_URI = "file://system-xuser.conf" | |||
6 | 6 | ||
7 | inherit allarch useradd | 7 | inherit allarch useradd |
8 | 8 | ||
9 | S = "${WORKDIR}/sources" | ||
10 | UNPACKDIR = "${S}" | ||
11 | |||
9 | do_configure() { | 12 | do_configure() { |
10 | : | 13 | : |
11 | } | 14 | } |
diff --git a/scripts/gen-site-config b/scripts/gen-site-config deleted file mode 100755 index 727b809c0f..0000000000 --- a/scripts/gen-site-config +++ /dev/null | |||
@@ -1,43 +0,0 @@ | |||
1 | #! /bin/sh | ||
2 | # Copyright (c) 2005-2008 Wind River Systems, Inc. | ||
3 | # | ||
4 | # SPDX-License-Identifier: GPL-2.0-only | ||
5 | # | ||
6 | |||
7 | cat << EOF | ||
8 | AC_PREREQ(2.57) | ||
9 | AC_INIT([site_wide],[1.0.0]) | ||
10 | |||
11 | EOF | ||
12 | |||
13 | # Disable as endian is set in the default config | ||
14 | #echo AC_C_BIGENDIAN | ||
15 | #echo | ||
16 | |||
17 | if [ -e $1/types ] ; then | ||
18 | while read type ; do | ||
19 | echo "AC_CHECK_SIZEOF([$type])" | ||
20 | done < $1/types | ||
21 | |||
22 | echo | ||
23 | fi | ||
24 | |||
25 | if [ -e $1/funcs ]; then | ||
26 | while read func ; do | ||
27 | echo "AC_CHECK_FUNCS([$func])" | ||
28 | done < $1/funcs | ||
29 | |||
30 | echo | ||
31 | fi | ||
32 | |||
33 | if [ -e $1/headers ]; then | ||
34 | while read header ; do | ||
35 | echo "AC_CHECK_HEADERS([$header])" | ||
36 | done < $1/headers | ||
37 | |||
38 | echo | ||
39 | fi | ||
40 | |||
41 | cat << EOF | ||
42 | AC_OUTPUT | ||
43 | EOF | ||
diff --git a/scripts/lib/build_perf/html/measurement_chart.html b/scripts/lib/build_perf/html/measurement_chart.html index 65f1a227ad..05bd84e6ce 100644 --- a/scripts/lib/build_perf/html/measurement_chart.html +++ b/scripts/lib/build_perf/html/measurement_chart.html | |||
@@ -1,50 +1,100 @@ | |||
1 | <script type="text/javascript"> | 1 | <script type="module"> |
2 | chartsDrawing += 1; | 2 | // Get raw data |
3 | google.charts.setOnLoadCallback(drawChart_{{ chart_elem_id }}); | 3 | const rawData = [ |
4 | function drawChart_{{ chart_elem_id }}() { | 4 | {% for sample in measurement.samples %} |
5 | var data = new google.visualization.DataTable(); | 5 | [{{ sample.commit_num }}, {{ sample.mean.gv_value() }}, {{ sample.start_time }}, '{{sample.commit}}'], |
6 | {% endfor %} | ||
7 | ]; | ||
6 | 8 | ||
7 | // Chart options | 9 | const convertToMinute = (time) => { |
8 | var options = { | 10 | return time[0]*60 + time[1] + time[2]/60 + time[3]/3600; |
9 | theme : 'material', | 11 | } |
10 | legend: 'none', | ||
11 | hAxis: { format: '', title: 'Commit number', | ||
12 | minValue: {{ chart_opts.haxis.min }}, | ||
13 | maxValue: {{ chart_opts.haxis.max }} }, | ||
14 | {% if measurement.type == 'time' %} | ||
15 | vAxis: { format: 'h:mm:ss' }, | ||
16 | {% else %} | ||
17 | vAxis: { format: '' }, | ||
18 | {% endif %} | ||
19 | pointSize: 5, | ||
20 | chartArea: { left: 80, right: 15 }, | ||
21 | }; | ||
22 | 12 | ||
23 | // Define data columns | 13 | // Update value format to either minutes or leave as size value |
24 | data.addColumn('number', 'Commit'); | 14 | const updateValue = (value) => { |
25 | data.addColumn('{{ measurement.value_type.gv_data_type }}', | 15 | // Assuming the array values are duration in the format [hours, minutes, seconds, milliseconds] |
26 | '{{ measurement.value_type.quantity }}'); | 16 | return Array.isArray(value) ? convertToMinute(value) : value |
27 | // Add data rows | 17 | } |
28 | data.addRows([ | ||
29 | {% for sample in measurement.samples %} | ||
30 | [{{ sample.commit_num }}, {{ sample.mean.gv_value() }}], | ||
31 | {% endfor %} | ||
32 | ]); | ||
33 | 18 | ||
34 | // Finally, draw the chart | 19 | // Convert raw data to the format: [time, value] |
35 | chart_div = document.getElementById('{{ chart_elem_id }}'); | 20 | const data = rawData.map(([commit, value, time]) => { |
36 | var chart = new google.visualization.LineChart(chart_div); | 21 | return [ |
37 | google.visualization.events.addListener(chart, 'ready', function () { | 22 | // The Date object takes values in milliseconds rather than seconds. So to use a Unix timestamp we have to multiply it by 1000. |
38 | //chart_div = document.getElementById('{{ chart_elem_id }}'); | 23 | new Date(time * 1000).getTime(), |
39 | //chart_div.innerHTML = '<img src="' + chart.getImageURI() + '">'; | 24 | // Assuming the array values are duration in the format [hours, minutes, seconds, milliseconds] |
40 | png_div = document.getElementById('{{ chart_elem_id }}_png'); | 25 | updateValue(value) |
41 | png_div.outerHTML = '<a id="{{ chart_elem_id }}_png" href="' + chart.getImageURI() + '">PNG</a>'; | 26 | ] |
42 | console.log("CHART READY: {{ chart_elem_id }}"); | 27 | }); |
43 | chartsDrawing -= 1; | 28 | |
44 | if (chartsDrawing == 0) | 29 | // Set chart options |
45 | console.log("ALL CHARTS READY"); | 30 | const option = { |
31 | tooltip: { | ||
32 | trigger: 'axis', | ||
33 | enterable: true, | ||
34 | position: function (point, params, dom, rect, size) { | ||
35 | return [point[0]-150, '10%']; | ||
36 | }, | ||
37 | formatter: function (param) { | ||
38 | const value = param[0].value[1] | ||
39 | const sample = rawData.filter(([commit, dataValue]) => updateValue(dataValue) === value) | ||
40 | // Add commit hash to the tooltip as a link | ||
41 | const commitLink = `https://git.yoctoproject.org/poky/commit/?id=${sample[0][3]}` | ||
42 | if ('{{ measurement.value_type.quantity }}' == 'time') { | ||
43 | const hours = Math.floor(value/60) | ||
44 | const minutes = Math.floor(value % 60) | ||
45 | const seconds = Math.floor((value * 60) % 60) | ||
46 | return `<strong>Duration:</strong> ${hours}:${minutes}:${seconds}, <br/> <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>` | ||
47 | } | ||
48 | return `<strong>Size:</strong> ${value.toFixed(2)} MB, <br/> <strong>Commit number:</strong> <a href="${commitLink}" target="_blank" rel="noreferrer noopener">${sample[0][0]}</a>` | ||
49 | ;} | ||
50 | }, | ||
51 | xAxis: { | ||
52 | type: 'time', | ||
53 | }, | ||
54 | yAxis: { | ||
55 | name: '{{ measurement.value_type.quantity }}' == 'time' ? 'Duration in minutes' : 'Disk size in MB', | ||
56 | type: 'value', | ||
57 | min: function(value) { | ||
58 | return Math.round(value.min - 0.5); | ||
59 | }, | ||
60 | max: function(value) { | ||
61 | return Math.round(value.max + 0.5); | ||
62 | } | ||
63 | }, | ||
64 | dataZoom: [ | ||
65 | { | ||
66 | type: 'slider', | ||
67 | xAxisIndex: 0, | ||
68 | filterMode: 'none' | ||
69 | }, | ||
70 | ], | ||
71 | series: [ | ||
72 | { | ||
73 | name: '{{ measurement.value_type.quantity }}', | ||
74 | type: 'line', | ||
75 | step: 'start', | ||
76 | symbol: 'none', | ||
77 | data: data | ||
78 | } | ||
79 | ] | ||
80 | }; | ||
81 | |||
82 | // Draw chart | ||
83 | const chart_div = document.getElementById('{{ chart_elem_id }}'); | ||
84 | // Set dark mode | ||
85 | let measurement_chart | ||
86 | if (window.matchMedia('(prefers-color-scheme: dark)').matches) { | ||
87 | measurement_chart= echarts.init(chart_div, 'dark', { | ||
88 | height: 320 | ||
46 | }); | 89 | }); |
47 | chart.draw(data, options); | 90 | } else { |
48 | } | 91 | measurement_chart= echarts.init(chart_div, null, { |
92 | height: 320 | ||
93 | }); | ||
94 | } | ||
95 | // Change chart size with browser resize | ||
96 | window.addEventListener('resize', function() { | ||
97 | measurement_chart.resize(); | ||
98 | }); | ||
99 | measurement_chart.setOption(option); | ||
49 | </script> | 100 | </script> |
50 | |||
diff --git a/scripts/lib/build_perf/html/report.html b/scripts/lib/build_perf/html/report.html index d1ba6f2578..537ed3ee52 100644 --- a/scripts/lib/build_perf/html/report.html +++ b/scripts/lib/build_perf/html/report.html | |||
@@ -3,11 +3,7 @@ | |||
3 | <head> | 3 | <head> |
4 | {# Scripts, for visualization#} | 4 | {# Scripts, for visualization#} |
5 | <!--START-OF-SCRIPTS--> | 5 | <!--START-OF-SCRIPTS--> |
6 | <script type="text/javascript" src="https://www.gstatic.com/charts/loader.js"></script> | 6 | <script src=" https://cdn.jsdelivr.net/npm/echarts@5.5.0/dist/echarts.min.js "></script> |
7 | <script type="text/javascript"> | ||
8 | google.charts.load('current', {'packages':['corechart']}); | ||
9 | var chartsDrawing = 0; | ||
10 | </script> | ||
11 | 7 | ||
12 | {# Render measurement result charts #} | 8 | {# Render measurement result charts #} |
13 | {% for test in test_data %} | 9 | {% for test in test_data %} |
@@ -23,28 +19,29 @@ var chartsDrawing = 0; | |||
23 | 19 | ||
24 | {# Styles #} | 20 | {# Styles #} |
25 | <style> | 21 | <style> |
22 | :root { | ||
23 | --text: #000; | ||
24 | --bg: #fff; | ||
25 | --h2heading: #707070; | ||
26 | --link: #0000EE; | ||
27 | --trtopborder: #9ca3af; | ||
28 | --trborder: #e5e7eb; | ||
29 | --chartborder: #f0f0f0; | ||
30 | } | ||
26 | .meta-table { | 31 | .meta-table { |
27 | font-size: 14px; | 32 | font-size: 14px; |
28 | text-align: left; | 33 | text-align: left; |
29 | border-collapse: collapse; | 34 | border-collapse: collapse; |
30 | } | 35 | } |
31 | .meta-table tr:nth-child(even){background-color: #f2f2f2} | ||
32 | meta-table th, .meta-table td { | ||
33 | padding: 4px; | ||
34 | } | ||
35 | .summary { | 36 | .summary { |
36 | margin: 0; | ||
37 | font-size: 14px; | 37 | font-size: 14px; |
38 | text-align: left; | 38 | text-align: left; |
39 | border-collapse: collapse; | 39 | border-collapse: collapse; |
40 | } | 40 | } |
41 | summary th, .meta-table td { | ||
42 | padding: 4px; | ||
43 | } | ||
44 | .measurement { | 41 | .measurement { |
45 | padding: 8px 0px 8px 8px; | 42 | padding: 8px 0px 8px 8px; |
46 | border: 2px solid #f0f0f0; | 43 | border: 2px solid var(--chartborder); |
47 | margin-bottom: 10px; | 44 | margin: 1.5rem 0; |
48 | } | 45 | } |
49 | .details { | 46 | .details { |
50 | margin: 0; | 47 | margin: 0; |
@@ -64,18 +61,71 @@ summary th, .meta-table td { | |||
64 | background-color: #f0f0f0; | 61 | background-color: #f0f0f0; |
65 | margin-left: 10px; | 62 | margin-left: 10px; |
66 | } | 63 | } |
67 | hr { | 64 | .card-container { |
68 | color: #f0f0f0; | 65 | border-bottom-width: 1px; |
66 | padding: 1.25rem 3rem; | ||
67 | box-shadow: 0 1px 3px 0 rgb(0 0 0 / 0.1), 0 1px 2px -1px rgb(0 0 0 / 0.1); | ||
68 | border-radius: 0.25rem; | ||
69 | } | ||
70 | body { | ||
71 | font-family: 'Helvetica', sans-serif; | ||
72 | margin: 3rem 8rem; | ||
73 | background-color: var(--bg); | ||
74 | color: var(--text); | ||
75 | } | ||
76 | h1 { | ||
77 | text-align: center; | ||
69 | } | 78 | } |
70 | h2 { | 79 | h2 { |
71 | font-size: 20px; | 80 | font-size: 1.5rem; |
72 | margin-bottom: 0px; | 81 | margin-bottom: 0px; |
73 | color: #707070; | 82 | color: var(--h2heading); |
83 | padding-top: 1.5rem; | ||
74 | } | 84 | } |
75 | h3 { | 85 | h3 { |
76 | font-size: 16px; | 86 | font-size: 1.3rem; |
77 | margin: 0px; | 87 | margin: 0px; |
78 | color: #707070; | 88 | color: var(--h2heading); |
89 | padding: 1.5rem 0; | ||
90 | } | ||
91 | h4 { | ||
92 | font-size: 14px; | ||
93 | font-weight: lighter; | ||
94 | line-height: 1.2rem; | ||
95 | margin: auto; | ||
96 | padding-top: 1rem; | ||
97 | } | ||
98 | table { | ||
99 | margin-top: 1.5rem; | ||
100 | line-height: 2rem; | ||
101 | } | ||
102 | tr { | ||
103 | border-bottom: 1px solid var(--trborder); | ||
104 | } | ||
105 | tr:first-child { | ||
106 | border-bottom: 1px solid var(--trtopborder); | ||
107 | } | ||
108 | tr:last-child { | ||
109 | border-bottom: none; | ||
110 | } | ||
111 | a { | ||
112 | text-decoration: none; | ||
113 | font-weight: bold; | ||
114 | color: var(--link); | ||
115 | } | ||
116 | a:hover { | ||
117 | color: #8080ff; | ||
118 | } | ||
119 | @media (prefers-color-scheme: dark) { | ||
120 | :root { | ||
121 | --text: #e9e8fa; | ||
122 | --bg: #0F0C28; | ||
123 | --h2heading: #B8B7CB; | ||
124 | --link: #87cefa; | ||
125 | --trtopborder: #394150; | ||
126 | --trborder: #212936; | ||
127 | --chartborder: #b1b0bf; | ||
128 | } | ||
79 | } | 129 | } |
80 | </style> | 130 | </style> |
81 | 131 | ||
@@ -83,13 +133,14 @@ h3 { | |||
83 | </head> | 133 | </head> |
84 | 134 | ||
85 | {% macro poky_link(commit) -%} | 135 | {% macro poky_link(commit) -%} |
86 | <a href="http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?id={{ commit }}">{{ commit[0:11] }}</a> | 136 | <a href="http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?id={{ commit }}">{{ commit[0:11] }}</a> |
87 | {%- endmacro %} | 137 | {%- endmacro %} |
88 | 138 | ||
89 | <body><div style="width: 700px"> | 139 | <body><div> |
140 | <h1 style="text-align: center;">Performance Test Report</h1> | ||
90 | {# Test metadata #} | 141 | {# Test metadata #} |
91 | <h2>General</h2> | 142 | <h2>General</h2> |
92 | <hr> | 143 | <h4>The table provides an overview of the comparison between two selected commits from the same branch.</h4> |
93 | <table class="meta-table" style="width: 100%"> | 144 | <table class="meta-table" style="width: 100%"> |
94 | <tr> | 145 | <tr> |
95 | <th></th> | 146 | <th></th> |
@@ -112,19 +163,21 @@ h3 { | |||
112 | 163 | ||
113 | {# Test result summary #} | 164 | {# Test result summary #} |
114 | <h2>Test result summary</h2> | 165 | <h2>Test result summary</h2> |
115 | <hr> | 166 | <h4>The test summary presents a thorough breakdown of each test conducted on the branch, including details such as build time and disk space consumption. Additionally, it gives insights into the average time taken for test execution, along with absolute and relative values for a better understanding.</h4> |
116 | <table class="summary" style="width: 100%"> | 167 | <table class="summary" style="width: 100%"> |
168 | <tr> | ||
169 | <th>Test name</th> | ||
170 | <th>Measurement description</th> | ||
171 | <th>Mean value</th> | ||
172 | <th>Absolute difference</th> | ||
173 | <th>Relative difference</th> | ||
174 | </tr> | ||
117 | {% for test in test_data %} | 175 | {% for test in test_data %} |
118 | {% if loop.index is even %} | ||
119 | {% set row_style = 'style="background-color: #f2f2f2"' %} | ||
120 | {% else %} | ||
121 | {% set row_style = 'style="background-color: #ffffff"' %} | ||
122 | {% endif %} | ||
123 | {% if test.status == 'SUCCESS' %} | 176 | {% if test.status == 'SUCCESS' %} |
124 | {% for measurement in test.measurements %} | 177 | {% for measurement in test.measurements %} |
125 | <tr {{ row_style }}> | 178 | <tr {{ row_style }}> |
126 | {% if loop.index == 1 %} | 179 | {% if loop.index == 1 %} |
127 | <td>{{ test.name }}: {{ test.description }}</td> | 180 | <td><a href=#{{test.name}}>{{ test.name }}: {{ test.description }}</a></td> |
128 | {% else %} | 181 | {% else %} |
129 | {# add empty cell in place of the test name#} | 182 | {# add empty cell in place of the test name#} |
130 | <td></td> | 183 | <td></td> |
@@ -153,10 +206,12 @@ h3 { | |||
153 | </table> | 206 | </table> |
154 | 207 | ||
155 | {# Detailed test results #} | 208 | {# Detailed test results #} |
209 | <h2>Test details</h2> | ||
210 | <h4>The following section provides details of each test, accompanied by charts representing build time and disk usage over time or by commit number.</h4> | ||
156 | {% for test in test_data %} | 211 | {% for test in test_data %} |
157 | <h2>{{ test.name }}: {{ test.description }}</h2> | 212 | <h3 style="color: #000;" id={{test.name}}>{{ test.name }}: {{ test.description }}</h3> |
158 | <hr> | ||
159 | {% if test.status == 'SUCCESS' %} | 213 | {% if test.status == 'SUCCESS' %} |
214 | <div class="card-container"> | ||
160 | {% for measurement in test.measurements %} | 215 | {% for measurement in test.measurements %} |
161 | <div class="measurement"> | 216 | <div class="measurement"> |
162 | <h3>{{ measurement.description }}</h3> | 217 | <h3>{{ measurement.description }}</h3> |
@@ -275,7 +330,8 @@ h3 { | |||
275 | {% endif %} | 330 | {% endif %} |
276 | {% endif %} | 331 | {% endif %} |
277 | </div> | 332 | </div> |
278 | {% endfor %} | 333 | {% endfor %} |
334 | </div> | ||
279 | {# Unsuccessful test #} | 335 | {# Unsuccessful test #} |
280 | {% else %} | 336 | {% else %} |
281 | <span style="font-size: 150%; font-weight: bold; color: red;">{{ test.status }} | 337 | <span style="font-size: 150%; font-weight: bold; color: red;">{{ test.status }} |
diff --git a/scripts/lib/build_perf/report.py b/scripts/lib/build_perf/report.py index ab77424cc7..f4e6a92e09 100644 --- a/scripts/lib/build_perf/report.py +++ b/scripts/lib/build_perf/report.py | |||
@@ -294,7 +294,7 @@ class SizeVal(MeasurementVal): | |||
294 | return "null" | 294 | return "null" |
295 | return self / 1024 | 295 | return self / 1024 |
296 | 296 | ||
297 | def measurement_stats(meas, prefix=''): | 297 | def measurement_stats(meas, prefix='', time=0): |
298 | """Get statistics of a measurement""" | 298 | """Get statistics of a measurement""" |
299 | if not meas: | 299 | if not meas: |
300 | return {prefix + 'sample_cnt': 0, | 300 | return {prefix + 'sample_cnt': 0, |
@@ -319,6 +319,8 @@ def measurement_stats(meas, prefix=''): | |||
319 | stats['quantity'] = val_cls.quantity | 319 | stats['quantity'] = val_cls.quantity |
320 | stats[prefix + 'sample_cnt'] = len(values) | 320 | stats[prefix + 'sample_cnt'] = len(values) |
321 | 321 | ||
322 | # Add start time for both type sysres and disk usage | ||
323 | start_time = time | ||
322 | mean_val = val_cls(mean(values)) | 324 | mean_val = val_cls(mean(values)) |
323 | min_val = val_cls(min(values)) | 325 | min_val = val_cls(min(values)) |
324 | max_val = val_cls(max(values)) | 326 | max_val = val_cls(max(values)) |
@@ -334,6 +336,7 @@ def measurement_stats(meas, prefix=''): | |||
334 | stats[prefix + 'max'] = max_val | 336 | stats[prefix + 'max'] = max_val |
335 | stats[prefix + 'minus'] = val_cls(mean_val - min_val) | 337 | stats[prefix + 'minus'] = val_cls(mean_val - min_val) |
336 | stats[prefix + 'plus'] = val_cls(max_val - mean_val) | 338 | stats[prefix + 'plus'] = val_cls(max_val - mean_val) |
339 | stats[prefix + 'start_time'] = start_time | ||
337 | 340 | ||
338 | return stats | 341 | return stats |
339 | 342 | ||
diff --git a/scripts/lib/devtool/standard.py b/scripts/lib/devtool/standard.py index bd009f44b1..1d0fe13788 100644 --- a/scripts/lib/devtool/standard.py +++ b/scripts/lib/devtool/standard.py | |||
@@ -387,6 +387,19 @@ def _git_ls_tree(repodir, treeish='HEAD', recursive=False): | |||
387 | ret[split[3]] = split[0:3] | 387 | ret[split[3]] = split[0:3] |
388 | return ret | 388 | return ret |
389 | 389 | ||
390 | def _git_modified(repodir): | ||
391 | """List the difference between HEAD and the index""" | ||
392 | import bb | ||
393 | cmd = ['git', 'status', '--porcelain'] | ||
394 | out, _ = bb.process.run(cmd, cwd=repodir) | ||
395 | ret = [] | ||
396 | if out: | ||
397 | for line in out.split("\n"): | ||
398 | if line and not line.startswith('??'): | ||
399 | ret.append(line[3:]) | ||
400 | return ret | ||
401 | |||
402 | |||
390 | def _git_exclude_path(srctree, path): | 403 | def _git_exclude_path(srctree, path): |
391 | """Return pathspec (list of paths) that excludes certain path""" | 404 | """Return pathspec (list of paths) that excludes certain path""" |
392 | # NOTE: "Filtering out" files/paths in this way is not entirely reliable - | 405 | # NOTE: "Filtering out" files/paths in this way is not entirely reliable - |
@@ -460,32 +473,6 @@ def sync(args, config, basepath, workspace): | |||
460 | finally: | 473 | finally: |
461 | tinfoil.shutdown() | 474 | tinfoil.shutdown() |
462 | 475 | ||
463 | def symlink_oelocal_files_srctree(rd, srctree): | ||
464 | import oe.patch | ||
465 | if os.path.abspath(rd.getVar('S')) == os.path.abspath(rd.getVar('WORKDIR')): | ||
466 | # If recipe extracts to ${WORKDIR}, symlink the files into the srctree | ||
467 | # (otherwise the recipe won't build as expected) | ||
468 | local_files_dir = os.path.join(srctree, 'oe-local-files') | ||
469 | addfiles = [] | ||
470 | for root, _, files in os.walk(local_files_dir): | ||
471 | relpth = os.path.relpath(root, local_files_dir) | ||
472 | if relpth != '.': | ||
473 | bb.utils.mkdirhier(os.path.join(srctree, relpth)) | ||
474 | for fn in files: | ||
475 | if fn == '.gitignore': | ||
476 | continue | ||
477 | destpth = os.path.join(srctree, relpth, fn) | ||
478 | if os.path.exists(destpth): | ||
479 | os.unlink(destpth) | ||
480 | if relpth != '.': | ||
481 | back_relpth = os.path.relpath(local_files_dir, root) | ||
482 | os.symlink('%s/oe-local-files/%s/%s' % (back_relpth, relpth, fn), destpth) | ||
483 | else: | ||
484 | os.symlink('oe-local-files/%s' % fn, destpth) | ||
485 | addfiles.append(os.path.join(relpth, fn)) | ||
486 | if addfiles: | ||
487 | oe.patch.GitApplyTree.commitIgnored("Add local file symlinks", dir=srctree, files=addfiles, d=rd) | ||
488 | |||
489 | def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil, no_overrides=False): | 476 | def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil, no_overrides=False): |
490 | """Extract sources of a recipe""" | 477 | """Extract sources of a recipe""" |
491 | import oe.recipeutils | 478 | import oe.recipeutils |
@@ -657,9 +644,6 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
657 | elif not os.path.exists(workshareddir): | 644 | elif not os.path.exists(workshareddir): |
658 | oe.path.copyhardlinktree(srcsubdir, workshareddir) | 645 | oe.path.copyhardlinktree(srcsubdir, workshareddir) |
659 | 646 | ||
660 | tempdir_localdir = os.path.join(tempdir, 'oe-local-files') | ||
661 | srctree_localdir = os.path.join(srctree, 'oe-local-files') | ||
662 | |||
663 | if sync: | 647 | if sync: |
664 | try: | 648 | try: |
665 | logger.info('Backing up current %s branch as branch: %s.bak' % (devbranch, devbranch)) | 649 | logger.info('Backing up current %s branch as branch: %s.bak' % (devbranch, devbranch)) |
@@ -674,29 +658,8 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
674 | except bb.process.ExecutionError as e: | 658 | except bb.process.ExecutionError as e: |
675 | raise DevtoolError("Error when syncing source files to local checkout: %s" % str(e)) | 659 | raise DevtoolError("Error when syncing source files to local checkout: %s" % str(e)) |
676 | 660 | ||
677 | # Move the oe-local-files directory to srctree. | ||
678 | # As oe-local-files is not part of the constructed git tree, | ||
679 | # removing it directly during the synchronization might surprise | ||
680 | # the user. Instead, we move it to oe-local-files.bak and remind | ||
681 | # the user in the log message. | ||
682 | if os.path.exists(srctree_localdir + '.bak'): | ||
683 | shutil.rmtree(srctree_localdir + '.bak') | ||
684 | |||
685 | if os.path.exists(srctree_localdir): | ||
686 | logger.info('Backing up current local file directory %s' % srctree_localdir) | ||
687 | shutil.move(srctree_localdir, srctree_localdir + '.bak') | ||
688 | |||
689 | if os.path.exists(tempdir_localdir): | ||
690 | logger.info('Syncing local source files to srctree...') | ||
691 | shutil.copytree(tempdir_localdir, srctree_localdir) | ||
692 | else: | 661 | else: |
693 | # Move oe-local-files directory to srctree | ||
694 | if os.path.exists(tempdir_localdir): | ||
695 | logger.info('Adding local source files to srctree...') | ||
696 | shutil.move(tempdir_localdir, srcsubdir) | ||
697 | |||
698 | shutil.move(srcsubdir, srctree) | 662 | shutil.move(srcsubdir, srctree) |
699 | symlink_oelocal_files_srctree(d, srctree) | ||
700 | 663 | ||
701 | if is_kernel_yocto: | 664 | if is_kernel_yocto: |
702 | logger.info('Copying kernel config to srctree') | 665 | logger.info('Copying kernel config to srctree') |
@@ -852,34 +815,22 @@ def modify(args, config, basepath, workspace): | |||
852 | if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch): | 815 | if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch): |
853 | oe.path.copyhardlinktree(srcdir, srctree) | 816 | oe.path.copyhardlinktree(srcdir, srctree) |
854 | workdir = rd.getVar('WORKDIR') | 817 | workdir = rd.getVar('WORKDIR') |
818 | unpackdir = rd.getVar('UNPACKDIR') | ||
855 | srcsubdir = rd.getVar('S') | 819 | srcsubdir = rd.getVar('S') |
856 | localfilesdir = os.path.join(srctree, 'oe-local-files') | 820 | localfilesdir = os.path.join(srctree, 'oe-local-files') |
857 | # Move local source files into separate subdir | ||
858 | recipe_patches = [os.path.basename(patch) for patch in oe.recipeutils.get_recipe_patches(rd)] | ||
859 | local_files = oe.recipeutils.get_recipe_local_files(rd) | ||
860 | 821 | ||
861 | for key in local_files.copy(): | 822 | # Add locally copied files to gitignore as we add back to the metadata directly |
862 | if key.endswith('scc'): | 823 | local_files = oe.recipeutils.get_recipe_local_files(rd) |
863 | sccfile = open(local_files[key], 'r') | ||
864 | for l in sccfile: | ||
865 | line = l.split() | ||
866 | if line and line[0] in ('kconf', 'patch'): | ||
867 | cfg = os.path.join(os.path.dirname(local_files[key]), line[-1]) | ||
868 | if not cfg in local_files.values(): | ||
869 | local_files[line[-1]] = cfg | ||
870 | shutil.copy2(cfg, workdir) | ||
871 | sccfile.close() | ||
872 | |||
873 | # Ignore local files with subdir={BP} | ||
874 | srcabspath = os.path.abspath(srcsubdir) | 824 | srcabspath = os.path.abspath(srcsubdir) |
875 | local_files = [fname for fname in local_files if os.path.exists(os.path.join(workdir, fname)) and (srcabspath == workdir or not os.path.join(workdir, fname).startswith(srcabspath + os.sep))] | 825 | local_files = [fname for fname in local_files if |
826 | os.path.exists(os.path.join(unpackdir, fname)) and | ||
827 | srcabspath == unpackdir] | ||
876 | if local_files: | 828 | if local_files: |
877 | for fname in local_files: | 829 | with open(os.path.join(srctree, '.gitignore'), 'a+') as f: |
878 | _move_file(os.path.join(workdir, fname), os.path.join(srctree, 'oe-local-files', fname)) | 830 | f.write('# Ignore local files, by default. Remove following lines' |
879 | with open(os.path.join(srctree, 'oe-local-files', '.gitignore'), 'w') as f: | 831 | 'if you want to commit the directory to Git\n') |
880 | f.write('# Ignore local files, by default. Remove this file if you want to commit the directory to Git\n*\n') | 832 | for fname in local_files: |
881 | 833 | f.write('%s\n' % fname) | |
882 | symlink_oelocal_files_srctree(rd, srctree) | ||
883 | 834 | ||
884 | task = 'do_configure' | 835 | task = 'do_configure' |
885 | res = tinfoil.build_targets(pn, task, handle_events=True) | 836 | res = tinfoil.build_targets(pn, task, handle_events=True) |
@@ -904,7 +855,10 @@ def modify(args, config, basepath, workspace): | |||
904 | (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_revs["."], cwd=srctree) | 855 | (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_revs["."], cwd=srctree) |
905 | commits["."] = stdout.split() | 856 | commits["."] = stdout.split() |
906 | check_commits = True | 857 | check_commits = True |
907 | (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse devtool-base` $PWD\'', cwd=srctree) | 858 | try: |
859 | (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse devtool-base` $PWD\'', cwd=srctree) | ||
860 | except bb.process.ExecutionError: | ||
861 | stdout = "" | ||
908 | for line in stdout.splitlines(): | 862 | for line in stdout.splitlines(): |
909 | (rev, submodule_path) = line.split() | 863 | (rev, submodule_path) = line.split() |
910 | submodule = os.path.relpath(submodule_path, srctree) | 864 | submodule = os.path.relpath(submodule_path, srctree) |
@@ -1475,6 +1429,7 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
1475 | # Instead they are directly copied over the original source files (in | 1429 | # Instead they are directly copied over the original source files (in |
1476 | # recipe space). | 1430 | # recipe space). |
1477 | existing_files = oe.recipeutils.get_recipe_local_files(rd) | 1431 | existing_files = oe.recipeutils.get_recipe_local_files(rd) |
1432 | |||
1478 | new_set = None | 1433 | new_set = None |
1479 | updated = OrderedDict() | 1434 | updated = OrderedDict() |
1480 | added = OrderedDict() | 1435 | added = OrderedDict() |
@@ -1491,24 +1446,28 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
1491 | if branchname.startswith(override_branch_prefix): | 1446 | if branchname.startswith(override_branch_prefix): |
1492 | return (updated, added, removed) | 1447 | return (updated, added, removed) |
1493 | 1448 | ||
1494 | local_files_dir = os.path.join(srctreebase, 'oe-local-files') | 1449 | files = _git_modified(srctree) |
1495 | git_files = _git_ls_tree(srctree) | 1450 | #if not files: |
1496 | if 'oe-local-files' in git_files: | 1451 | # files = _ls_tree(srctree) |
1497 | # If tracked by Git, take the files from srctree HEAD. First get | 1452 | for f in files: |
1498 | # the tree object of the directory | 1453 | fullfile = os.path.join(srctree, f) |
1499 | tmp_index = os.path.join(srctree, '.git', 'index.tmp.devtool') | 1454 | if os.path.exists(os.path.join(fullfile, ".git")): |
1500 | tree = git_files['oe-local-files'][2] | 1455 | # submodules handled elsewhere |
1501 | bb.process.run(['git', 'checkout', tree, '--', '.'], cwd=srctree, | 1456 | continue |
1502 | env=dict(os.environ, GIT_WORK_TREE=destdir, | 1457 | if f not in existing_files: |
1503 | GIT_INDEX_FILE=tmp_index)) | 1458 | added[f] = {} |
1504 | new_set = list(_git_ls_tree(srctree, tree, True).keys()) | 1459 | if os.path.isdir(os.path.join(srctree, f)): |
1505 | elif os.path.isdir(local_files_dir): | 1460 | shutil.copytree(fullfile, os.path.join(destdir, f)) |
1506 | # If not tracked by Git, just copy from working copy | 1461 | else: |
1507 | new_set = _ls_tree(local_files_dir) | 1462 | shutil.copy2(fullfile, os.path.join(destdir, f)) |
1508 | bb.process.run(['cp', '-ax', | 1463 | elif not os.path.exists(fullfile): |
1509 | os.path.join(local_files_dir, '.'), destdir]) | 1464 | removed[f] = existing_files[f] |
1510 | else: | 1465 | elif f in existing_files: |
1511 | new_set = [] | 1466 | updated[f] = {'path' : existing_files[f]} |
1467 | if os.path.isdir(os.path.join(srctree, f)): | ||
1468 | shutil.copytree(fullfile, os.path.join(destdir, f)) | ||
1469 | else: | ||
1470 | shutil.copy2(fullfile, os.path.join(destdir, f)) | ||
1512 | 1471 | ||
1513 | # Special handling for kernel config | 1472 | # Special handling for kernel config |
1514 | if bb.data.inherits_class('kernel-yocto', rd): | 1473 | if bb.data.inherits_class('kernel-yocto', rd): |
@@ -1516,17 +1475,14 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
1516 | fragment_path = os.path.join(destdir, fragment_fn) | 1475 | fragment_path = os.path.join(destdir, fragment_fn) |
1517 | if _create_kconfig_diff(srctree, rd, fragment_path): | 1476 | if _create_kconfig_diff(srctree, rd, fragment_path): |
1518 | if os.path.exists(fragment_path): | 1477 | if os.path.exists(fragment_path): |
1519 | if fragment_fn not in new_set: | 1478 | if fragment_fn in removed: |
1520 | new_set.append(fragment_fn) | 1479 | del removed[fragment_fn] |
1521 | # Copy fragment to local-files | 1480 | if fragment_fn not in updated and fragment_fn not in added: |
1522 | if os.path.isdir(local_files_dir): | 1481 | added[fragment_fn] = {} |
1523 | shutil.copy2(fragment_path, local_files_dir) | ||
1524 | else: | 1482 | else: |
1525 | if fragment_fn in new_set: | 1483 | if fragment_fn in updated: |
1526 | new_set.remove(fragment_fn) | 1484 | removed[fragment_fn] = updated[fragment_fn]
1527 | # Remove fragment from local-files | 1485 | del updated[fragment_fn] |
1528 | if os.path.exists(os.path.join(local_files_dir, fragment_fn)): | ||
1529 | os.unlink(os.path.join(local_files_dir, fragment_fn)) | ||
1530 | 1486 | ||
1531 | # Special handling for cml1, ccmake, etc bbclasses that generated | 1487 | # Special handling for cml1, ccmake, etc bbclasses that generated |
1532 | # configuration fragment files that are consumed as source files | 1488 | # configuration fragment files that are consumed as source files |
@@ -1534,42 +1490,13 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
1534 | if bb.data.inherits_class(frag_class, rd): | 1490 | if bb.data.inherits_class(frag_class, rd): |
1535 | srcpath = os.path.join(rd.getVar('WORKDIR'), frag_name) | 1491 | srcpath = os.path.join(rd.getVar('WORKDIR'), frag_name) |
1536 | if os.path.exists(srcpath): | 1492 | if os.path.exists(srcpath): |
1537 | if frag_name not in new_set: | 1493 | if frag_name in removed: |
1538 | new_set.append(frag_name) | 1494 | del removed[frag_name] |
1495 | if frag_name not in updated: | ||
1496 | added[frag_name] = {} | ||
1539 | # copy fragment into destdir | 1497 | # copy fragment into destdir |
1540 | shutil.copy2(srcpath, destdir) | 1498 | shutil.copy2(srcpath, destdir) |
1541 | # copy fragment into local files if exists | 1499 | |
1542 | if os.path.isdir(local_files_dir): | ||
1543 | shutil.copy2(srcpath, local_files_dir) | ||
1544 | |||
1545 | if new_set is not None: | ||
1546 | for fname in new_set: | ||
1547 | if fname in existing_files: | ||
1548 | origpath = existing_files.pop(fname) | ||
1549 | workpath = os.path.join(local_files_dir, fname) | ||
1550 | if not filecmp.cmp(origpath, workpath): | ||
1551 | updated[fname] = {'path' : origpath} | ||
1552 | elif fname != '.gitignore': | ||
1553 | added[fname] = {} | ||
1554 | |||
1555 | workdir = rd.getVar('WORKDIR') | ||
1556 | s = rd.getVar('S') | ||
1557 | if not s.endswith(os.sep): | ||
1558 | s += os.sep | ||
1559 | |||
1560 | if workdir != s: | ||
1561 | # Handle files where subdir= was specified | ||
1562 | for fname in list(existing_files.keys()): | ||
1563 | # FIXME handle both subdir starting with BP and not? | ||
1564 | fworkpath = os.path.join(workdir, fname) | ||
1565 | if fworkpath.startswith(s): | ||
1566 | fpath = os.path.join(srctree, os.path.relpath(fworkpath, s)) | ||
1567 | if os.path.exists(fpath): | ||
1568 | origpath = existing_files.pop(fname) | ||
1569 | if not filecmp.cmp(origpath, fpath): | ||
1570 | updated[fpath] = {'path' : origpath} | ||
1571 | |||
1572 | removed = existing_files | ||
1573 | return (updated, added, removed) | 1500 | return (updated, added, removed) |
1574 | 1501 | ||
1575 | 1502 | ||
diff --git a/scripts/lib/devtool/upgrade.py b/scripts/lib/devtool/upgrade.py index fa5b8ef3c7..a8130ed23f 100644 --- a/scripts/lib/devtool/upgrade.py +++ b/scripts/lib/devtool/upgrade.py | |||
@@ -32,7 +32,7 @@ def _run(cmd, cwd=''): | |||
32 | 32 | ||
33 | def _get_srctree(tmpdir): | 33 | def _get_srctree(tmpdir): |
34 | srctree = tmpdir | 34 | srctree = tmpdir |
35 | dirs = scriptutils.filter_src_subdirs(tmpdir) | 35 | dirs = os.listdir(tmpdir) |
36 | if len(dirs) == 1: | 36 | if len(dirs) == 1: |
37 | srctree = os.path.join(tmpdir, dirs[0]) | 37 | srctree = os.path.join(tmpdir, dirs[0]) |
38 | else: | 38 | else: |
diff --git a/scripts/lib/recipetool/create.py b/scripts/lib/recipetool/create.py index 8e9ff38db6..066366e34f 100644 --- a/scripts/lib/recipetool/create.py +++ b/scripts/lib/recipetool/create.py | |||
@@ -528,7 +528,7 @@ def create_recipe(args): | |||
528 | if ftmpdir and args.keep_temp: | 528 | if ftmpdir and args.keep_temp: |
529 | logger.info('Fetch temp directory is %s' % ftmpdir) | 529 | logger.info('Fetch temp directory is %s' % ftmpdir) |
530 | 530 | ||
531 | dirlist = scriptutils.filter_src_subdirs(srctree) | 531 | dirlist = os.listdir(srctree) |
532 | logger.debug('Directory listing (excluding filtered out):\n %s' % '\n '.join(dirlist)) | 532 | logger.debug('Directory listing (excluding filtered out):\n %s' % '\n '.join(dirlist)) |
533 | if len(dirlist) == 1: | 533 | if len(dirlist) == 1: |
534 | singleitem = os.path.join(srctree, dirlist[0]) | 534 | singleitem = os.path.join(srctree, dirlist[0]) |
diff --git a/scripts/lib/scriptutils.py b/scripts/lib/scriptutils.py index f23e53cba9..81f0b01fa5 100644 --- a/scripts/lib/scriptutils.py +++ b/scripts/lib/scriptutils.py | |||
@@ -179,6 +179,8 @@ def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False, mirr | |||
179 | f.write('SRCREV = "%s"\n' % srcrev) | 179 | f.write('SRCREV = "%s"\n' % srcrev) |
180 | f.write('PV = "0.0+"\n') | 180 | f.write('PV = "0.0+"\n') |
181 | f.write('WORKDIR = "%s"\n' % tmpworkdir) | 181 | f.write('WORKDIR = "%s"\n' % tmpworkdir) |
182 | f.write('UNPACKDIR = "%s"\n' % destdir) | ||
183 | |||
182 | # Set S out of the way so it doesn't get created under the workdir | 184 | # Set S out of the way so it doesn't get created under the workdir |
183 | f.write('S = "%s"\n' % os.path.join(tmpdir, 'emptysrc')) | 185 | f.write('S = "%s"\n' % os.path.join(tmpdir, 'emptysrc')) |
184 | if not mirrors: | 186 | if not mirrors: |
@@ -232,10 +234,6 @@ def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False, mirr | |||
232 | if e.errno != errno.ENOTEMPTY: | 234 | if e.errno != errno.ENOTEMPTY: |
233 | raise | 235 | raise |
234 | 236 | ||
235 | bb.utils.mkdirhier(destdir) | ||
236 | for fn in os.listdir(tmpworkdir): | ||
237 | shutil.move(os.path.join(tmpworkdir, fn), destdir) | ||
238 | |||
239 | finally: | 237 | finally: |
240 | if not preserve_tmp: | 238 | if not preserve_tmp: |
241 | shutil.rmtree(tmpdir) | 239 | shutil.rmtree(tmpdir) |
@@ -271,12 +269,3 @@ def is_src_url(param): | |||
271 | return True | 269 | return True |
272 | return False | 270 | return False |
273 | 271 | ||
274 | def filter_src_subdirs(pth): | ||
275 | """ | ||
276 | Filter out subdirectories of initial unpacked source trees that we do not care about. | ||
277 | Used by devtool and recipetool. | ||
278 | """ | ||
279 | dirlist = os.listdir(pth) | ||
280 | filterout = ['git.indirectionsymlink', 'source-date-epoch', 'sstate-install-recipe_qa'] | ||
281 | dirlist = [x for x in dirlist if x not in filterout] | ||
282 | return dirlist | ||
diff --git a/scripts/oe-build-perf-report b/scripts/oe-build-perf-report index 7812ea4540..6c3c726ee3 100755 --- a/scripts/oe-build-perf-report +++ b/scripts/oe-build-perf-report | |||
@@ -336,8 +336,12 @@ def print_html_report(data, id_comp, buildstats): | |||
336 | test_i = test_data['tests'][test] | 336 | test_i = test_data['tests'][test] |
337 | meas_i = test_i['measurements'][meas] | 337 | meas_i = test_i['measurements'][meas] |
338 | commit_num = get_data_item(meta, 'layers.meta.commit_count') | 338 | commit_num = get_data_item(meta, 'layers.meta.commit_count') |
339 | samples.append(measurement_stats(meas_i)) | 339 | commit = get_data_item(meta, 'layers.meta.commit') |
340 | # Add start_time for both test measurement types of sysres and disk usage | ||
341 | start_time = test_i['start_time'][0] | ||
342 | samples.append(measurement_stats(meas_i, '', start_time)) | ||
340 | samples[-1]['commit_num'] = commit_num | 343 | samples[-1]['commit_num'] = commit_num |
344 | samples[-1]['commit'] = commit | ||
341 | 345 | ||
342 | absdiff = samples[-1]['val_cls'](samples[-1]['mean'] - samples[id_comp]['mean']) | 346 | absdiff = samples[-1]['val_cls'](samples[-1]['mean'] - samples[id_comp]['mean']) |
343 | reldiff = absdiff * 100 / samples[id_comp]['mean'] | 347 | reldiff = absdiff * 100 / samples[id_comp]['mean'] |
@@ -473,7 +477,7 @@ Examine build performance test results from a Git repository""" | |||
473 | group.add_argument('--branch', '-B', default='master', help="Branch to find commit in") | 477 | group.add_argument('--branch', '-B', default='master', help="Branch to find commit in") |
474 | group.add_argument('--branch2', help="Branch to find comparision revisions in") | 478 | group.add_argument('--branch2', help="Branch to find comparision revisions in") |
475 | group.add_argument('--machine', default='qemux86') | 479 | group.add_argument('--machine', default='qemux86') |
476 | group.add_argument('--history-length', default=25, type=int, | 480 | group.add_argument('--history-length', default=300, type=int, |
477 | help="Number of tested revisions to plot in html report") | 481 | help="Number of tested revisions to plot in html report") |
478 | group.add_argument('--commit', | 482 | group.add_argument('--commit', |
479 | help="Revision to search for") | 483 | help="Revision to search for") |