Diffstat (limited to 'meta/lib/oeqa')
-rw-r--r--  meta/lib/oeqa/core/target/serial.py | 315
-rw-r--r--  meta/lib/oeqa/core/target/ssh.py | 16
-rw-r--r--  meta/lib/oeqa/core/tests/common.py | 1
-rw-r--r--  meta/lib/oeqa/manual/crops.json | 294
-rw-r--r--  meta/lib/oeqa/manual/eclipse-plugin.json | 322
-rw-r--r--  meta/lib/oeqa/runtime/cases/ltp.py | 2
-rw-r--r--  meta/lib/oeqa/runtime/cases/parselogs-ignores-mipsarch.txt | 6
-rw-r--r--  meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuall.txt | 8
-rw-r--r--  meta/lib/oeqa/runtime/cases/parselogs.py | 2
-rw-r--r--  meta/lib/oeqa/runtime/cases/scp.py | 2
-rw-r--r--  meta/lib/oeqa/runtime/cases/ssh.py | 31
-rw-r--r--  meta/lib/oeqa/runtime/cases/systemd.py | 23
-rw-r--r--  meta/lib/oeqa/runtime/context.py | 12
-rw-r--r--  meta/lib/oeqa/sdk/case.py | 9
-rw-r--r--  meta/lib/oeqa/sdk/cases/autotools.py (renamed from meta/lib/oeqa/sdk/cases/buildcpio.py) | 7
-rw-r--r--  meta/lib/oeqa/sdk/cases/cmake.py (renamed from meta/lib/oeqa/sdk/cases/assimp.py) | 16
-rw-r--r--  meta/lib/oeqa/sdk/cases/gcc.py | 4
-rw-r--r--  meta/lib/oeqa/sdk/cases/gtk3.py (renamed from meta/lib/oeqa/sdk/cases/buildgalculator.py) | 6
-rw-r--r--  meta/lib/oeqa/sdk/cases/kmod.py | 41
-rw-r--r--  meta/lib/oeqa/sdk/cases/makefile.py (renamed from meta/lib/oeqa/sdk/cases/buildlzip.py) | 10
-rw-r--r--  meta/lib/oeqa/sdk/cases/maturin.py | 1
-rw-r--r--  meta/lib/oeqa/sdk/cases/meson.py (renamed from meta/lib/oeqa/sdk/cases/buildepoxy.py) | 8
-rw-r--r--  meta/lib/oeqa/sdk/cases/python.py | 2
-rw-r--r--  meta/lib/oeqa/sdk/cases/rust.py | 1
-rw-r--r--  meta/lib/oeqa/sdkext/cases/devtool.py | 7
-rw-r--r--  meta/lib/oeqa/selftest/cases/bbclasses.py | 106
-rw-r--r--  meta/lib/oeqa/selftest/cases/binutils.py | 2
-rw-r--r--  meta/lib/oeqa/selftest/cases/buildoptions.py | 10
-rw-r--r--  meta/lib/oeqa/selftest/cases/cve_check.py | 51
-rw-r--r--  meta/lib/oeqa/selftest/cases/debuginfod.py | 14
-rw-r--r--  meta/lib/oeqa/selftest/cases/devtool.py | 109
-rw-r--r--  meta/lib/oeqa/selftest/cases/distrodata.py | 20
-rw-r--r--  meta/lib/oeqa/selftest/cases/fitimage.py | 264
-rw-r--r--  meta/lib/oeqa/selftest/cases/gcc.py | 2
-rw-r--r--  meta/lib/oeqa/selftest/cases/imagefeatures.py | 15
-rw-r--r--  meta/lib/oeqa/selftest/cases/incompatible_lic.py | 6
-rw-r--r--  meta/lib/oeqa/selftest/cases/layerappend.py | 2
-rw-r--r--  meta/lib/oeqa/selftest/cases/locales.py | 2
-rw-r--r--  meta/lib/oeqa/selftest/cases/meta_ide.py | 4
-rw-r--r--  meta/lib/oeqa/selftest/cases/minidebuginfo.py | 16
-rw-r--r--  meta/lib/oeqa/selftest/cases/oescripts.py | 2
-rw-r--r--  meta/lib/oeqa/selftest/cases/overlayfs.py | 41
-rw-r--r--  meta/lib/oeqa/selftest/cases/package.py | 26
-rw-r--r--  meta/lib/oeqa/selftest/cases/picolibc.py | 18
-rw-r--r--  meta/lib/oeqa/selftest/cases/recipetool.py | 21
-rw-r--r--  meta/lib/oeqa/selftest/cases/recipeutils.py | 4
-rw-r--r--  meta/lib/oeqa/selftest/cases/reproducible.py | 11
-rw-r--r--  meta/lib/oeqa/selftest/cases/retain.py | 241
-rw-r--r--  meta/lib/oeqa/selftest/cases/runtime_test.py | 7
-rw-r--r--  meta/lib/oeqa/selftest/cases/rust.py | 118
-rw-r--r--  meta/lib/oeqa/selftest/cases/spdx.py | 134
-rw-r--r--  meta/lib/oeqa/selftest/cases/sstatetests.py | 47
-rw-r--r--  meta/lib/oeqa/selftest/context.py | 5
-rw-r--r--  meta/lib/oeqa/utils/__init__.py | 8
-rw-r--r--  meta/lib/oeqa/utils/commands.py | 18
-rw-r--r--  meta/lib/oeqa/utils/postactions.py | 69
56 files changed, 1485 insertions, 1054 deletions
diff --git a/meta/lib/oeqa/core/target/serial.py b/meta/lib/oeqa/core/target/serial.py
new file mode 100644
index 0000000000..7c2cd8b248
--- /dev/null
+++ b/meta/lib/oeqa/core/target/serial.py
@@ -0,0 +1,315 @@
+#
+# SPDX-License-Identifier: MIT
+#
+
+import base64
+import logging
+import os
+from threading import Lock
+from . import OETarget
+
+class OESerialTarget(OETarget):
+
+    def __init__(self, logger, target_ip, server_ip, server_port=0,
+                 timeout=300, serialcontrol_cmd=None, serialcontrol_extra_args=None,
+                 serialcontrol_ps1=None, serialcontrol_connect_timeout=None,
+                 machine=None, **kwargs):
+        if not logger:
+            logger = logging.getLogger('target')
+            logger.setLevel(logging.INFO)
+            filePath = os.path.join(os.getcwd(), 'remoteTarget.log')
+            fileHandler = logging.FileHandler(filePath, 'w', 'utf-8')
+            formatter = logging.Formatter(
+                        '%(asctime)s.%(msecs)03d %(levelname)s: %(message)s',
+                        '%H:%M:%S')
+            fileHandler.setFormatter(formatter)
+            logger.addHandler(fileHandler)
+
+        super(OESerialTarget, self).__init__(logger)
+
+        if serialcontrol_ps1:
+            self.target_ps1 = serialcontrol_ps1
+        elif machine:
+            # fallback to a default value which assumes root@machine
+            self.target_ps1 = f'root@{machine}:.*# '
+        else:
+            raise ValueError("Unable to determine shell command prompt (PS1) format.")
+
+        if not serialcontrol_cmd:
+            raise ValueError("Unable to determine serial control command.")
+
+        if serialcontrol_extra_args:
+            self.connection_script = f'{serialcontrol_cmd} {serialcontrol_extra_args}'
+        else:
+            self.connection_script = serialcontrol_cmd
+
+        if serialcontrol_connect_timeout:
+            self.connect_timeout = serialcontrol_connect_timeout
+        else:
+            self.connect_timeout = 10 # default to 10s connection timeout
+
+        self.default_command_timeout = timeout
+        self.ip = target_ip
+        self.server_ip = server_ip
+        self.server_port = server_port
+        self.conn = None
+        self.mutex = Lock()
+
+    def start(self, **kwargs):
+        pass
+
+    def stop(self, **kwargs):
+        pass
+
+    def get_connection(self):
+        if self.conn is None:
+            self.conn = SerialConnection(self.connection_script,
+                                         self.target_ps1,
+                                         self.connect_timeout,
+                                         self.default_command_timeout)
+
+        return self.conn
+
+    def run(self, cmd, timeout=None):
+        """
+        Runs command on target over the provided serial connection.
+        The first call will open the connection, and subsequent
+        calls will re-use the same connection to send new commands.
+
+        command:    Command to run on target.
+        timeout:    <value>: Kill command after <val> seconds.
+                    None: Kill command default value seconds.
+                    0: No timeout, runs until return.
+        """
+        # Lock needed to avoid multiple threads running commands concurrently
+        # A serial connection can only be used by one caller at a time
+        with self.mutex:
+            conn = self.get_connection()
+
+            self.logger.debug(f"[Running]$ {cmd}")
+            # Run the command, then echo $? to get the command's return code
+            try:
+                output = conn.run_command(cmd, timeout)
+                status = conn.run_command("echo $?")
+                self.logger.debug(f" [stdout]: {output}")
+                self.logger.debug(f" [ret code]: {status}\n\n")
+            except SerialTimeoutException as e:
+                self.logger.debug(e)
+                output = ""
+                status = 255
+
+            # Return to $HOME after each command to simulate a stateless SSH connection
+            conn.run_command('cd "$HOME"')
+
+            return (int(status), output)
+
+    def copyTo(self, localSrc, remoteDst):
+        """
+        Copies files by converting them to base 32, then transferring
+        the ASCII text to the target, and decoding it in place on the
+        target.
+
+        On a 115k baud serial connection, this method transfers at
+        roughly 30kbps.
+        """
+        with open(localSrc, 'rb') as file:
+            data = file.read()
+
+        b32 = base64.b32encode(data).decode('utf-8')
+
+        # To avoid shell line limits, send a chunk at a time
+        SPLIT_LEN = 512
+        lines = [b32[i:i+SPLIT_LEN] for i in range(0, len(b32), SPLIT_LEN)]
+
+        with self.mutex:
+            conn = self.get_connection()
+
+            filename = os.path.basename(localSrc)
+            TEMP = f'/tmp/{filename}.b32'
+
+            # Create or empty out the temp file
+            conn.run_command(f'echo -n "" > {TEMP}')
+
+            for line in lines:
+                conn.run_command(f'echo -n {line} >> {TEMP}')
+
+            # Check to see whether the remoteDst is a directory
+            is_directory = conn.run_command(f'[[ -d {remoteDst} ]]; echo $?')
+            if int(is_directory) == 0:
+                # append the localSrc filename to the end of remoteDst
+                remoteDst = os.path.join(remoteDst, filename)
+
+            conn.run_command(f'base32 -d {TEMP} > {remoteDst}')
+            conn.run_command(f'rm {TEMP}')
+
+        return 0, 'Success'
+
+    def copyFrom(self, remoteSrc, localDst):
+        """
+        Copies files by converting them to base 32 on the target, then
+        transferring the ASCII text to the host. That text is then
+        decoded here and written out to the destination.
+
+        On a 115k baud serial connection, this method transfers at
+        roughly 30kbps.
+        """
+        with self.mutex:
+            b32 = self.get_connection().run_command(f'base32 {remoteSrc}')
+
+        data = base64.b32decode(b32.replace('\r\n', ''))
+
+        # If the local path is a directory, get the filename from
+        # the remoteSrc path and append it to localDst
+        if os.path.isdir(localDst):
+            filename = os.path.basename(remoteSrc)
+            localDst = os.path.join(localDst, filename)
+
+        with open(localDst, 'wb') as file:
+            file.write(data)
+
+        return 0, 'Success'
+
+    def copyDirTo(self, localSrc, remoteDst):
+        """
+        Copy recursively localSrc directory to remoteDst in target.
+        """
+
+        for root, dirs, files in os.walk(localSrc):
+            # Create directories in the target as needed
+            for d in dirs:
+                tmpDir = os.path.join(root, d).replace(localSrc, "")
+                newDir = os.path.join(remoteDst, tmpDir.lstrip("/"))
+                cmd = "mkdir -p %s" % newDir
+                self.run(cmd)
+
+            # Copy files into the target
+            for f in files:
+                tmpFile = os.path.join(root, f).replace(localSrc, "")
+                dstFile = os.path.join(remoteDst, tmpFile.lstrip("/"))
+                srcFile = os.path.join(root, f)
+                self.copyTo(srcFile, dstFile)
+
+    def deleteFiles(self, remotePath, files):
+        """
+        Deletes files in target's remotePath.
+        """
+
+        cmd = "rm"
+        if not isinstance(files, list):
+            files = [files]
+
+        for f in files:
+            cmd = "%s %s" % (cmd, os.path.join(remotePath, f))
+
+        self.run(cmd)
+
+    def deleteDir(self, remotePath):
+        """
+        Deletes target's remotePath directory.
+        """
+
+        cmd = "rmdir %s" % remotePath
+        self.run(cmd)
+
+    def deleteDirStructure(self, localPath, remotePath):
+        """
+        Delete recursively localPath structure directory in target's remotePath.
+
+        This function is useful to delete a package that is installed in the
+        device under test (DUT) and the host running the test has such package
+        extracted in tmp directory.
+
+        Example:
+            pwd: /home/user/tmp
+            tree:   .
+                    └── work
+                        ├── dir1
+                        │   └── file1
+                        └── dir2
+
+            localpath = "/home/user/tmp" and remotepath = "/home/user"
+
+            With the above variables this function will try to delete the
+            directory in the DUT in this order:
+                /home/user/work/dir1/file1
+                /home/user/work/dir1        (if dir is empty)
+                /home/user/work/dir2        (if dir is empty)
+                /home/user/work             (if dir is empty)
+        """
+
+        for root, dirs, files in os.walk(localPath, topdown=False):
+            # Delete files first
+            tmpDir = os.path.join(root).replace(localPath, "")
+            remoteDir = os.path.join(remotePath, tmpDir.lstrip("/"))
+            self.deleteFiles(remoteDir, files)
+
+            # Remove dirs if empty
+            for d in dirs:
+                tmpDir = os.path.join(root, d).replace(localPath, "")
+                remoteDir = os.path.join(remotePath, tmpDir.lstrip("/"))
+                self.deleteDir(remoteDir)
+
+class SerialTimeoutException(Exception):
+    def __init__(self, msg):
+        self.msg = msg
+    def __str__(self):
+        return self.msg
+
+class SerialConnection:
+
+    def __init__(self, script, target_prompt, connect_timeout, default_command_timeout):
+        import pexpect # limiting scope to avoid build dependency
+        self.prompt = target_prompt
+        self.connect_timeout = connect_timeout
+        self.default_command_timeout = default_command_timeout
+        self.conn = pexpect.spawn('/bin/bash', ['-c', script], encoding='utf8')
+        self._seek_to_clean_shell()
+        # Disable echo to avoid the need to parse the outgoing command
+        self.run_command('stty -echo')
+
+    def _seek_to_clean_shell(self):
+        """
+        Attempts to find a clean shell, meaning it is clear and
+        ready to accept a new command. This is necessary to ensure
+        the correct output is captured from each command.
+        """
+        import pexpect # limiting scope to avoid build dependency
+        # Look for a clean shell
+        # Wait a short amount of time for the connection to finish
+        pexpect_code = self.conn.expect([self.prompt, pexpect.TIMEOUT],
+                                        timeout=self.connect_timeout)
+
+        # if a timeout occurred, send an empty line and wait for a clean shell
+        if pexpect_code == 1:
+            # send a newline to clear and present the shell
+            self.conn.sendline("")
+            pexpect_code = self.conn.expect(self.prompt)
+
+    def run_command(self, cmd, timeout=None):
+        """
+        Runs command on target over the provided serial connection.
+        Returns any output on the shell while the command was run.
+
+        command:    Command to run on target.
+        timeout:    <value>: Kill command after <val> seconds.
+                    None: Kill command default value seconds.
+                    0: No timeout, runs until return.
+        """
+        import pexpect # limiting scope to avoid build dependency
+        # Convert from the OETarget defaults to pexpect timeout values
+        if timeout is None:
+            timeout = self.default_command_timeout
+        elif timeout == 0:
+            timeout = None # passing None to pexpect is infinite timeout
+
+        self.conn.sendline(cmd)
+        pexpect_code = self.conn.expect([self.prompt, pexpect.TIMEOUT], timeout=timeout)
+
+        # check for timeout
+        if pexpect_code == 1:
+            self.conn.send('\003') # send Ctrl+C
+            self._seek_to_clean_shell()
+            raise SerialTimeoutException(f'Timeout executing: {cmd} after {timeout}s')
+
+        return self.conn.before.removesuffix('\r\n')
+
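Note: OESerialTarget drives the device entirely through a user-supplied console command and a pexpect-managed shell prompt, so it can stand in for the SSH target where only a serial console is available. A minimal sketch of driving the class directly, assuming the oeqa modules are on PYTHONPATH and assuming a picocom command line for the console (the command line and machine name below are illustrative assumptions, not part of this patch):

    import logging
    from oeqa.core.target.serial import OESerialTarget

    logger = logging.getLogger('target')
    # Any command that attaches a login shell to the board's console works here
    # (picocom, or telnet to a ser2net port); this one is only an example.
    target = OESerialTarget(logger, target_ip=None, server_ip=None,
                            serialcontrol_cmd='picocom -b 115200 /dev/ttyUSB0',
                            machine='qemux86-64')

    status, output = target.run('uname -a')          # first call opens the connection
    target.copyTo('/tmp/report.txt', '/home/root/')  # base32-encoded transfer over the console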
diff --git a/meta/lib/oeqa/core/target/ssh.py b/meta/lib/oeqa/core/target/ssh.py
index 09cdd14c75..d473469384 100644
--- a/meta/lib/oeqa/core/target/ssh.py
+++ b/meta/lib/oeqa/core/target/ssh.py
@@ -55,14 +55,14 @@ class OESSHTarget(OETarget):
     def stop(self, **kwargs):
         pass
 
-    def _run(self, command, timeout=None, ignore_status=True):
+    def _run(self, command, timeout=None, ignore_status=True, raw=False):
         """
         Runs command in target using SSHProcess.
         """
         self.logger.debug("[Running]$ %s" % " ".join(command))
 
         starttime = time.time()
-        status, output = SSHCall(command, self.logger, timeout)
+        status, output = SSHCall(command, self.logger, timeout, raw)
         self.logger.debug("[Command returned '%d' after %.2f seconds]"
                           "" % (status, time.time() - starttime))
 
@@ -72,7 +72,7 @@ class OESSHTarget(OETarget):
 
         return (status, output)
 
-    def run(self, command, timeout=None, ignore_status=True):
+    def run(self, command, timeout=None, ignore_status=True, raw=False):
         """
         Runs command in target.
 
@@ -91,7 +91,7 @@ class OESSHTarget(OETarget):
         else:
             processTimeout = self.timeout
 
-        status, output = self._run(sshCmd, processTimeout, ignore_status)
+        status, output = self._run(sshCmd, processTimeout, ignore_status, raw)
         self.logger.debug('Command: %s\nStatus: %d Output: %s\n' % (command, status, output))
 
         return (status, output)
@@ -206,7 +206,7 @@ class OESSHTarget(OETarget):
             remoteDir = os.path.join(remotePath, tmpDir.lstrip("/"))
             self.deleteDir(remoteDir)
 
-def SSHCall(command, logger, timeout=None, **opts):
+def SSHCall(command, logger, timeout=None, raw=False, **opts):
 
     def run():
         nonlocal output
@@ -265,7 +265,7 @@ def SSHCall(command, logger, timeout=None, **opts):
         else:
             output_raw = process.communicate()[0]
 
-        output = output_raw.decode('utf-8', errors='ignore')
+        output = output_raw if raw else output_raw.decode('utf-8', errors='ignore')
         logger.debug('Data from SSH call:\n%s' % output.rstrip())
 
         # timout or not, make sure process exits and is not hanging
@@ -292,7 +292,7 @@ def SSHCall(command, logger, timeout=None, **opts):
 
     options = {
         "stdout": subprocess.PIPE,
-        "stderr": subprocess.STDOUT,
+        "stderr": subprocess.STDOUT if not raw else None,
         "stdin": None,
         "shell": False,
         "bufsize": -1,
@@ -320,4 +320,4 @@ def SSHCall(command, logger, timeout=None, **opts):
         logger.debug('Something went wrong, killing SSH process')
         raise
 
-    return (process.returncode, output.rstrip())
+    return (process.returncode, output if raw else output.rstrip())
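Note: the new raw parameter is threaded through run() -> _run() -> SSHCall() so a caller can receive the untouched bytes from the remote command instead of a UTF-8-decoded, rstrip()-ed string; in raw mode stderr is also no longer folded into the captured stream. A short usage sketch, assuming an already-configured OESSHTarget instance named target:

    # Binary-safe capture over the existing SSH connection: with raw=True the
    # returned output is bytes, so write it out in binary mode.
    status, data = target.run('cat /etc/os-release', raw=True)
    if status == 0:
        with open('os-release.copy', 'wb') as f:
            f.write(data)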
diff --git a/meta/lib/oeqa/core/tests/common.py b/meta/lib/oeqa/core/tests/common.py
index 88cc758ad3..bcc4fde632 100644
--- a/meta/lib/oeqa/core/tests/common.py
+++ b/meta/lib/oeqa/core/tests/common.py
@@ -9,7 +9,6 @@ import os
9 9
10import unittest 10import unittest
11import logging 11import logging
12import os
13 12
14logger = logging.getLogger("oeqa") 13logger = logging.getLogger("oeqa")
15logger.setLevel(logging.INFO) 14logger.setLevel(logging.INFO)
diff --git a/meta/lib/oeqa/manual/crops.json b/meta/lib/oeqa/manual/crops.json
deleted file mode 100644
index 5cfa653843..0000000000
--- a/meta/lib/oeqa/manual/crops.json
+++ /dev/null
@@ -1,294 +0,0 @@
1[
2 {
3 "test": {
4 "@alias": "crops-default.crops-default.sdkext_eSDK_devtool_build_make",
5 "author": [
6 {
7 "email": "francisco.j.pedraza.gonzalez@intel.com",
8 "name": "francisco.j.pedraza.gonzalez@intel.com"
9 }
10 ],
11 "execution": {
12 "1": {
13 "action": "IMPORTANT NOTE: The firsts 5 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\n\n",
14 "expected_results": ""
15 },
16 "2": {
17 "action": " Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n",
18 "expected_results": ""
19 },
20 "3": {
21 "action": "Create the following tree of files <crops-esdk-workdir-workspace>/sdkext/files/myapp <crops-esdk-workdir-workspace>/sdkext/files/myapp_cmake \n\n\n",
22 "expected_results": ""
23 },
24 "4": {
25 "action": " Create the following files withing the myapp directory myapp.c and the Makefile. Write the following inside of each file: \n---------------------------------------- \nMakefile should contain \n\nall: myapp \n\nmyapp: myapp.o \n\t$(CC) $(LDFLAGS) $< -o $@ \n\nmyapp.o: myapp.c \n\t$(CC) $(CFLAGS) -c $< -o $@ \n\nclean: \n\trm -rf myapp.o myapp \n\n----------------------------- \nmyapp.c shold contain \n\n\n#include <stdio.h> \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n \n\treturn 0; \n} \n------------------------------------ \n\n",
26 "expected_results": "be sure that the indentations on the makefile are tabs not spaces. \n\n"
27 },
28 "5": {
29 "action": " Create the following files within the myapp_cmake directory CMakeLists.txt and myapp.c. Write the following inside each file: \n\n------------------------------------ \nCMakeLists.txt should contain: \n\ncmake_minimum_required (VERSION 2.6) \nproject (myapp) \n# The version number. \nset (myapp_VERSION_MAJOR 1) \nset (myapp_VERSION_MINOR 0) \n\n# add the executable \nadd_executable (myapp myapp.c) \n\ninstall(TARGETS myapp \nRUNTIME DESTINATION bin) \n\n------------------------------------------ \nmyapp.c should contain: \n\n#include <stdio.h> \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n\n\treturn 0; \n} \n------------------------------------------------- \n\n",
30 "expected_results": "Be sure that the indentations on CMakeLists.txt is tabs not spaces."
31 },
32 "6": {
33 "action": " source environment-setup-i586-poky-linux \n\n",
34 "expected_results": "This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n\n"
35 },
36 "7": {
37 "action": " run command which devtool \n\n",
38 "expected_results": "this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n "
39 },
40 "8": {
41 "action": "devtool add myapp <directory>(this is myapp dir) \n\n\n",
42 "expected_results": "The directory you should input is the myapp directory. This should automatically create the recipe myapp.bb under <crops-esdk-workdir-workspace>/recipes/myapp/myapp.bb"
43 },
44 "9": {
45 "action": " devtool build myapp \n\n",
46 "expected_results": "This should compile an image"
47 },
48 "10": {
49 "action": " devtool reset myapp ",
50 "expected_results": "This cleans sysroot of the myapp recipe, but it leaves the source tree intact. meaning it does not erase."
51 }
52 },
53 "summary": "sdkext_eSDK_devtool_build_make"
54 }
55 },
56 {
57 "test": {
58 "@alias": "crops-default.crops-default.sdkext_devtool_build_esdk_package",
59 "author": [
60 {
61 "email": "francisco.j.pedraza.gonzalez@intel.com",
62 "name": "francisco.j.pedraza.gonzalez@intel.com"
63 }
64 ],
65 "execution": {
66 "1": {
67 "action": "IMPORTANT NOTE: The firsts 5 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\n\n",
68 "expected_results": ""
69 },
70 "2": {
71 "action": " Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n",
72 "expected_results": ""
73 },
74 "3": {
75 "action": " Create the following tree of files <crops-esdk-workdir-workspace>/sdkext/files/myapp/ \n <crops-esdk-workdir-workspace>/sdkext/files/myapp_cmake \n\n",
76 "expected_results": ""
77 },
78 "4": {
79 "action": " Create the following files withing the myapp directory myapp.c and the Makefile. Write the following inside of each file: \n---------------------------------------- \nMakefile should contain \n\nall: myapp \n\nmyapp: myapp.o \n\t$(CC) $(LDFLAGS) $< -o $@ \n\nmyapp.o: myapp.c \n\t$(CC) $(CFLAGS) -c $< -o $@ \n\nclean: \n\trm -rf myapp.o myapp \n\n----------------------------- \nmyapp.c shold contain \n\n#include <stdio.h> \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n \n\treturn 0; \n} \n------------------------------------ \n\n",
80 "expected_results": "be sure that the indentations on the makefile are tabs not spaces. \n\n"
81 },
82 "5": {
83 "action": " Create the following files within the myapp_cmake directory CMakeLists.txt and myapp.c. Write the following inside each file: \n\n------------------------------------ \nCMakeLists.txt should contain: \n\ncmake_minimum_required (VERSION 2.6) \nproject (myapp) \n# The version number. \nset (myapp_VERSION_MAJOR 1) \nset (myapp_VERSION_MINOR 0) \n\n# add the executable \nadd_executable (myapp myapp.c) \n\ninstall(TARGETS myapp \nRUNTIME DESTINATION bin) \n\n------------------------------------------ \nmyapp.c should contain: \n\n#include<stdio.h> \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n\n\treturn 0; \n} \n------------------------------------------------- \n\n",
84 "expected_results": "Be sure that the indentations on CMakeLists.txt is tabs not spaces. \n\n"
85 },
86 "6": {
87 "action": " source environment-setup-i586-poky-linux \n\n",
88 "expected_results": "This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n\n"
89 },
90 "7": {
91 "action": " run command which devtool \n\n",
92 "expected_results": " this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n"
93 },
94 "8": {
95 "action": " devtool add myapp <directory> (this is myapp dir) \n\n",
96 "expected_results": " The directory you should input is the myapp directory. This should automatically create the recipe myapp.bb under <crops-esdk-workdir-workspace>/recipes/myapp/myapp.bb \n\n"
97 },
98 "9": {
99 "action": " devtool package myapp \n\n",
100 "expected_results": " you should expect a package creation of myapp and it should be under the /tmp/deploy/ \n\n"
101 },
102 "10": {
103 "action": " devtool reset myapp ",
104 "expected_results": "This cleans sysroot of the myapp recipe, but it leaves the source tree intact. meaning it does not erase.\n</package_format>"
105 }
106 },
107 "summary": "sdkext_devtool_build_esdk_package"
108 }
109 },
110 {
111 "test": {
112 "@alias": "crops-default.crops-default.sdkext_devtool_build_cmake",
113 "author": [
114 {
115 "email": "francisco.j.pedraza.gonzalez@intel.com",
116 "name": "francisco.j.pedraza.gonzalez@intel.com"
117 }
118 ],
119 "execution": {
120 "1": {
121 "action": "IMPORTANT NOTE: The firsts 5 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\n\n",
122 "expected_results": ""
123 },
124 "2": {
125 "action": " Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n",
126 "expected_results": ""
127 },
128 "3": {
129 "action": " Create the following tree of files <crops-esdk-workdir-workspace>/sdkext/files/myapp \n <crops-esdk-workdir-workspace>/sdkext/files/myapp_cmake \n\n",
130 "expected_results": ""
131 },
132 "4": {
133 "action": " Create the following files withing the myapp directory myapp.c and the Makefile. Write the following inside of each file: \n---------------------------------------- \nMakefile should contain \n\nall: myapp \n\nmyapp: myapp.o \n\t$(CC) $(LDFLAGS) $< -o $@ \n\nmyapp.o: myapp.c \n\t$(CC) $(CFLAGS) -c $< -o $@ \n\nclean: \n\trm -rf myapp.o myapp \n\n----------------------------- \nmyapp.c shold contain \n\n#include <stdio.h> \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n \n\treturn 0; \n} \n------------------------------------ \n\n",
134 "expected_results": "be sure that the indentations on the makefile are tabs not spaces. \n\n"
135 },
136 "5": {
137 "action": " Create the following files within the myapp_cmake directory CMakeLists.txt and myapp.c. Write the following inside each file: \n\n------------------------------------ \nCMakeLists.txt should contain: \n\ncmake_minimum_required (VERSION 2.6) \nproject (myapp) \n# The version number. \nset (myapp_VERSION_MAJOR 1) \nset (myapp_VERSION_MINOR 0) \n\n# add the executable \nadd_executable (myapp myapp.c) \n\ninstall(TARGETS myapp \nRUNTIME DESTINATION bin) \n\n------------------------------------------ \nmyapp.c should contain: \n\n#include \n\nint \nmain(int argc, char *argv[]) \n{ \n\tprintf(\"Hello world\\n\"); \n\n\treturn 0; \n} \n------------------------------------------------- \n\n",
138 "expected_results": "Be sure that the indentations on CMakeLists.txt is tabs not spaces. \n\n"
139 },
140 "6": {
141 "action": " source environment-setup-i586-poky-linux \n\n",
142 "expected_results": "This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n\n"
143 },
144 "7": {
145 "action": " run command which devtool \n\n",
146 "expected_results": "this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n"
147 },
148 "8": {
149 "action": " devtool add myapp <directory> (this is myapp_cmake dir) \n\n",
150 "expected_results": "The directory you should input is the myapp_cmake directory. This should automatically create the recipe myapp.bb under <crops-esdk-workdir-workspace>/recipes/myapp/myapp.bb \n\n"
151 },
152 "9": {
153 "action": " devtool build myapp \n\n",
154 "expected_results": "This should compile an image \n\n"
155 },
156 "10": {
157 "action": " devtool reset myapp ",
158 "expected_results": "This cleans sysroot of the myapp recipe, but it leaves the source tree intact. meaning it does not erase. "
159 }
160 },
161 "summary": "sdkext_devtool_build_cmake"
162 }
163 },
164 {
165 "test": {
166 "@alias": "crops-default.crops-default.sdkext_extend_autotools_recipe_creation",
167 "author": [
168 {
169 "email": "francisco.j.pedraza.gonzalez@intel.com",
170 "name": "francisco.j.pedraza.gonzalez@intel.com"
171 }
172 ],
173 "execution": {
174 "1": {
175 "action": "IMPORTANT NOTE: The firsts 2 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\n\n",
176 "expected_results": ""
177 },
178 "2": {
179 "action": "Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n",
180 "expected_results": ""
181 },
182 "3": {
183 "action": " source environment-setup-i586-poky-linux \n\n",
184 "expected_results": " This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n\n"
185 },
186 "4": {
187 "action": "run command which devtool \n\n",
188 "expected_results": "this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n"
189 },
190 "5": {
191 "action": "devtool sdk-install -s libxml2 \n\n",
192 "expected_results": "this should install libxml2 \n\n"
193 },
194 "6": {
195 "action": "devtool add librdfa https://github.com/rdfa/librdfa \n\n",
196 "expected_results": "This should automatically create the recipe librdfa.bb under /recipes/librdfa/librdfa.bb \n\n"
197 },
198 "7": {
199 "action": "devtool build librdfa \n\n",
200 "expected_results": "This should compile \n\n"
201 },
202 "8": {
203 "action": "devtool reset librdfa ",
204 "expected_results": "This cleans sysroot of the librdfa recipe, but it leaves the source tree intact. meaning it does not erase."
205 }
206 },
207 "summary": "sdkext_extend_autotools_recipe_creation"
208 }
209 },
210 {
211 "test": {
212 "@alias": "crops-default.crops-default.sdkext_devtool_kernelmodule",
213 "author": [
214 {
215 "email": "francisco.j.pedraza.gonzalez@intel.com",
216 "name": "francisco.j.pedraza.gonzalez@intel.com"
217 }
218 ],
219 "execution": {
220 "1": {
221 "action": "IMPORTANT NOTE: The firsts 2 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\n",
222 "expected_results": ""
223 },
224 "2": {
225 "action": " Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n",
226 "expected_results": ""
227 },
228 "3": {
229 "action": "source environment-setup-i586-poky-linux \n\n",
230 "expected_results": "This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n \n"
231 },
232 "4": {
233 "action": "run command which devtool \n\n",
234 "expected_results": "this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n"
235 },
236 "5": {
237 "action": "devtool add kernel-module-hello-world https://git.yoctoproject.org/git/kernel-module-hello-world \n\n",
238 "expected_results": "This should automatically create the recipe kernel-module-hello-world.bb under <crops-esdk-workdir-workspace>/recipes/kernel-module-hello-world/kernel-module-hello-world.bb "
239 },
240 "6": {
241 "action": "devtool build kernel-module-hello-world \n\n",
242 "expected_results": "This should compile an image \n\n"
243 },
244 "7": {
245 "action": "devtool reset kernel-module-hello-world ",
246 "expected_results": "This cleans sysroot of the kernel-module-hello-world recipe, but it leaves the source tree intact. meaning it does not erase."
247 }
248 },
249 "summary": "sdkext_devtool_kernelmodule"
250 }
251 },
252 {
253 "test": {
254 "@alias": "crops-default.crops-default.sdkext_recipes_for_nodejs",
255 "author": [
256 {
257 "email": "francisco.j.pedraza.gonzalez@intel.com",
258 "name": "francisco.j.pedraza.gonzalez@intel.com"
259 }
260 ],
261 "execution": {
262 "1": {
263 "action": "IMPORTANT NOTE: The firsts 2 steps refer to configuration of the environment to run the rest of the steps. These only apply for CROPS-eSDK. \n\n\nlets say variable npm = npm://registry.npmjs.org;name=winston;version=2.2.0 \n\n",
264 "expected_results": ""
265 },
266 "2": {
267 "action": "Initiate your Crops-esdk environment as it says in wiki https://github.com/crops/docker-win-mac-docs/wiki \n\n",
268 "expected_results": ""
269 },
270 "3": {
271 "action": "source environment-setup-i586-poky-linux \n\n",
272 "expected_results": "This should output a message that says SDK environment now set up; additionally you may now run devtool to perform development tasks etc etc ... \n\n"
273 },
274 "4": {
275 "action": "run command which devtool \n\n",
276 "expected_results": "this should output the directory of the devtool script and it should be within the sdk workdir you are working in. \n\n"
277 },
278 "5": {
279 "action": " 4a) git clone git://git.openembedded.org/meta-openembedded in layers/build directory \n \n4b) Add meta-openembedded/meta-oe in bblayer.conf as mentioned below: ${SDKBASEMETAPATH}/layers/build/meta-openembedded/meta-oe \\ \n\n4c) devtool add \"npm://registry.npmjs.org;name=npm;version=2.2.0\" \n\n",
280 "expected_results": " This should automatically create the recipe npm.bb under /recipes/npm/npm.bb \n\n"
281 },
282 "6": {
283 "action": "devtool build npm \n\n",
284 "expected_results": "This should compile an image \n\n"
285 },
286 "7": {
287 "action": " devtool reset npm",
288 "expected_results": "This cleans sysroot of the npm recipe, but it leaves the source tree intact. meaning it does not erase."
289 }
290 },
291 "summary": "sdkext_recipes_for_nodejs"
292 }
293 }
294]
diff --git a/meta/lib/oeqa/manual/eclipse-plugin.json b/meta/lib/oeqa/manual/eclipse-plugin.json
deleted file mode 100644
index 6c110d0656..0000000000
--- a/meta/lib/oeqa/manual/eclipse-plugin.json
+++ /dev/null
@@ -1,322 +0,0 @@
1[
2 {
3 "test": {
4 "@alias": "eclipse-plugin.eclipse-plugin.support_SSH_connection_to_Target",
5 "author": [
6 {
7 "email": "ee.peng.yeoh@intel.com",
8 "name": "ee.peng.yeoh@intel.com"
9 }
10 ],
11 "execution": {
12 "1": {
13 "action": "In Eclipse, swich to Remote System Explorer to create a connention baseed on SSH, input the remote target IP address as the Host name, make sure disable the proxy in Window->Preferences->General->Network Connection, set Direct as Active Provider field. ",
14 "expected_results": "the connection based on SSH could be set up."
15 },
16 "2": {
17 "action": "Configure connection from Eclipse: Run->Run Configurations->C/C++ Remote Application\\ ->New Connection->General->SSH Only ",
18 "expected_results": ""
19 },
20 "3": {
21 "action": "Then right click to connect, input the user ID and password. ",
22 "expected_results": ""
23 },
24 "4": {
25 "action": "expand the connection, it will show the Sftp Files etc. \nNOTE. Might need to change dropbear to openssh and add the packagegroup-core-eclipse-debug recipe",
26 "expected_results": ""
27 }
28 },
29 "summary": "support_SSH_connection_to_Target"
30 }
31 },
32 {
33 "test": {
34 "@alias": "eclipse-plugin.eclipse-plugin.Launch_QEMU_from_Eclipse",
35 "author": [
36 {
37 "email": "ee.peng.yeoh@intel.com",
38 "name": "ee.peng.yeoh@intel.com"
39 }
40 ],
41 "execution": {
42 "1": {
43 "action": "Set the Yocto ADT's toolchain root location, sysroot location and kernel, in the menu Window -> Preferences -> Yocto ADT. \n \n",
44 "expected_results": ""
45 },
46 "2": {
47 "action": "wget https://downloads.yoctoproject.org/releases/yocto/yocto-$VERSION/machines/qemu/qemux86/ (ex:core-image-sato-sdk-qemux86-date-rootfs-tar-bz2) \nsource /opt/poky/version/environment-setup-i585-poky-linux \n\nExtract qemu with runqemu-extract-sdk /home/user/file(ex.core-image-sato-sdk-qemux86.bz2) \n/home/user/qemux86-sato-sdk \n\n",
48 "expected_results": " Qemu can be lauched normally."
49 },
50 "3": {
51 "action": "(a)Point to the Toolchain: \n \nIf you are using a stand-alone pre-built toolchain, you should be pointing to the /opt/poky/{test-version} directory as Toolchain Root Location. This is the default location for toolchains installed by the ADT Installer or by hand. If ADT is installed in other location, use that location as Toolchain location.\nIf you are using a system-derived toolchain, the path you provide for the Toolchain Root Location field is the Yocto Project's build directory. \n \n E.g:/home/user/yocto/poky/build \n",
52 "expected_results": ""
53 },
54 "4": {
55 "action": "(b)Specify the Sysroot Location: \nSysroot Location is the location where the root filesystem for the target hardware is created on the development system by the ADT Installer (SYSROOT in step 2 of the case ADT installer Installation). \n \n Local : e.g: /home/user/qemux86-sato-sdk \nUsing ADT : e.g :/home/user/test-yocto/qemux86 \n\n",
56 "expected_results": ""
57 },
58 "5": {
59 "action": "(c)Select the Target Architecture: \n \nThe target architecture is the type of hardware you are going to use or emulate. Use the pull-down Target Architecture menu to make your selection. \n \n\n",
60 "expected_results": ""
61 },
62 "6": {
63 "action": "(d) QEMU: \nSelect this option if you will be using the QEMU emulator. Specify the Kernel matching the QEMU architecture you are using. \n wget https://downloads.yoctoproject.org/releases/yocto/yocto-$VERSION/machines/qemu/qemux86/bzImage-qemux86.bin \n e.g: /home/$USER/yocto/adt-installer/download_image/bzImage-qemux86.bin \n\n",
64 "expected_results": ""
65 },
66 "7": {
67 "action": "(e) select OK to save the settings. \n\n\n1: In the Eclipse toolbar, expose the Run -> External Tools menu. Your image should appear as a selectable menu item. \n2: Select your image in the navigation pane to launch the emulator in a new window. \n3: If needed, enter your host root password in the shell window at the prompt. This sets up a Tap 0 connection needed for running in user-space NFS mode. \n",
68 "expected_results": ""
69 }
70 },
71 "summary": "Launch_QEMU_from_Eclipse"
72 }
73 },
74 {
75 "test": {
76 "@alias": "eclipse-plugin.eclipse-plugin.Relocatable_SDK_-_C_-_Build_Hello_World_ANSI_C_Autotools_Project",
77 "author": [
78 {
79 "email": "ee.peng.yeoh@intel.com",
80 "name": "ee.peng.yeoh@intel.com"
81 }
82 ],
83 "execution": {
84 "1": {
85 "action": "Launch a QEMU of target environment.(Reference to case \"ADT - Launch qemu by eclipse\") ",
86 "expected_results": ""
87 },
88 "2": {
89 "action": "Select File -> New -> Project.",
90 "expected_results": ""
91 },
92 "3": {
93 "action": "Double click C/C++.",
94 "expected_results": ""
95 },
96 "4": {
97 "action": "Click C or C++ Project to create the project.",
98 "expected_results": ""
99 },
100 "5": {
101 "action": "Expand Yocto ADT Project.",
102 "expected_results": ""
103 },
104 "6": {
105 "action": "Select Hello World ANSI C Autotools Project.",
106 "expected_results": ""
107 },
108 "7": {
109 "action": "Put a name in the Project name. Do not use hyphens as part of the name. \n \n",
110 "expected_results": ""
111 },
112 "8": {
113 "action": "Click Next.",
114 "expected_results": ""
115 },
116 "9": {
117 "action": "Add information in the Author and Copyright notice fields. \n1",
118 "expected_results": ""
119 },
120 "10": {
121 "action": "Click Finish. \n1",
122 "expected_results": ""
123 },
124 "11": {
125 "action": "If the \"open perspective\" prompt appears, click \"Yes\" so that you open the C/C++ perspective. \n1",
126 "expected_results": ""
127 },
128 "12": {
129 "action": "In the Project Explorer window, right click the project -> Reconfigure project. \n1",
130 "expected_results": ""
131 },
132 "13": {
133 "action": "In the Project Explorer window, right click the project -> Build project. \n1",
134 "expected_results": "Under the Project files, a new folder appears called Binaries. This indicates that the compilation have been successful and the project binary have been created. \n"
135 },
136 "14": {
137 "action": "Right click it again and Run as -> Run Configurations. \n\t\t\tUnder Run Configurations expand \"C/C++ Remote Application\". A configuration for the current project should appear. Clicking it will display the configuration settings. \n\t\t\tin \"C/C++ Application\" field input Remote Absolute File path for C/C++ Application. e.g.: /home/root/myapplication \n\t\t\tIn \"Connection\" drop-down list make sure a TCF connection is set up for your target. If not, create a new one by clicking the New button. \n1",
138 "expected_results": "step 14 to step 16 -> Build succeed and the console outputs Hello world, you can also check the output on target."
139 },
140 "15": {
141 "action": "After all settings are done, select the Run button on the bottom right corner \n\n1",
142 "expected_results": ""
143 },
144 "16": {
145 "action": "Repeat the steps 14-15, but instead of using Run Configurations use Debug Configurations: \nRight click it again and Debug as -> Debug Configurations \nUnder Debug Configurations expand \"C/C++ Remote Application\". A configuration for the current project should appear. Clicking it will display the configuration settings. \nin \"C/C++ Application\" field input Remote Absolute File path for C/C++ Application.\ne.g.: /home/root/myapplication \nIn \"Connection\" drop-down list make sure a TCF connection is set up for your target. If not, create a new one by clicking the New button \n1",
146 "expected_results": ""
147 },
148 "17": {
149 "action": "After all settings are done, select the Debug button on the bottom right corner",
150 "expected_results": ""
151 }
152 },
153 "summary": "Relocatable_SDK_-_C_-_Build_Hello_World_ANSI_C_Autotools_Project"
154 }
155 },
156 {
157 "test": {
158 "@alias": "eclipse-plugin.eclipse-plugin.Relocatable_SDK_-_C++_-_Build_Hello_World_C++_Autotools_project",
159 "author": [
160 {
161 "email": "ee.peng.yeoh@intel.com",
162 "name": "ee.peng.yeoh@intel.com"
163 }
164 ],
165 "execution": {
166 "1": {
167 "action": "Launch a QEMU of target environment.(Reference to case \"ADT - Launch qemu by eclipse\") ",
168 "expected_results": ""
169 },
170 "2": {
171 "action": "Select File -> New -> Project. ",
172 "expected_results": ""
173 },
174 "3": {
175 "action": "Double click C/C++. ",
176 "expected_results": ""
177 },
178 "4": {
179 "action": "Click C or C++ Project to create the project. ",
180 "expected_results": ""
181 },
182 "5": {
183 "action": "Expand Yocto ADT Project. ",
184 "expected_results": ""
185 },
186 "6": {
187 "action": "Select Hello World ANSI C++ Autotools Project. ",
188 "expected_results": ""
189 },
190 "7": {
191 "action": "Put a name in the Project name. Do not use hyphens as part of the name. \n \n",
192 "expected_results": ""
193 },
194 "8": {
195 "action": "Click Next.",
196 "expected_results": ""
197 },
198 "9": {
199 "action": "Add information in the Author and Copyright notice fields.",
200 "expected_results": ""
201 },
202 "10": {
203 "action": "Click Finish. \n1",
204 "expected_results": ""
205 },
206 "11": {
207 "action": "If the \"open perspective\" prompt appears, click \"Yes\" so that you open the C/C++ perspective. \n1",
208 "expected_results": ""
209 },
210 "12": {
211 "action": "In the Project Explorer window, right click the project -> Reconfigure project. \n1",
212 "expected_results": ""
213 },
214 "13": {
215 "action": "In the Project Explorer window, right click the project -> Build project. \n\n1",
216 "expected_results": "under the Project files, a new folder appears called Binaries. This indicates that the compilation have been successful and the project binary have been created. \n"
217 },
218 "14": {
219 "action": "Right click it again and Run as -> Run Configurations. \n\t\t\tUnder Run Configurations expand \"C/C++ Remote Application\". A configuration for the current project should appear. Clicking it will display the configuration settings. \n\t\t\tin \"C/C++ Application\" field input Remote Absolute File path for C/C++ Application. e.g.: /home/root/myapplication \n\t\t\tIn \"Connection\" drop-down list make sure a TCF connection is set up for your target. If not, create a new one by clicking the New button. \n1",
220 "expected_results": "step 14 to step 16 -> Build succeed and the console outputs Hello world, you can also check the output on target."
221 },
222 "15": {
223 "action": "After all settings are done, select the Run button on the bottom right corner \n\n1",
224 "expected_results": ""
225 },
226 "16": {
227 "action": "Repeat the steps 14-15, but instead of using Run Configurations use Debug Configurations: \n\t\tRight click it again and Debug as -> Debug Configurations \n\t\tUnder Debug Configurations expand \"C/C++ Remote Application\". A configuration for the current project should appear. Clicking it will display the configuration settings. \n\t\tin \"C/C++ Application\" field input Remote Absolute File path for C/C++ Application. \n\t\te.g.: /home/root/myapplication \n\t\tIn \"Connection\" drop-down list make sure a TCF connection is set up for your target. If not, create a new one by clicking the New button \n1",
228 "expected_results": ""
229 },
230 "17": {
231 "action": "After all settings are done, select the Debug button on the bottom right corner",
232 "expected_results": ""
233 }
234 },
235 "summary": "Relocatable_SDK_-_C++_-_Build_Hello_World_C++_Autotools_project"
236 }
237 },
238 {
239 "test": {
240 "@alias": "eclipse-plugin.eclipse-plugin.Build_Eclipse_Plugin_from_source",
241 "author": [
242 {
243 "email": "laurentiu.serban@intel.com",
244 "name": "laurentiu.serban@intel.com"
245 }
246 ],
247 "execution": {
248 "1": {
249 "action": "Clone eclipse-poky source. \n \n - git clone git://git.yoctoproject.org/eclipse-poky \n\n",
250 "expected_results": "Eclipse plugin is successfully installed \n\nDocumentation is there. For example if you have release yocto-2.0.1 you will found on https://downloads.yoctoproject.org/releases/yocto/yocto-2.0.1/eclipse-plugin/mars/ archive with documentation like org.yocto.doc-development-$date.zip \n \n"
251 },
252 "2": {
253 "action": "Checkout correct tag. \n\n - git checkout <eclipse-version>/<yocto-version> \n\n",
254 "expected_results": "After plugin is build you must have 4 archive in foder scripts from eclipse-poky: \n - org.yocto.bc - mars-master-$date.zip \n - org.yocto.doc - mars-master-$date.zip --> documentation \n - org.yocto.sdk - mars-master-$date.zip \n - org.yocto.sdk - mars-master-$date.-archive.zip --> plugin "
255 },
256 "3": {
257 "action": "Move to scripts/ folder. \n\n",
258 "expected_results": ""
259 },
260 "4": {
261 "action": "Run ./setup.sh \n\n",
262 "expected_results": ""
263 },
264 "5": {
265 "action": "When the script finishes, it prompts a command to issue to build the plugin. It should look similar to the following: \n\n$ ECLIPSE_HOME=/eclipse-poky/scripts/eclipse ./build.sh /&1 | tee -a build.log \n\nHere, the three arguments to the build script are tag name, branch for documentation and release name. \n\n",
266 "expected_results": ""
267 },
268 "6": {
269 "action": "On an eclipse without the Yocto Plugin, select \"Install New Software\" from Help pull-down menu \n\n",
270 "expected_results": ""
271 },
272 "7": {
273 "action": "Select Add and from the dialog choose Archive... Look for the *archive.zip file that was built previously with the build.sh script. Click OK. \n\n",
274 "expected_results": ""
275 },
276 "8": {
277 "action": "Select all components and proceed with Installation of plugin. Restarting eclipse might be required.\n",
278 "expected_results": ""
279 }
280 },
281 "summary": "Build_Eclipse_Plugin_from_source"
282 }
283 },
284 {
285 "test": {
286 "@alias": "eclipse-plugin.eclipse-plugin.Eclipse_Poky_installation_and_setup",
287 "author": [
288 {
289 "email": "ee.peng.yeoh@intel.com",
290 "name": "ee.peng.yeoh@intel.com"
291 }
292 ],
293 "execution": {
294 "1": {
295 "action": "Install SDK \n\ta)Download https://autobuilder.yocto.io/pub/releases//toolchain/x86_64/poky-glibc-x86_64-core-\timage-sato-i586-toolchain-.sh \n\tb)Run the SDK installer and accept the default installation directory ",
296 "expected_results": ""
297 },
298 "2": {
299 "action": "Install \"Eclipse IDE for C/C++ Developers\" Oxygen release (4.7.0) \n\ta) Go to https://www.eclipse.org/downloads/packages/all, click \"Oxygen R\" \n\tb) Click to download the build for your OS \n\tc) Click \"Download\" button to download from a mirror \n\td) Run \"tar xf\" to extract the downloaded archive ",
300 "expected_result": ""
301 },
302 "3": {
303 "action": "Install \"Eclipse IDE for C/C++ Developers\" Oxygen release (4.7.0) (Continue) \n\te) Run \"eclipse/eclipse\" to start Eclipse \n\tf) Optional step for host machine within Intel network: In Eclipse workbench window, go to \"Window\" menu -> \"Preferences...\". \n\tg) In \"Preferences\" dialog, go to \"General\" -> \"Network Connections\", set \"Active Provider\" to \"Manual\". In \"Proxy \tentries\" table, select HTTP and click \"Edit\" and enter host \"proxy-chain.intel.com\" port 911, click OK. Repeat for HTTPS with port 912 \nClick OK to close \"Preferences\" dialog. \n\th) Go to \"File\" menu -> \"Restart\" to restart Eclipse for proxy settings to take effect. ",
304 "expected_result": ""
305 },
306 "4": {
307 "action": "Install Eclipse Poky plugins \n\ta) Download https://autobuilder.yocto.io/pub/releases/<yocto-version>/eclipse-plugin/<eclipse-version>/org.yocto.sdk-development-<date>-archive.zip \n\tb) In Eclipse workbench window, go to \"Help\" menu -> \"Install New Software...\" \n\tc) In \"Install\" dialog, click \"Add...\" button \n\td) In \"Add Repository\" dialog, enter \"Eclipse Poky\" for (repository) Name, click \"Archive...\" ",
308 "expected_results": ""
309 },
310 "5": {
311 "action": "Install Eclipse Poky plugins (continue) \n\te) In \"Repository archive\" browse dialog, select the downloaded Eclipse Poky repository archive \n\tf) Back in \"Add Repository\" dialog, click \"OK\" \n\tg) Back in \"Install\" dialog, make sure \"Work with:\" is set to \"Eclipse Poky\" repository, tick \"Yocto Project \tDocumentation Plug-in\" and \"Yocto Project SDK Plug-in\", click \"Next >\" and verify plugins/features name/version, \tclick \"Next >\" and accept license agreement, click \"Finish\" \n\th) If \"Security Warning\" dialog appears, click \"OK\" to install unsigned content. \n\ti) In \"Software Updates\" dialog, click \"Yes\" to restart Eclipse to complete Eclipse Poky plugins installation. ",
312 "expected_results": ""
313 },
314 "6": {
315 "action": "Setup Eclipse Poky to use SDK \n\ta) In Eclipse workbench window, go to \"Window\" menu -> \"Preferences\". \n\tb) In \"Preferences\" window, go to \"Yocto Project SDK\", in \"Cross Compiler Options\" frame, select \"Standalone pre-\tbuilt toolchain\". ",
316 "expected_results": "Eclipse Poky plugins installed and running successfully, e.g. observe that \"Yocto Project Tools\" menu is available on Eclipse workbench window."
317 }
318 },
319 "summary": "Eclipse_Poky_installation_and_setup"
320 }
321 }
322]
diff --git a/meta/lib/oeqa/runtime/cases/ltp.py b/meta/lib/oeqa/runtime/cases/ltp.py
index f588a93200..e81360670c 100644
--- a/meta/lib/oeqa/runtime/cases/ltp.py
+++ b/meta/lib/oeqa/runtime/cases/ltp.py
@@ -57,7 +57,7 @@ class LtpTestBase(OERuntimeTestCase):
 
 class LtpTest(LtpTestBase):
 
-    ltp_groups = ["math", "syscalls", "dio", "io", "mm", "ipc", "sched", "nptl", "pty", "containers", "controllers", "filecaps", "cap_bounds", "fcntl-locktests", "commands", "net.ipv6_lib", "input","fs_perms_simple", "cve", "crypto", "ima", "net.nfs", "net_stress.ipsec_icmp", "net.ipv6", "numa", "uevent", "ltp-aiodio.part1", "ltp-aiodio.part2", "ltp-aiodio.part3", "ltp-aiodio.part4"]
+    ltp_groups = ["math", "syscalls", "dio", "mm", "ipc", "sched", "nptl", "pty", "containers", "controllers", "fcntl-locktests", "commands", "net.ipv6_lib", "input","fs_perms_simple", "cve", "crypto", "ima", "net.nfs", "net_stress.ipsec_icmp", "net.ipv6", "numa", "uevent", "ltp-aiodio.part1", "ltp-aiodio.part2", "ltp-aiodio.part3", "ltp-aiodio.part4"]
 
     ltp_fs = ["fs", "fs_bind"]
     # skip kernel cpuhotplug
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-mipsarch.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-mipsarch.txt
index 2c0bd9a247..9c2677c4cf 100644
--- a/meta/lib/oeqa/runtime/cases/parselogs-ignores-mipsarch.txt
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-mipsarch.txt
@@ -1,2 +1,8 @@
 # These should be reviewed to see if they are still needed
 cacheinfo: Failed to find cpu0 device node
+
+# 6.10 restructures sysctl registration such that mips
+# registers an empty table and generates harmless warnings:
+# failed when register_sysctl_sz sched_fair_sysctls to kernel
+# failed when register_sysctl_sz sched_core_sysctls to kernel
+failed when register_sysctl_sz sched
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuall.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuall.txt
index b0c0fc9ddf..143db40d63 100644
--- a/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuall.txt
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuall.txt
@@ -13,6 +13,14 @@ FBIOPUT_VSCREENINFO failed, double buffering disabled
 # pci 0000:00:00.0: [Firmware Bug]: reg 0x20: invalid BAR (can't size)
 # pci 0000:00:00.0: [Firmware Bug]: reg 0x24: invalid BAR (can't size)
 invalid BAR (can't size)
+# 6.10+ the invalid BAR warnings are of this format:
+# pci 0000:00:00.0: [Firmware Bug]: BAR 0: invalid; can't size
+# pci 0000:00:00.0: [Firmware Bug]: BAR 1: invalid; can't size
+# pci 0000:00:00.0: [Firmware Bug]: BAR 2: invalid; can't size
+# pci 0000:00:00.0: [Firmware Bug]: BAR 3: invalid; can't size
+# pci 0000:00:00.0: [Firmware Bug]: BAR 4: invalid; can't size
+# pci 0000:00:00.0: [Firmware Bug]: BAR 5: invalid; can't size
+invalid; can't size
 
 # These should be reviewed to see if they are still needed
 wrong ELF class
diff --git a/meta/lib/oeqa/runtime/cases/parselogs.py b/meta/lib/oeqa/runtime/cases/parselogs.py
index 6966923c94..47c77fccd5 100644
--- a/meta/lib/oeqa/runtime/cases/parselogs.py
+++ b/meta/lib/oeqa/runtime/cases/parselogs.py
@@ -34,7 +34,7 @@ class ParseLogsTest(OERuntimeTestCase):
     log_locations = ["/var/log/", "/var/log/dmesg", "/tmp/dmesg_output.log"]
 
     # The keywords that identify error messages in the log files
-    errors = ["error", "cannot", "can't", "failed"]
+    errors = ["error", "cannot", "can't", "failed", "---[ cut here ]---", "No irq handler for vector"]
 
     # A list of error messages that should be ignored
     ignore_errors = []
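Note: the parselogs change above adds two more error keywords, while the parselogs-ignores-*.txt entries whitelist the corresponding harmless kernel messages on mips and qemu. A simplified illustration of how keyword and ignore entries interact (plain substring matching; this is a sketch of the idea, not the actual parselogs implementation):

    errors = ["error", "cannot", "can't", "failed",
              "---[ cut here ]---", "No irq handler for vector"]
    ignore_errors = ["invalid BAR (can't size)", "invalid; can't size",
                     "failed when register_sysctl_sz sched"]

    def is_reportable(line):
        # report a line only if it contains an error keyword and no ignore entry matches
        if not any(keyword in line for keyword in errors):
            return False
        return not any(ignored in line for ignored in ignore_errors)

    print(is_reportable("pci 0000:00:00.0: [Firmware Bug]: BAR 0: invalid; can't size"))  # False
    print(is_reportable("mmc0: error -110 whilst initialising SD card"))                  # True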
diff --git a/meta/lib/oeqa/runtime/cases/scp.py b/meta/lib/oeqa/runtime/cases/scp.py
index ee97b8ef66..364264369a 100644
--- a/meta/lib/oeqa/runtime/cases/scp.py
+++ b/meta/lib/oeqa/runtime/cases/scp.py
@@ -25,7 +25,7 @@ class ScpTest(OERuntimeTestCase):
         os.remove(cls.tmp_path)
 
     @OETestDepends(['ssh.SSHTest.test_ssh'])
-    @OEHasPackage(['openssh-scp'])
+    @OEHasPackage({'openssh-scp', 'openssh-sftp-server'})
     def test_scp_file(self):
         dst = '/tmp/test_scp_file'
 
diff --git a/meta/lib/oeqa/runtime/cases/ssh.py b/meta/lib/oeqa/runtime/cases/ssh.py
index cdbef59500..b86428002f 100644
--- a/meta/lib/oeqa/runtime/cases/ssh.py
+++ b/meta/lib/oeqa/runtime/cases/ssh.py
@@ -4,6 +4,9 @@
 # SPDX-License-Identifier: MIT
 #
 
+import time
+import signal
+
 from oeqa.runtime.case import OERuntimeTestCase
 from oeqa.core.decorator.depends import OETestDepends
 from oeqa.runtime.decorator.package import OEHasPackage
@@ -13,12 +16,22 @@ class SSHTest(OERuntimeTestCase):
13 @OETestDepends(['ping.PingTest.test_ping']) 16 @OETestDepends(['ping.PingTest.test_ping'])
14 @OEHasPackage(['dropbear', 'openssh-sshd']) 17 @OEHasPackage(['dropbear', 'openssh-sshd'])
15 def test_ssh(self): 18 def test_ssh(self):
16 (status, output) = self.target.run('sleep 20', timeout=2) 19 for i in range(20):
17 msg='run() timed out but return code was zero.' 20 status, output = self.target.run("uname -a", timeout=5)
18 self.assertNotEqual(status, 0, msg=msg) 21 if status == 0:
19 (status, output) = self.target.run('uname -a') 22 break
20 self.assertEqual(status, 0, msg='SSH Test failed: %s' % output) 23 elif status == 255 or status == -signal.SIGTERM:
21 (status, output) = self.target.run('cat /etc/controllerimage') 24 # ssh returns 255 only if a ssh error occurs. This could
22 msg = "This isn't the right image - /etc/controllerimage " \ 25 # be an issue with "Connection refused" because the port
23 "shouldn't be here %s" % output 26 # isn't open yet, and this could check explicitly for that
24 self.assertEqual(status, 1, msg=msg) 27 # here. However, let's keep it simple and just retry for
28 # all errors a limited amount of times with a sleep to
29 # give it time for the port to open.
30 # We sometimes see -15 (SIGTERM) on slow emulation machines too, likely
31 # from boot/init not being 100% complete, retry for these too.
32 time.sleep(5)
33 continue
34 else:
35 self.fail("uname failed with \"%s\" (exit code %s)" % (output, status))
36 if status != 0:
37 self.fail("ssh failed with \"%s\" (exit code %s)" % (output, status))
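The comment block above explains the retry rationale inline; as a standalone illustration, the same probe-and-retry pattern looks like the sketch below (assumptions: a run_cmd callable that behaves like self.target.run and returns (status, output); the 20 attempts and 5 s delay mirror the test, the helper name is ours).

    import time

    def wait_until_reachable(run_cmd, attempts=20, delay=5):
        """Retry a cheap probe command until the target answers over ssh."""
        status, output = None, None
        for _ in range(attempts):
            status, output = run_cmd("uname -a", timeout=5)
            if status == 0:
                return output
            # Transient errors (connection refused while sshd starts, or an
            # early-boot SIGTERM) are retried after a short sleep.
            time.sleep(delay)
        raise RuntimeError("target never became reachable: %s (exit %s)" % (output, status))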
diff --git a/meta/lib/oeqa/runtime/cases/systemd.py b/meta/lib/oeqa/runtime/cases/systemd.py
index 5481e1d840..640f28abe9 100644
--- a/meta/lib/oeqa/runtime/cases/systemd.py
+++ b/meta/lib/oeqa/runtime/cases/systemd.py
@@ -145,18 +145,29 @@ class SystemdServiceTests(SystemdTest):
145 Verify that call-stacks generated by systemd-coredump contain symbolicated call-stacks, 145 Verify that call-stacks generated by systemd-coredump contain symbolicated call-stacks,
146 extracted from the minidebuginfo metadata (.gnu_debugdata elf section). 146 extracted from the minidebuginfo metadata (.gnu_debugdata elf section).
147 """ 147 """
148 t_thread = threading.Thread(target=self.target.run, args=("ulimit -c unlimited && sleep 1000",)) 148 # use "env sleep" instead of "sleep" to avoid calling the shell builtin function
149 t_thread = threading.Thread(target=self.target.run, args=("ulimit -c unlimited && env sleep 1000",))
149 t_thread.start() 150 t_thread.start()
150 time.sleep(1) 151 time.sleep(1)
151 152
152 status, output = self.target.run('pidof sleep') 153 status, sleep_pid = self.target.run('pidof sleep')
153 # cause segfault on purpose 154 # cause segfault on purpose
154 self.target.run('kill -SEGV %s' % output) 155 self.target.run('kill -SEGV %s' % sleep_pid)
155 self.assertEqual(status, 0, msg = 'Not able to find process that runs sleep, output : %s' % output) 156 self.assertEqual(status, 0, msg = 'Not able to find process that runs sleep, output : %s' % sleep_pid)
156 157
157 (status, output) = self.target.run('coredumpctl info') 158 # Give some time to systemd-coredump@.service to process the coredump
159 for x in range(20):
160 status, output = self.target.run('coredumpctl list %s' % sleep_pid)
161 if status == 0:
162 break
163 time.sleep(1)
164 else:
165 self.fail("Timed out waiting for coredump creation")
166
167 (status, output) = self.target.run('coredumpctl info %s' % sleep_pid)
158 self.assertEqual(status, 0, msg='MiniDebugInfo Test failed: %s' % output) 168 self.assertEqual(status, 0, msg='MiniDebugInfo Test failed: %s' % output)
159 self.assertEqual('sleep_for_duration (busybox.nosuid' in output, True, msg='Call stack is missing minidebuginfo symbols (functions shown as "n/a"): %s' % output) 169 self.assertEqual('sleep_for_duration (busybox.nosuid' in output or 'xnanosleep (sleep.coreutils' in output,
170 True, msg='Call stack is missing minidebuginfo symbols (functions shown as "n/a"): %s' % output)
160 171
161class SystemdJournalTests(SystemdTest): 172class SystemdJournalTests(SystemdTest):
162 173
diff --git a/meta/lib/oeqa/runtime/context.py b/meta/lib/oeqa/runtime/context.py
index cb7227a8df..daabc44910 100644
--- a/meta/lib/oeqa/runtime/context.py
+++ b/meta/lib/oeqa/runtime/context.py
@@ -8,6 +8,7 @@ import os
8import sys 8import sys
9 9
10from oeqa.core.context import OETestContext, OETestContextExecutor 10from oeqa.core.context import OETestContext, OETestContextExecutor
11from oeqa.core.target.serial import OESerialTarget
11from oeqa.core.target.ssh import OESSHTarget 12from oeqa.core.target.ssh import OESSHTarget
12from oeqa.core.target.qemu import OEQemuTarget 13from oeqa.core.target.qemu import OEQemuTarget
13 14
@@ -60,7 +61,7 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
60 runtime_group = self.parser.add_argument_group('runtime options') 61 runtime_group = self.parser.add_argument_group('runtime options')
61 62
62 runtime_group.add_argument('--target-type', action='store', 63 runtime_group.add_argument('--target-type', action='store',
63 default=self.default_target_type, choices=['simpleremote', 'qemu'], 64 default=self.default_target_type, choices=['simpleremote', 'qemu', 'serial'],
64 help="Target type of device under test, default: %s" \ 65 help="Target type of device under test, default: %s" \
65 % self.default_target_type) 66 % self.default_target_type)
66 runtime_group.add_argument('--target-ip', action='store', 67 runtime_group.add_argument('--target-ip', action='store',
@@ -108,6 +109,8 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
108 target = OESSHTarget(logger, target_ip, server_ip, **kwargs) 109 target = OESSHTarget(logger, target_ip, server_ip, **kwargs)
109 elif target_type == 'qemu': 110 elif target_type == 'qemu':
110 target = OEQemuTarget(logger, server_ip, **kwargs) 111 target = OEQemuTarget(logger, server_ip, **kwargs)
112 elif target_type == 'serial':
113 target = OESerialTarget(logger, target_ip, server_ip, **kwargs)
111 else: 114 else:
112 # XXX: This code uses the old naming convention for controllers and 115 # XXX: This code uses the old naming convention for controllers and
113 # targets, the idea it is to leave just targets as the controller 116 # targets, the idea it is to leave just targets as the controller
@@ -203,8 +206,15 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
203 206
204 super(OERuntimeTestContextExecutor, self)._process_args(logger, args) 207 super(OERuntimeTestContextExecutor, self)._process_args(logger, args)
205 208
209 td = self.tc_kwargs['init']['td']
210
206 target_kwargs = {} 211 target_kwargs = {}
212 target_kwargs['machine'] = td.get("MACHINE") or None
207 target_kwargs['qemuboot'] = args.qemu_boot 213 target_kwargs['qemuboot'] = args.qemu_boot
214 target_kwargs['serialcontrol_cmd'] = td.get("TEST_SERIALCONTROL_CMD") or None
215 target_kwargs['serialcontrol_extra_args'] = td.get("TEST_SERIALCONTROL_EXTRA_ARGS") or ""
216 target_kwargs['serialcontrol_ps1'] = td.get("TEST_SERIALCONTROL_PS1") or None
217 target_kwargs['serialcontrol_connect_timeout'] = td.get("TEST_SERIALCONTROL_CONNECT_TIMEOUT") or None
208 218
209 self.tc_kwargs['init']['target'] = \ 219 self.tc_kwargs['init']['target'] = \
210 OERuntimeTestContextExecutor.getTarget(args.target_type, 220 OERuntimeTestContextExecutor.getTarget(args.target_type,
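With the plumbing above, a serial run is selected purely via --target-type serial plus the TEST_SERIALCONTROL_* test data, and the executor dispatches to OESerialTarget just as it already does for ssh and qemu targets. A condensed sketch of that dispatch (the real getTarget signature is abbreviated here; only the serial branch itself comes from the patch):

    from oeqa.core.target.serial import OESerialTarget

    def pick_target(target_type, logger, target_ip, server_ip, **kwargs):
        # kwargs carries machine, serialcontrol_cmd, serialcontrol_extra_args,
        # serialcontrol_ps1 and serialcontrol_connect_timeout from the test data.
        if target_type == 'serial':
            return OESerialTarget(logger, target_ip, server_ip, **kwargs)
        raise ValueError("unsupported target type: %s" % target_type)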
diff --git a/meta/lib/oeqa/sdk/case.py b/meta/lib/oeqa/sdk/case.py
index c45882689c..46a3789f57 100644
--- a/meta/lib/oeqa/sdk/case.py
+++ b/meta/lib/oeqa/sdk/case.py
@@ -6,6 +6,7 @@
6 6
7import os 7import os
8import subprocess 8import subprocess
9import shutil
9 10
10from oeqa.core.case import OETestCase 11from oeqa.core.case import OETestCase
11 12
@@ -21,12 +22,14 @@ class OESDKTestCase(OETestCase):
21 archive = os.path.basename(urlparse(url).path) 22 archive = os.path.basename(urlparse(url).path)
22 23
23 if dl_dir: 24 if dl_dir:
24 tarball = os.path.join(dl_dir, archive) 25 archive_tarball = os.path.join(dl_dir, archive)
25 if os.path.exists(tarball): 26 if os.path.exists(archive_tarball):
26 return tarball 27 return archive_tarball
27 28
28 tarball = os.path.join(workdir, archive) 29 tarball = os.path.join(workdir, archive)
29 subprocess.check_output(["wget", "-O", tarball, url], stderr=subprocess.STDOUT) 30 subprocess.check_output(["wget", "-O", tarball, url], stderr=subprocess.STDOUT)
31 if dl_dir and not os.path.exists(archive_tarball):
32 shutil.copyfile(tarball, archive_tarball)
30 return tarball 33 return tarball
31 34
32 def check_elf(self, path, target_os=None, target_arch=None): 35 def check_elf(self, path, target_os=None, target_arch=None):
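The fetch() change turns DL_DIR into a persistent cache for SDK test sources: reuse the archive if it is already cached, otherwise download into the work directory and copy the result back for the next run. Restated as a self-contained sketch (the function name is ours; the wget call and the copy-back mirror the patch):

    import os
    import shutil
    import subprocess
    from urllib.parse import urlparse

    def fetch_cached(workdir, dl_dir, url):
        archive = os.path.basename(urlparse(url).path)
        if dl_dir:
            cached = os.path.join(dl_dir, archive)
            if os.path.exists(cached):
                return cached
        tarball = os.path.join(workdir, archive)
        subprocess.check_output(["wget", "-O", tarball, url], stderr=subprocess.STDOUT)
        if dl_dir and not os.path.exists(cached):
            # Populate the cache so later runs skip the download entirely.
            shutil.copyfile(tarball, cached)
        return tarball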
diff --git a/meta/lib/oeqa/sdk/cases/buildcpio.py b/meta/lib/oeqa/sdk/cases/autotools.py
index 51003b19cd..4bac28f04d 100644
--- a/meta/lib/oeqa/sdk/cases/buildcpio.py
+++ b/meta/lib/oeqa/sdk/cases/autotools.py
@@ -13,10 +13,15 @@ from oeqa.sdk.case import OESDKTestCase
13from oeqa.utils.subprocesstweak import errors_have_output 13from oeqa.utils.subprocesstweak import errors_have_output
14errors_have_output() 14errors_have_output()
15 15
16class BuildCpioTest(OESDKTestCase): 16class AutotoolsTest(OESDKTestCase):
17 """ 17 """
18 Check that autotools will cross-compile correctly. 18 Check that autotools will cross-compile correctly.
19 """ 19 """
20 def setUp(self):
21 libc = self.td.get("TCLIBC")
22 if libc in [ 'newlib' ]:
23 raise unittest.SkipTest("AutotoolsTest class: SDK doesn't contain a supported C library")
24
20 def test_cpio(self): 25 def test_cpio(self):
21 with tempfile.TemporaryDirectory(prefix="cpio-", dir=self.tc.sdk_dir) as testdir: 26 with tempfile.TemporaryDirectory(prefix="cpio-", dir=self.tc.sdk_dir) as testdir:
22 tarball = self.fetch(testdir, self.td["DL_DIR"], "https://ftp.gnu.org/gnu/cpio/cpio-2.15.tar.gz") 27 tarball = self.fetch(testdir, self.td["DL_DIR"], "https://ftp.gnu.org/gnu/cpio/cpio-2.15.tar.gz")
diff --git a/meta/lib/oeqa/sdk/cases/assimp.py b/meta/lib/oeqa/sdk/cases/cmake.py
index e986838aea..cb0944ee99 100644
--- a/meta/lib/oeqa/sdk/cases/assimp.py
+++ b/meta/lib/oeqa/sdk/cases/cmake.py
@@ -13,22 +13,26 @@ from oeqa.sdk.case import OESDKTestCase
13from oeqa.utils.subprocesstweak import errors_have_output 13from oeqa.utils.subprocesstweak import errors_have_output
14errors_have_output() 14errors_have_output()
15 15
16class BuildAssimp(OESDKTestCase): 16class CMakeTest(OESDKTestCase):
17 """ 17 """
18 Test case to build a project using cmake. 18 Test case to build a project using cmake.
19 """ 19 """
20 20
21 def setUp(self): 21 def setUp(self):
22 libc = self.td.get("TCLIBC")
23 if libc in [ 'newlib' ]:
24 raise unittest.SkipTest("CMakeTest class: SDK doesn't contain a supported C library")
25
22 if not (self.tc.hasHostPackage("nativesdk-cmake") or 26 if not (self.tc.hasHostPackage("nativesdk-cmake") or
23 self.tc.hasHostPackage("cmake-native")): 27 self.tc.hasHostPackage("cmake-native")):
24 raise unittest.SkipTest("Needs cmake") 28 raise unittest.SkipTest("CMakeTest: needs cmake")
25 29
26 def test_assimp(self): 30 def test_assimp(self):
27 with tempfile.TemporaryDirectory(prefix="assimp", dir=self.tc.sdk_dir) as testdir: 31 with tempfile.TemporaryDirectory(prefix="assimp", dir=self.tc.sdk_dir) as testdir:
28 tarball = self.fetch(testdir, self.td["DL_DIR"], "https://github.com/assimp/assimp/archive/v5.3.1.tar.gz") 32 tarball = self.fetch(testdir, self.td["DL_DIR"], "https://github.com/assimp/assimp/archive/v5.4.1.tar.gz")
29 33
30 dirs = {} 34 dirs = {}
31 dirs["source"] = os.path.join(testdir, "assimp-5.3.1") 35 dirs["source"] = os.path.join(testdir, "assimp-5.4.1")
32 dirs["build"] = os.path.join(testdir, "build") 36 dirs["build"] = os.path.join(testdir, "build")
33 dirs["install"] = os.path.join(testdir, "install") 37 dirs["install"] = os.path.join(testdir, "install")
34 38
@@ -39,7 +43,7 @@ class BuildAssimp(OESDKTestCase):
39 self._run("sed -i '/# ifdef _FILE_OFFSET_BITS/I,+2 d' {source}/contrib/zlib/gzguts.h".format(**dirs)) 43 self._run("sed -i '/# ifdef _FILE_OFFSET_BITS/I,+2 d' {source}/contrib/zlib/gzguts.h".format(**dirs))
40 os.makedirs(dirs["build"]) 44 os.makedirs(dirs["build"])
41 45
42 self._run("cd {build} && cmake -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON -DASSIMP_BUILD_ZLIB=ON {source}".format(**dirs)) 46 self._run("cd {build} && cmake -DASSIMP_WARNINGS_AS_ERRORS=OFF -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON -DASSIMP_BUILD_ZLIB=ON {source}".format(**dirs))
43 self._run("cmake --build {build} -- -j".format(**dirs)) 47 self._run("cmake --build {build} -- -j".format(**dirs))
44 self._run("cmake --build {build} --target install -- DESTDIR={install}".format(**dirs)) 48 self._run("cmake --build {build} --target install -- DESTDIR={install}".format(**dirs))
45 self.check_elf(os.path.join(dirs["install"], "usr", "local", "lib", "libassimp.so.5.3.0")) 49 self.check_elf(os.path.join(dirs["install"], "usr", "local", "lib", "libassimp.so.5.4.1"))
diff --git a/meta/lib/oeqa/sdk/cases/gcc.py b/meta/lib/oeqa/sdk/cases/gcc.py
index fc28b9c3d4..e810d2c42b 100644
--- a/meta/lib/oeqa/sdk/cases/gcc.py
+++ b/meta/lib/oeqa/sdk/cases/gcc.py
@@ -26,6 +26,10 @@ class GccCompileTest(OESDKTestCase):
26 os.path.join(self.tc.sdk_dir, f)) 26 os.path.join(self.tc.sdk_dir, f))
27 27
28 def setUp(self): 28 def setUp(self):
29 libc = self.td.get("TCLIBC")
30 if libc in [ 'newlib' ]:
31 raise unittest.SkipTest("GccCompileTest class: SDK doesn't contain a supported C library")
32
29 machine = self.td.get("MACHINE") 33 machine = self.td.get("MACHINE")
30 if not (self.tc.hasHostPackage("packagegroup-cross-canadian-%s" % machine) or 34 if not (self.tc.hasHostPackage("packagegroup-cross-canadian-%s" % machine) or
31 self.tc.hasHostPackage("^gcc-", regex=True)): 35 self.tc.hasHostPackage("^gcc-", regex=True)):
diff --git a/meta/lib/oeqa/sdk/cases/buildgalculator.py b/meta/lib/oeqa/sdk/cases/gtk3.py
index 178f07472d..8f60d5e7da 100644
--- a/meta/lib/oeqa/sdk/cases/buildgalculator.py
+++ b/meta/lib/oeqa/sdk/cases/gtk3.py
@@ -13,11 +13,15 @@ from oeqa.sdk.case import OESDKTestCase
13from oeqa.utils.subprocesstweak import errors_have_output 13from oeqa.utils.subprocesstweak import errors_have_output
14errors_have_output() 14errors_have_output()
15 15
16class GalculatorTest(OESDKTestCase): 16class GTK3Test(OESDKTestCase):
17 """ 17 """
18 Test that autotools and GTK+ 3 compiles correctly. 18 Test that autotools and GTK+ 3 compiles correctly.
19 """ 19 """
20 def setUp(self): 20 def setUp(self):
21 libc = self.td.get("TCLIBC")
22 if libc in [ 'newlib' ]:
23 raise unittest.SkipTest("GTK3Test class: SDK doesn't contain a supported C library")
24
21 if not (self.tc.hasTargetPackage("gtk+3", multilib=True) or \ 25 if not (self.tc.hasTargetPackage("gtk+3", multilib=True) or \
22 self.tc.hasTargetPackage("libgtk-3.0", multilib=True)): 26 self.tc.hasTargetPackage("libgtk-3.0", multilib=True)):
23 raise unittest.SkipTest("GalculatorTest class: SDK don't support gtk+3") 27 raise unittest.SkipTest("GalculatorTest class: SDK don't support gtk+3")
diff --git a/meta/lib/oeqa/sdk/cases/kmod.py b/meta/lib/oeqa/sdk/cases/kmod.py
new file mode 100644
index 0000000000..9e8fdbcd40
--- /dev/null
+++ b/meta/lib/oeqa/sdk/cases/kmod.py
@@ -0,0 +1,41 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os
8import subprocess
9import tempfile
10import unittest
11
12from oeqa.sdk.case import OESDKTestCase
13from oeqa.utils.subprocesstweak import errors_have_output
14errors_have_output()
15
16class KernelModuleTest(OESDKTestCase):
17 """
18 Test that out-of-tree kernel modules build.
19 """
20
21 def setUp(self):
22 if not self.tc.hasTargetPackage("kernel-devsrc"):
23 raise unittest.SkipTest("KernelModuleTest needs kernel-devsrc")
24
25 # These targets need to be built before kernel modules can be built.
26 self._run("make -j -C $OECORE_TARGET_SYSROOT/usr/src/kernel prepare scripts")
27
28
29 def test_cryptodev(self):
30 with tempfile.TemporaryDirectory(prefix="cryptodev", dir=self.tc.sdk_dir) as testdir:
31 git_url = "https://github.com/cryptodev-linux/cryptodev-linux"
32            # This is a known-good commit post-1.13 that builds with kernel 6.7+
33 git_sha = "bb8bc7cf60d2c0b097c8b3b0e807f805b577a53f"
34
35 sourcedir = os.path.join(testdir, "cryptodev-linux")
36 subprocess.check_output(["git", "clone", git_url, sourcedir], stderr=subprocess.STDOUT)
37 self.assertTrue(os.path.isdir(sourcedir))
38 subprocess.check_output(["git", "-C", sourcedir, "checkout", git_sha], stderr=subprocess.STDOUT)
39
40 self._run("make -C %s V=1 KERNEL_DIR=$OECORE_TARGET_SYSROOT/usr/src/kernel" % sourcedir)
41 self.check_elf(os.path.join(sourcedir, "cryptodev.ko"))
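Outside the test harness the same two-step flow applies: prepare the kernel source tree shipped by kernel-devsrc once, then point the module's Makefile at it. An illustrative sketch, assuming the SDK environment setup script has already been sourced so OECORE_TARGET_SYSROOT and the cross toolchain variables are exported:

    import os
    import subprocess

    kernel_dir = os.path.join(os.environ["OECORE_TARGET_SYSROOT"], "usr/src/kernel")
    # One-time preparation of the kernel tree, as in setUp() above.
    subprocess.check_call(["make", "-j", "-C", kernel_dir, "prepare", "scripts"])
    # Build the out-of-tree module against that kernel tree.
    subprocess.check_call(["make", "-C", "cryptodev-linux", "V=1", "KERNEL_DIR=" + kernel_dir])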
diff --git a/meta/lib/oeqa/sdk/cases/buildlzip.py b/meta/lib/oeqa/sdk/cases/makefile.py
index b4b7d85b88..e1e2484820 100644
--- a/meta/lib/oeqa/sdk/cases/buildlzip.py
+++ b/meta/lib/oeqa/sdk/cases/makefile.py
@@ -4,15 +4,21 @@
4# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
5# 5#
6 6
7import os, tempfile, subprocess, unittest 7import os, tempfile, subprocess
8import unittest
8from oeqa.sdk.case import OESDKTestCase 9from oeqa.sdk.case import OESDKTestCase
9from oeqa.utils.subprocesstweak import errors_have_output 10from oeqa.utils.subprocesstweak import errors_have_output
10errors_have_output() 11errors_have_output()
11 12
12class BuildLzipTest(OESDKTestCase): 13class MakefileTest(OESDKTestCase):
13 """ 14 """
14 Test that "plain" compilation works, using just $CC $CFLAGS etc. 15 Test that "plain" compilation works, using just $CC $CFLAGS etc.
15 """ 16 """
17 def setUp(self):
18 libc = self.td.get("TCLIBC")
19 if libc in [ 'newlib' ]:
20 raise unittest.SkipTest("MakefileTest class: SDK doesn't contain a supported C library")
21
16 def test_lzip(self): 22 def test_lzip(self):
17 with tempfile.TemporaryDirectory(prefix="lzip", dir=self.tc.sdk_dir) as testdir: 23 with tempfile.TemporaryDirectory(prefix="lzip", dir=self.tc.sdk_dir) as testdir:
18 tarball = self.fetch(testdir, self.td["DL_DIR"], "http://downloads.yoctoproject.org/mirror/sources/lzip-1.19.tar.gz") 24 tarball = self.fetch(testdir, self.td["DL_DIR"], "http://downloads.yoctoproject.org/mirror/sources/lzip-1.19.tar.gz")
diff --git a/meta/lib/oeqa/sdk/cases/maturin.py b/meta/lib/oeqa/sdk/cases/maturin.py
index ea10f568b2..20f6b553d0 100644
--- a/meta/lib/oeqa/sdk/cases/maturin.py
+++ b/meta/lib/oeqa/sdk/cases/maturin.py
@@ -8,7 +8,6 @@ import os
8import shutil 8import shutil
9import unittest 9import unittest
10 10
11from oeqa.core.utils.path import remove_safe
12from oeqa.sdk.case import OESDKTestCase 11from oeqa.sdk.case import OESDKTestCase
13from oeqa.utils.subprocesstweak import errors_have_output 12from oeqa.utils.subprocesstweak import errors_have_output
14 13
diff --git a/meta/lib/oeqa/sdk/cases/buildepoxy.py b/meta/lib/oeqa/sdk/cases/meson.py
index 147ee3e0ee..1edf78720a 100644
--- a/meta/lib/oeqa/sdk/cases/buildepoxy.py
+++ b/meta/lib/oeqa/sdk/cases/meson.py
@@ -13,14 +13,18 @@ from oeqa.sdk.case import OESDKTestCase
13from oeqa.utils.subprocesstweak import errors_have_output 13from oeqa.utils.subprocesstweak import errors_have_output
14errors_have_output() 14errors_have_output()
15 15
16class EpoxyTest(OESDKTestCase): 16class MesonTest(OESDKTestCase):
17 """ 17 """
18 Test that Meson builds correctly. 18 Test that Meson builds correctly.
19 """ 19 """
20 def setUp(self): 20 def setUp(self):
21 libc = self.td.get("TCLIBC")
22 if libc in [ 'newlib' ]:
23 raise unittest.SkipTest("MesonTest class: SDK doesn't contain a supported C library")
24
21 if not (self.tc.hasHostPackage("nativesdk-meson") or 25 if not (self.tc.hasHostPackage("nativesdk-meson") or
22 self.tc.hasHostPackage("meson-native")): 26 self.tc.hasHostPackage("meson-native")):
23 raise unittest.SkipTest("EpoxyTest class: SDK doesn't contain Meson") 27 raise unittest.SkipTest("MesonTest: needs meson")
24 28
25 def test_epoxy(self): 29 def test_epoxy(self):
26 with tempfile.TemporaryDirectory(prefix="epoxy", dir=self.tc.sdk_dir) as testdir: 30 with tempfile.TemporaryDirectory(prefix="epoxy", dir=self.tc.sdk_dir) as testdir:
diff --git a/meta/lib/oeqa/sdk/cases/python.py b/meta/lib/oeqa/sdk/cases/python.py
index 5ea992b9f3..51284949f5 100644
--- a/meta/lib/oeqa/sdk/cases/python.py
+++ b/meta/lib/oeqa/sdk/cases/python.py
@@ -4,7 +4,7 @@
4# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
5# 5#
6 6
7import subprocess, unittest 7import unittest
8from oeqa.sdk.case import OESDKTestCase 8from oeqa.sdk.case import OESDKTestCase
9 9
10from oeqa.utils.subprocesstweak import errors_have_output 10from oeqa.utils.subprocesstweak import errors_have_output
diff --git a/meta/lib/oeqa/sdk/cases/rust.py b/meta/lib/oeqa/sdk/cases/rust.py
index f5d437bb19..a54245851b 100644
--- a/meta/lib/oeqa/sdk/cases/rust.py
+++ b/meta/lib/oeqa/sdk/cases/rust.py
@@ -8,7 +8,6 @@ import os
8import shutil 8import shutil
9import unittest 9import unittest
10 10
11from oeqa.core.utils.path import remove_safe
12from oeqa.sdk.case import OESDKTestCase 11from oeqa.sdk.case import OESDKTestCase
13 12
14from oeqa.utils.subprocesstweak import errors_have_output 13from oeqa.utils.subprocesstweak import errors_have_output
diff --git a/meta/lib/oeqa/sdkext/cases/devtool.py b/meta/lib/oeqa/sdkext/cases/devtool.py
index 5ffb732556..d0746e68eb 100644
--- a/meta/lib/oeqa/sdkext/cases/devtool.py
+++ b/meta/lib/oeqa/sdkext/cases/devtool.py
@@ -69,10 +69,9 @@ class DevtoolTest(OESDKExtTestCase):
69 self._test_devtool_build(self.myapp_cmake_dst) 69 self._test_devtool_build(self.myapp_cmake_dst)
70 70
71 def test_extend_autotools_recipe_creation(self): 71 def test_extend_autotools_recipe_creation(self):
72 req = 'https://github.com/rdfa/librdfa' 72 recipe = "test-dbus-wait"
73 recipe = "librdfa" 73 self._run('devtool sdk-install dbus')
74 self._run('devtool sdk-install libxml2') 74 self._run('devtool add %s https://git.yoctoproject.org/git/dbus-wait' % (recipe) )
75 self._run('devtool add %s %s' % (recipe, req) )
76 try: 75 try:
77 self._run('devtool build %s' % recipe) 76 self._run('devtool build %s' % recipe)
78 finally: 77 finally:
diff --git a/meta/lib/oeqa/selftest/cases/bbclasses.py b/meta/lib/oeqa/selftest/cases/bbclasses.py
new file mode 100644
index 0000000000..10545ebe65
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/bbclasses.py
@@ -0,0 +1,106 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7from oeqa.selftest.case import OESelftestTestCase
8from oeqa.utils.commands import get_bb_vars, bitbake
9
10class Systemd(OESelftestTestCase):
11 """
12 Tests related to the systemd bbclass.
13 """
14
15 def getVars(self, recipe):
16 self.bb_vars = get_bb_vars(
17 [
18 'BPN',
19 'D',
20 'INIT_D_DIR',
21 'prefix',
22 'systemd_system_unitdir',
23 'sysconfdir',
24 ],
25 recipe,
26 )
27
28 def fileExists(self, filename):
29 self.assertExists(filename.format(**self.bb_vars))
30
31 def fileNotExists(self, filename):
32 self.assertNotExists(filename.format(**self.bb_vars))
33
34 def test_systemd_in_distro(self):
35 """
36 Summary: Verify that no sysvinit files are installed when the
37 systemd distro feature is enabled, but sysvinit is not.
38 Expected: Systemd service file exists, but /etc does not.
39 Product: OE-Core
40 Author: Peter Kjellerstedt <peter.kjellerstedt@axis.com>
41 """
42
43 self.write_config("""
44DISTRO_FEATURES:append = " systemd usrmerge"
45DISTRO_FEATURES:remove = "sysvinit"
46VIRTUAL-RUNTIME_init_manager = "systemd"
47""")
48 bitbake("systemd-only systemd-and-sysvinit -c install")
49
50 self.getVars("systemd-only")
51 self.fileExists("{D}{systemd_system_unitdir}/{BPN}.service")
52
53 self.getVars("systemd-and-sysvinit")
54 self.fileExists("{D}{systemd_system_unitdir}/{BPN}.service")
55 self.fileNotExists("{D}{sysconfdir}")
56
57 def test_systemd_and_sysvinit_in_distro(self):
58 """
59 Summary: Verify that both systemd and sysvinit files are installed
60 when both the systemd and sysvinit distro features are
61 enabled.
62 Expected: Systemd service file and sysvinit initscript exist.
63 Product: OE-Core
64 Author: Peter Kjellerstedt <peter.kjellerstedt@axis.com>
65 """
66
67 self.write_config("""
68DISTRO_FEATURES:append = " systemd sysvinit usrmerge"
69VIRTUAL-RUNTIME_init_manager = "systemd"
70""")
71 bitbake("systemd-only systemd-and-sysvinit -c install")
72
73 self.getVars("systemd-only")
74 self.fileExists("{D}{systemd_system_unitdir}/{BPN}.service")
75
76 self.getVars("systemd-and-sysvinit")
77 self.fileExists("{D}{systemd_system_unitdir}/{BPN}.service")
78 self.fileExists("{D}{INIT_D_DIR}/{BPN}")
79
80 def test_sysvinit_in_distro(self):
81 """
82 Summary: Verify that no systemd service files are installed when the
83 sysvinit distro feature is enabled, but systemd is not.
84 Expected: The systemd service file does not exist, nor does /usr.
85 The sysvinit initscript exists.
86 Product: OE-Core
87 Author: Peter Kjellerstedt <peter.kjellerstedt@axis.com>
88 """
89
90 self.write_config("""
91DISTRO_FEATURES:remove = "systemd"
92DISTRO_FEATURES:append = " sysvinit usrmerge"
93VIRTUAL-RUNTIME_init_manager = "sysvinit"
94""")
95 bitbake("systemd-only systemd-and-sysvinit -c install")
96
97 self.getVars("systemd-only")
98 self.fileNotExists("{D}{systemd_system_unitdir}/{BPN}.service")
99 self.fileNotExists("{D}{prefix}")
100 self.fileNotExists("{D}{sysconfdir}")
101 self.fileExists("{D}")
102
103 self.getVars("systemd-and-sysvinit")
104 self.fileNotExists("{D}{systemd_system_unitdir}/{BPN}.service")
105 self.fileNotExists("{D}{prefix}")
106 self.fileExists("{D}{INIT_D_DIR}/{BPN}")
diff --git a/meta/lib/oeqa/selftest/cases/binutils.py b/meta/lib/oeqa/selftest/cases/binutils.py
index 1688eabe4e..5ff263d342 100644
--- a/meta/lib/oeqa/selftest/cases/binutils.py
+++ b/meta/lib/oeqa/selftest/cases/binutils.py
@@ -33,7 +33,7 @@ class BinutilsCrossSelfTest(OESelftestTestCase, OEPTestResultTestCase):
33 features.append('CHECK_TARGETS = "{0}"'.format(suite)) 33 features.append('CHECK_TARGETS = "{0}"'.format(suite))
34 self.write_config("\n".join(features)) 34 self.write_config("\n".join(features))
35 35
36 recipe = "binutils-cross-testsuite" 36 recipe = "binutils-testsuite"
37 bb_vars = get_bb_vars(["B", "TARGET_SYS", "T"], recipe) 37 bb_vars = get_bb_vars(["B", "TARGET_SYS", "T"], recipe)
38 builddir, target_sys, tdir = bb_vars["B"], bb_vars["TARGET_SYS"], bb_vars["T"] 38 builddir, target_sys, tdir = bb_vars["B"], bb_vars["TARGET_SYS"], bb_vars["T"]
39 39
diff --git a/meta/lib/oeqa/selftest/cases/buildoptions.py b/meta/lib/oeqa/selftest/cases/buildoptions.py
index 31dafaa9c5..423c31e189 100644
--- a/meta/lib/oeqa/selftest/cases/buildoptions.py
+++ b/meta/lib/oeqa/selftest/cases/buildoptions.py
@@ -84,7 +84,7 @@ class SanityOptionsTest(OESelftestTestCase):
84 84
85 self.write_config("INHERIT:remove = \"report-error\"") 85 self.write_config("INHERIT:remove = \"report-error\"")
86 if "packages-list" not in get_bb_var("ERROR_QA"): 86 if "packages-list" not in get_bb_var("ERROR_QA"):
87 self.append_config("ERROR_QA:append = \" packages-list\"") 87 self.append_config("ERROR_QA:append:pn-xcursor-transparent-theme = \" packages-list\"")
88 88
89 self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"') 89 self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"')
90 self.add_command_to_tearDown('bitbake -c clean xcursor-transparent-theme') 90 self.add_command_to_tearDown('bitbake -c clean xcursor-transparent-theme')
@@ -94,8 +94,8 @@ class SanityOptionsTest(OESelftestTestCase):
94 self.assertTrue(line and line.startswith("ERROR:"), msg=res.output) 94 self.assertTrue(line and line.startswith("ERROR:"), msg=res.output)
95 self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output)) 95 self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output))
96 self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"') 96 self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"')
97 self.append_config('ERROR_QA:remove = "packages-list"') 97 self.append_config('ERROR_QA:remove:pn-xcursor-transparent-theme = "packages-list"')
98 self.append_config('WARN_QA:append = " packages-list"') 98 self.append_config('WARN_QA:append:pn-xcursor-transparent-theme = " packages-list"')
99 res = bitbake("xcursor-transparent-theme -f -c package") 99 res = bitbake("xcursor-transparent-theme -f -c package")
100 self.delete_recipeinc('xcursor-transparent-theme') 100 self.delete_recipeinc('xcursor-transparent-theme')
101 line = self.getline(res, "QA Issue: xcursor-transparent-theme-dbg is listed in PACKAGES multiple times, this leads to packaging errors.") 101 line = self.getline(res, "QA Issue: xcursor-transparent-theme-dbg is listed in PACKAGES multiple times, this leads to packaging errors.")
@@ -173,8 +173,8 @@ class BuildhistoryTests(BuildhistoryBase):
173 173
174 data = load_bh(os.path.join(history_dir, 'hicolor-icon-theme-dev', 'latest')) 174 data = load_bh(os.path.join(history_dir, 'hicolor-icon-theme-dev', 'latest'))
175 if 'FILELIST' in data: 175 if 'FILELIST' in data:
176 self.assertEqual(data['FILELIST'], '') 176 self.assertEqual(data['FILELIST'], '/usr/share/pkgconfig/default-icon-theme.pc')
177 self.assertEqual(int(data['PKGSIZE']), 0) 177 self.assertGreater(int(data['PKGSIZE']), 0)
178 178
179class ArchiverTest(OESelftestTestCase): 179class ArchiverTest(OESelftestTestCase):
180 def test_arch_work_dir_and_export_source(self): 180 def test_arch_work_dir_and_export_source(self):
diff --git a/meta/lib/oeqa/selftest/cases/cve_check.py b/meta/lib/oeqa/selftest/cases/cve_check.py
index 60cecd1328..3dd3e89d3e 100644
--- a/meta/lib/oeqa/selftest/cases/cve_check.py
+++ b/meta/lib/oeqa/selftest/cases/cve_check.py
@@ -72,6 +72,54 @@ class CVECheck(OESelftestTestCase):
72 self.assertEqual(convert_cve_version("6.2_rc8"), "6.2-rc8") 72 self.assertEqual(convert_cve_version("6.2_rc8"), "6.2-rc8")
73 self.assertEqual(convert_cve_version("6.2_rc31"), "6.2-rc31") 73 self.assertEqual(convert_cve_version("6.2_rc31"), "6.2-rc31")
74 74
75 def test_product_match(self):
76 from oe.cve_check import has_cve_product_match
77
78 status = {}
79 status["detail"] = "ignored"
80 status["vendor"] = "*"
81 status["product"] = "*"
82 status["description"] = ""
83 status["mapping"] = ""
84
85 self.assertEqual(has_cve_product_match(status, "some_vendor:some_product"), True)
86 self.assertEqual(has_cve_product_match(status, "*:*"), True)
87 self.assertEqual(has_cve_product_match(status, "some_product"), True)
88 self.assertEqual(has_cve_product_match(status, "glibc"), True)
89 self.assertEqual(has_cve_product_match(status, "glibca"), True)
90 self.assertEqual(has_cve_product_match(status, "aglibc"), True)
91 self.assertEqual(has_cve_product_match(status, "*"), True)
92 self.assertEqual(has_cve_product_match(status, "aglibc glibc test:test"), True)
93
94 status["product"] = "glibc"
95 self.assertEqual(has_cve_product_match(status, "some_vendor:some_product"), False)
96 # The CPE in the recipe must be defined, no * accepted
97 self.assertEqual(has_cve_product_match(status, "*:*"), False)
98 self.assertEqual(has_cve_product_match(status, "*"), False)
99 self.assertEqual(has_cve_product_match(status, "some_product"), False)
100 self.assertEqual(has_cve_product_match(status, "glibc"), True)
101 self.assertEqual(has_cve_product_match(status, "glibca"), False)
102 self.assertEqual(has_cve_product_match(status, "aglibc"), False)
103 self.assertEqual(has_cve_product_match(status, "some_vendor:glibc"), True)
104 self.assertEqual(has_cve_product_match(status, "some_vendor:glibc test"), True)
105 self.assertEqual(has_cve_product_match(status, "test some_vendor:glibc"), True)
106
107 status["vendor"] = "glibca"
108 status["product"] = "glibc"
109 self.assertEqual(has_cve_product_match(status, "some_vendor:some_product"), False)
110 # The CPE in the recipe must be defined, no * accepted
111 self.assertEqual(has_cve_product_match(status, "*:*"), False)
112 self.assertEqual(has_cve_product_match(status, "*"), False)
113 self.assertEqual(has_cve_product_match(status, "some_product"), False)
114 self.assertEqual(has_cve_product_match(status, "glibc"), False)
115 self.assertEqual(has_cve_product_match(status, "glibca"), False)
116 self.assertEqual(has_cve_product_match(status, "aglibc"), False)
117 self.assertEqual(has_cve_product_match(status, "some_vendor:glibc"), False)
118 self.assertEqual(has_cve_product_match(status, "glibca:glibc"), True)
119 self.assertEqual(has_cve_product_match(status, "test:test glibca:glibc"), True)
120 self.assertEqual(has_cve_product_match(status, "test glibca:glibc"), True)
121 self.assertEqual(has_cve_product_match(status, "glibca:glibc test"), True)
122
75 123
76 def test_recipe_report_json(self): 124 def test_recipe_report_json(self):
77 config = """ 125 config = """
@@ -217,9 +265,10 @@ CVE_CHECK_REPORT_PATCHED = "1"
217 # m4 CVE should not be in logrotate 265 # m4 CVE should not be in logrotate
218 self.assertNotIn("CVE-2008-1687", found_cves) 266 self.assertNotIn("CVE-2008-1687", found_cves)
219 # logrotate has both Patched and Ignored CVEs 267 # logrotate has both Patched and Ignored CVEs
268 detail = "version-not-in-range"
220 self.assertIn("CVE-2011-1098", found_cves) 269 self.assertIn("CVE-2011-1098", found_cves)
221 self.assertEqual(found_cves["CVE-2011-1098"]["status"], "Patched") 270 self.assertEqual(found_cves["CVE-2011-1098"]["status"], "Patched")
222 self.assertEqual(len(found_cves["CVE-2011-1098"]["detail"]), 0) 271 self.assertEqual(found_cves["CVE-2011-1098"]["detail"], detail)
223 self.assertEqual(len(found_cves["CVE-2011-1098"]["description"]), 0) 272 self.assertEqual(len(found_cves["CVE-2011-1098"]["description"]), 0)
224 detail = "not-applicable-platform" 273 detail = "not-applicable-platform"
225 description = "CVE is debian, gentoo or SUSE specific on the way logrotate was installed/used" 274 description = "CVE is debian, gentoo or SUSE specific on the way logrotate was installed/used"
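Taken together, the new assertions pin down how has_cve_product_match treats the recipe's product list: each whitespace-separated entry is either "product" or "vendor:product", "*" on the status side acts as a wildcard, and once the status names a concrete product (or vendor) the recipe entry must name it explicitly rather than via "*". A rough reimplementation that is consistent with these assertions (an inference from the tests, not the actual oe.cve_check code):

    def product_match(status, products):
        for cpe in products.split():
            vendor, _, product = cpe.rpartition(":")
            vendor = vendor or None
            prod_ok = status["product"] == "*" or \
                (product != "*" and product == status["product"])
            vend_ok = status["vendor"] == "*" or \
                (vendor not in (None, "*") and vendor == status["vendor"])
            if prod_ok and vend_ok:
                return True
        return False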
diff --git a/meta/lib/oeqa/selftest/cases/debuginfod.py b/meta/lib/oeqa/selftest/cases/debuginfod.py
index 505b4be837..46c0cd87bb 100644
--- a/meta/lib/oeqa/selftest/cases/debuginfod.py
+++ b/meta/lib/oeqa/selftest/cases/debuginfod.py
@@ -62,7 +62,7 @@ class Debuginfod(OESelftestTestCase):
62 62
63 raise TimeoutError("Cannot connect debuginfod, still %d scan jobs running" % latest) 63 raise TimeoutError("Cannot connect debuginfod, still %d scan jobs running" % latest)
64 64
65 def start_debuginfod(self): 65 def start_debuginfod(self, feed_dir):
66 # We assume that the caller has already bitbake'd elfutils-native:do_addto_recipe_sysroot 66 # We assume that the caller has already bitbake'd elfutils-native:do_addto_recipe_sysroot
67 67
68 # Save some useful paths for later 68 # Save some useful paths for later
@@ -82,7 +82,7 @@ class Debuginfod(OESelftestTestCase):
82 # Disable rescanning, this is a one-shot test 82 # Disable rescanning, this is a one-shot test
83 "--rescan-time=0", 83 "--rescan-time=0",
84 "--groom-time=0", 84 "--groom-time=0",
85 get_bb_var("DEPLOY_DIR"), 85 feed_dir,
86 ] 86 ]
87 87
88 format = get_bb_var("PACKAGE_CLASSES").split()[0] 88 format = get_bb_var("PACKAGE_CLASSES").split()[0]
@@ -114,11 +114,12 @@ class Debuginfod(OESelftestTestCase):
114 self.write_config(""" 114 self.write_config("""
115TMPDIR = "${TOPDIR}/tmp-debuginfod" 115TMPDIR = "${TOPDIR}/tmp-debuginfod"
116DISTRO_FEATURES:append = " debuginfod" 116DISTRO_FEATURES:append = " debuginfod"
117INHERIT += "localpkgfeed"
117""") 118""")
118 bitbake("elfutils-native:do_addto_recipe_sysroot xz xz:do_package") 119 bitbake("elfutils-native:do_addto_recipe_sysroot xz xz:do_package xz:do_localpkgfeed")
119 120
120 try: 121 try:
121 self.start_debuginfod() 122 self.start_debuginfod(get_bb_var("LOCALPKGFEED_DIR", "xz"))
122 123
123 env = os.environ.copy() 124 env = os.environ.copy()
124 env["DEBUGINFOD_URLS"] = "http://localhost:%d/" % self.port 125 env["DEBUGINFOD_URLS"] = "http://localhost:%d/" % self.port
@@ -141,12 +142,13 @@ DISTRO_FEATURES:append = " debuginfod"
141 self.write_config(""" 142 self.write_config("""
142TMPDIR = "${TOPDIR}/tmp-debuginfod" 143TMPDIR = "${TOPDIR}/tmp-debuginfod"
143DISTRO_FEATURES:append = " debuginfod" 144DISTRO_FEATURES:append = " debuginfod"
145INHERIT += "localpkgfeed"
144CORE_IMAGE_EXTRA_INSTALL += "elfutils xz" 146CORE_IMAGE_EXTRA_INSTALL += "elfutils xz"
145 """) 147 """)
146 bitbake("core-image-minimal elfutils-native:do_addto_recipe_sysroot") 148 bitbake("core-image-minimal elfutils-native:do_addto_recipe_sysroot xz:do_localpkgfeed")
147 149
148 try: 150 try:
149 self.start_debuginfod() 151 self.start_debuginfod(get_bb_var("LOCALPKGFEED_DIR", "xz"))
150 152
151 with runqemu("core-image-minimal", runqemuparams="nographic") as qemu: 153 with runqemu("core-image-minimal", runqemuparams="nographic") as qemu:
152 cmd = "DEBUGINFOD_URLS=http://%s:%d/ debuginfod-find debuginfo /usr/bin/xz" % (qemu.server_ip, self.port) 154 cmd = "DEBUGINFOD_URLS=http://%s:%d/ debuginfod-find debuginfo /usr/bin/xz" % (qemu.server_ip, self.port)
diff --git a/meta/lib/oeqa/selftest/cases/devtool.py b/meta/lib/oeqa/selftest/cases/devtool.py
index 882225dde3..7d61773980 100644
--- a/meta/lib/oeqa/selftest/cases/devtool.py
+++ b/meta/lib/oeqa/selftest/cases/devtool.py
@@ -753,6 +753,25 @@ class DevtoolModifyTests(DevtoolBase):
753 result = runCmd('devtool status') 753 result = runCmd('devtool status')
754 self.assertNotIn('mdadm', result.output) 754 self.assertNotIn('mdadm', result.output)
755 755
756 def test_devtool_modify_go(self):
757 import oe.path
758 from tempfile import TemporaryDirectory
759 with TemporaryDirectory(prefix='devtoolqa') as tempdir:
760 self.track_for_cleanup(self.workspacedir)
761 self.add_command_to_tearDown('bitbake -c clean go-helloworld')
762 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
763 result = runCmd('devtool modify go-helloworld -x %s' % tempdir)
764 self.assertExists(
765 oe.path.join(tempdir, 'src', 'golang.org', 'x', 'example', 'go.mod'),
766 'Extracted source could not be found'
767 )
768 self.assertExists(
769 oe.path.join(self.workspacedir, 'conf', 'layer.conf'),
770 'Workspace directory not created'
771 )
772 matches = glob.glob(oe.path.join(self.workspacedir, 'appends', 'go-helloworld_*.bbappend'))
773 self.assertTrue(matches, 'bbappend not created %s' % result.output)
774
756 def test_devtool_buildclean(self): 775 def test_devtool_buildclean(self):
757 def assertFile(path, *paths): 776 def assertFile(path, *paths):
758 f = os.path.join(path, *paths) 777 f = os.path.join(path, *paths)
@@ -879,13 +898,8 @@ class DevtoolModifyTests(DevtoolBase):
879 self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe) 898 self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
880 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') 899 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
881 result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir)) 900 result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
882 srcfile = os.path.join(tempdir, 'oe-local-files/share/dot.bashrc') 901 srcfile = os.path.join(tempdir, 'share/dot.bashrc')
883 srclink = os.path.join(tempdir, 'share/dot.bashrc')
884 self.assertExists(srcfile, 'Extracted source could not be found') 902 self.assertExists(srcfile, 'Extracted source could not be found')
885 if os.path.islink(srclink) and os.path.exists(srclink) and os.path.samefile(srcfile, srclink):
886 correct_symlink = True
887 self.assertTrue(correct_symlink, 'Source symlink to oe-local-files is broken')
888
889 matches = glob.glob(os.path.join(self.workspacedir, 'appends', '%s_*.bbappend' % testrecipe)) 903 matches = glob.glob(os.path.join(self.workspacedir, 'appends', '%s_*.bbappend' % testrecipe))
890 self.assertTrue(matches, 'bbappend not created') 904 self.assertTrue(matches, 'bbappend not created')
891 # Test devtool status 905 # Test devtool status
@@ -956,9 +970,9 @@ class DevtoolModifyTests(DevtoolBase):
956 # others git:// in SRC_URI 970 # others git:// in SRC_URI
957        # contains a patch 971        # contains a patch
958 testrecipe = 'hello-rs' 972 testrecipe = 'hello-rs'
959 bb_vars = get_bb_vars(['SRC_URI', 'FILE', 'WORKDIR', 'CARGO_HOME'], testrecipe) 973 bb_vars = get_bb_vars(['SRC_URI', 'FILE', 'UNPACKDIR', 'CARGO_HOME'], testrecipe)
960 recipefile = bb_vars['FILE'] 974 recipefile = bb_vars['FILE']
961 workdir = bb_vars['WORKDIR'] 975 unpackdir = bb_vars['UNPACKDIR']
962 cargo_home = bb_vars['CARGO_HOME'] 976 cargo_home = bb_vars['CARGO_HOME']
963 src_uri = bb_vars['SRC_URI'].split() 977 src_uri = bb_vars['SRC_URI'].split()
964 self.assertTrue(src_uri[0].startswith('git://'), 978 self.assertTrue(src_uri[0].startswith('git://'),
@@ -1029,7 +1043,7 @@ class DevtoolModifyTests(DevtoolBase):
1029 self.assertEqual(parms['type'], 'git-dependency', 'git dependencies uri should have "type=git-dependency"') 1043 self.assertEqual(parms['type'], 'git-dependency', 'git dependencies uri should have "type=git-dependency"')
1030 raw_url = raw_url.replace("git://", '%s://' % parms['protocol']) 1044 raw_url = raw_url.replace("git://", '%s://' % parms['protocol'])
1031 patch_line = '[patch."%s"]' % raw_url 1045 patch_line = '[patch."%s"]' % raw_url
1032 path_patched = os.path.join(workdir, parms['destsuffix']) 1046 path_patched = os.path.join(unpackdir, parms['destsuffix'])
1033 path_override_line = '%s = { path = "%s" }' % (parms['name'], path_patched) 1047 path_override_line = '%s = { path = "%s" }' % (parms['name'], path_patched)
1034 # Would have been better to use tomllib to read this file :/ 1048 # Would have been better to use tomllib to read this file :/
1035 self.assertIn(patch_line, cargo_config_contents) 1049 self.assertIn(patch_line, cargo_config_contents)
@@ -1278,7 +1292,7 @@ class DevtoolUpdateTests(DevtoolBase):
1278 with open(bbappendfile, 'r') as f: 1292 with open(bbappendfile, 'r') as f:
1279 self.assertEqual(expectedlines, f.readlines()) 1293 self.assertEqual(expectedlines, f.readlines())
1280 # Drop new commit and check patch gets deleted 1294 # Drop new commit and check patch gets deleted
1281 result = runCmd('git reset HEAD^', cwd=tempsrcdir) 1295 result = runCmd('git reset HEAD^ --hard', cwd=tempsrcdir)
1282 result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir)) 1296 result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir))
1283 self.assertNotExists(patchfile, 'Patch file not deleted') 1297 self.assertNotExists(patchfile, 'Patch file not deleted')
1284 expectedlines2 = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n', 1298 expectedlines2 = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
@@ -1287,6 +1301,7 @@ class DevtoolUpdateTests(DevtoolBase):
1287 self.assertEqual(expectedlines2, f.readlines()) 1301 self.assertEqual(expectedlines2, f.readlines())
1288 # Put commit back and check we can run it if layer isn't in bblayers.conf 1302 # Put commit back and check we can run it if layer isn't in bblayers.conf
1289 os.remove(bbappendfile) 1303 os.remove(bbappendfile)
1304 result = runCmd("sed 's!\\(#define VERSION\\W*\"[^\"]*\\)\"!\\1-custom\"!' -i ReadMe.c", cwd=tempsrcdir)
1290 result = runCmd('git commit -a -m "Add our custom version"', cwd=tempsrcdir) 1305 result = runCmd('git commit -a -m "Add our custom version"', cwd=tempsrcdir)
1291 result = runCmd('bitbake-layers remove-layer %s' % templayerdir, cwd=self.builddir) 1306 result = runCmd('bitbake-layers remove-layer %s' % templayerdir, cwd=self.builddir)
1292 result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir)) 1307 result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir))
@@ -1361,7 +1376,7 @@ class DevtoolUpdateTests(DevtoolBase):
1361 with open(bbappendfile, 'r') as f: 1376 with open(bbappendfile, 'r') as f:
1362 self.assertEqual(expectedlines, set(f.readlines())) 1377 self.assertEqual(expectedlines, set(f.readlines()))
1363 # Drop new commit and check SRCREV changes 1378 # Drop new commit and check SRCREV changes
1364 result = runCmd('git reset HEAD^', cwd=tempsrcdir) 1379 result = runCmd('git reset HEAD^ --hard', cwd=tempsrcdir)
1365 result = runCmd('devtool update-recipe -m srcrev %s -a %s' % (testrecipe, templayerdir)) 1380 result = runCmd('devtool update-recipe -m srcrev %s -a %s' % (testrecipe, templayerdir))
1366 self.assertNotExists(os.path.join(appenddir, testrecipe), 'Patch directory should not be created') 1381 self.assertNotExists(os.path.join(appenddir, testrecipe), 'Patch directory should not be created')
1367 result = runCmd('git rev-parse HEAD', cwd=tempsrcdir) 1382 result = runCmd('git rev-parse HEAD', cwd=tempsrcdir)
@@ -1373,6 +1388,7 @@ class DevtoolUpdateTests(DevtoolBase):
1373 self.assertEqual(expectedlines, set(f.readlines())) 1388 self.assertEqual(expectedlines, set(f.readlines()))
1374 # Put commit back and check we can run it if layer isn't in bblayers.conf 1389 # Put commit back and check we can run it if layer isn't in bblayers.conf
1375 os.remove(bbappendfile) 1390 os.remove(bbappendfile)
1391 result = runCmd('echo "# Additional line" >> Makefile.am', cwd=tempsrcdir)
1376 result = runCmd('git commit -a -m "Change the Makefile"', cwd=tempsrcdir) 1392 result = runCmd('git commit -a -m "Change the Makefile"', cwd=tempsrcdir)
1377 result = runCmd('bitbake-layers remove-layer %s' % templayerdir, cwd=self.builddir) 1393 result = runCmd('bitbake-layers remove-layer %s' % templayerdir, cwd=self.builddir)
1378 result = runCmd('devtool update-recipe -m srcrev %s -a %s' % (testrecipe, templayerdir)) 1394 result = runCmd('devtool update-recipe -m srcrev %s -a %s' % (testrecipe, templayerdir))
@@ -1404,11 +1420,12 @@ class DevtoolUpdateTests(DevtoolBase):
1404 # Try building just to ensure we haven't broken that 1420 # Try building just to ensure we haven't broken that
1405 bitbake("%s" % testrecipe) 1421 bitbake("%s" % testrecipe)
1406 # Edit / commit local source 1422 # Edit / commit local source
1407 runCmd('echo "/* Foobar */" >> oe-local-files/makedevs.c', cwd=tempdir) 1423 runCmd('echo "/* Foobar */" >> makedevs.c', cwd=tempdir)
1408 runCmd('echo "Foo" > oe-local-files/new-local', cwd=tempdir) 1424 runCmd('echo "Foo" > new-local', cwd=tempdir)
1409 runCmd('echo "Bar" > new-file', cwd=tempdir) 1425 runCmd('echo "Bar" > new-file', cwd=tempdir)
1410 runCmd('git add new-file', cwd=tempdir) 1426 runCmd('git add new-file', cwd=tempdir)
1411 runCmd('git commit -m "Add new file"', cwd=tempdir) 1427 runCmd('git commit -m "Add new file"', cwd=tempdir)
1428 runCmd('git add new-local', cwd=tempdir)
1412 runCmd('devtool update-recipe %s' % testrecipe) 1429 runCmd('devtool update-recipe %s' % testrecipe)
1413 expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)), 1430 expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
1414 (' M', '.*/makedevs/makedevs.c$'), 1431 (' M', '.*/makedevs/makedevs.c$'),
@@ -1434,8 +1451,8 @@ class DevtoolUpdateTests(DevtoolBase):
1434 self.assertExists(local_file, 'File makedevs.c not created') 1451 self.assertExists(local_file, 'File makedevs.c not created')
1435 self.assertExists(patchfile, 'File new_local not created') 1452 self.assertExists(patchfile, 'File new_local not created')
1436 1453
1437 def test_devtool_update_recipe_local_files_2(self): 1454 def _test_devtool_update_recipe_local_files_2(self):
1438 """Check local source files support when oe-local-files is in Git""" 1455 """Check local source files support when editing local files in Git"""
1439 testrecipe = 'devtool-test-local' 1456 testrecipe = 'devtool-test-local'
1440 recipefile = get_bb_var('FILE', testrecipe) 1457 recipefile = get_bb_var('FILE', testrecipe)
1441 recipedir = os.path.dirname(recipefile) 1458 recipedir = os.path.dirname(recipefile)
@@ -1450,17 +1467,13 @@ class DevtoolUpdateTests(DevtoolBase):
1450 result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir)) 1467 result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
1451 # Check git repo 1468 # Check git repo
1452 self._check_src_repo(tempdir) 1469 self._check_src_repo(tempdir)
1453 # Add oe-local-files to Git
1454 runCmd('rm oe-local-files/.gitignore', cwd=tempdir)
1455 runCmd('git add oe-local-files', cwd=tempdir)
1456 runCmd('git commit -m "Add local sources"', cwd=tempdir)
1457 # Edit / commit local sources 1470 # Edit / commit local sources
1458 runCmd('echo "# Foobar" >> oe-local-files/file1', cwd=tempdir) 1471 runCmd('echo "# Foobar" >> file1', cwd=tempdir)
1459 runCmd('git commit -am "Edit existing file"', cwd=tempdir) 1472 runCmd('git commit -am "Edit existing file"', cwd=tempdir)
1460 runCmd('git rm oe-local-files/file2', cwd=tempdir) 1473 runCmd('git rm file2', cwd=tempdir)
1461 runCmd('git commit -m"Remove file"', cwd=tempdir) 1474 runCmd('git commit -m"Remove file"', cwd=tempdir)
1462 runCmd('echo "Foo" > oe-local-files/new-local', cwd=tempdir) 1475 runCmd('echo "Foo" > new-local', cwd=tempdir)
1463 runCmd('git add oe-local-files/new-local', cwd=tempdir) 1476 runCmd('git add new-local', cwd=tempdir)
1464 runCmd('git commit -m "Add new local file"', cwd=tempdir) 1477 runCmd('git commit -m "Add new local file"', cwd=tempdir)
1465 runCmd('echo "Gar" > new-file', cwd=tempdir) 1478 runCmd('echo "Gar" > new-file', cwd=tempdir)
1466 runCmd('git add new-file', cwd=tempdir) 1479 runCmd('git add new-file', cwd=tempdir)
@@ -1469,7 +1482,7 @@ class DevtoolUpdateTests(DevtoolBase):
1469 os.path.dirname(recipefile)) 1482 os.path.dirname(recipefile))
1470 # Checkout unmodified file to working copy -> devtool should still pick 1483 # Checkout unmodified file to working copy -> devtool should still pick
1471 # the modified version from HEAD 1484 # the modified version from HEAD
1472 runCmd('git checkout HEAD^ -- oe-local-files/file1', cwd=tempdir) 1485 runCmd('git checkout HEAD^ -- file1', cwd=tempdir)
1473 runCmd('devtool update-recipe %s' % testrecipe) 1486 runCmd('devtool update-recipe %s' % testrecipe)
1474 expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)), 1487 expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
1475 (' M', '.*/file1$'), 1488 (' M', '.*/file1$'),
@@ -1544,7 +1557,7 @@ class DevtoolUpdateTests(DevtoolBase):
1544 # (don't bother with cleaning the recipe on teardown, we won't be building it) 1557 # (don't bother with cleaning the recipe on teardown, we won't be building it)
1545 result = runCmd('devtool modify %s' % testrecipe) 1558 result = runCmd('devtool modify %s' % testrecipe)
1546 # Modify one file 1559 # Modify one file
1547 runCmd('echo "Another line" >> file2', cwd=os.path.join(self.workspacedir, 'sources', testrecipe, 'oe-local-files')) 1560 runCmd('echo "Another line" >> file2', cwd=os.path.join(self.workspacedir, 'sources', testrecipe))
1548 self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile))) 1561 self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile)))
1549 result = runCmd('devtool update-recipe %s' % testrecipe) 1562 result = runCmd('devtool update-recipe %s' % testrecipe)
1550 expected_status = [(' M', '.*/%s/file2$' % testrecipe)] 1563 expected_status = [(' M', '.*/%s/file2$' % testrecipe)]
@@ -2004,6 +2017,52 @@ class DevtoolUpgradeTests(DevtoolBase):
2004 newlines = f.readlines() 2017 newlines = f.readlines()
2005 self.assertEqual(desiredlines, newlines) 2018 self.assertEqual(desiredlines, newlines)
2006 2019
2020 def test_devtool_upgrade_recipe_upgrade_extra_tasks(self):
2021 # Check preconditions
2022 self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
2023 self.track_for_cleanup(self.workspacedir)
2024 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
2025 recipe = 'python3-guessing-game'
2026 version = '0.2.0'
2027 commit = '40cf004c2772ffa20ea803fa3be1528a75be3e98'
2028 oldrecipefile = get_bb_var('FILE', recipe)
2029 oldcratesincfile = os.path.join(os.path.dirname(oldrecipefile), os.path.basename(oldrecipefile).strip('_git.bb') + '-crates.inc')
2030 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
2031 self.track_for_cleanup(tempdir)
2032 # Check that recipe is not already under devtool control
2033 result = runCmd('devtool status')
2034 self.assertNotIn(recipe, result.output)
2035 # Check upgrade
2036 result = runCmd('devtool upgrade %s %s --version %s --srcrev %s' % (recipe, tempdir, version, commit))
2037 # Check if srctree at least is populated
2038 self.assertTrue(len(os.listdir(tempdir)) > 0, 'srctree (%s) should be populated with new (%s) source code' % (tempdir, commit))
2039 # Check new recipe file and new -crates.inc files are present
2040 newrecipefile = os.path.join(self.workspacedir, 'recipes', recipe, os.path.basename(oldrecipefile))
2041 newcratesincfile = os.path.join(self.workspacedir, 'recipes', recipe, os.path.basename(oldcratesincfile))
2042 self.assertExists(newrecipefile, 'Recipe file should exist after upgrade')
2043 self.assertExists(newcratesincfile, 'Recipe crates.inc file should exist after upgrade')
2044 # Check devtool status and make sure recipe is present
2045 result = runCmd('devtool status')
2046 self.assertIn(recipe, result.output)
2047 self.assertIn(tempdir, result.output)
2048 # Check recipe got changed as expected
2049 with open(oldrecipefile + '.upgraded', 'r') as f:
2050 desiredlines = f.readlines()
2051 with open(newrecipefile, 'r') as f:
2052 newlines = f.readlines()
2053 self.assertEqual(desiredlines, newlines)
2054 # Check crates.inc got changed as expected
2055 with open(oldcratesincfile + '.upgraded', 'r') as f:
2056 desiredlines = f.readlines()
2057 with open(newcratesincfile, 'r') as f:
2058 newlines = f.readlines()
2059 self.assertEqual(desiredlines, newlines)
2060 # Check devtool reset recipe
2061 result = runCmd('devtool reset %s -n' % recipe)
2062 result = runCmd('devtool status')
2063 self.assertNotIn(recipe, result.output)
2064 self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after resetting')
2065
2007 def test_devtool_layer_plugins(self): 2066 def test_devtool_layer_plugins(self):
2008 """Test that devtool can use plugins from other layers. 2067 """Test that devtool can use plugins from other layers.
2009 2068
diff --git a/meta/lib/oeqa/selftest/cases/distrodata.py b/meta/lib/oeqa/selftest/cases/distrodata.py
index ad952c004b..7771a42e2b 100644
--- a/meta/lib/oeqa/selftest/cases/distrodata.py
+++ b/meta/lib/oeqa/selftest/cases/distrodata.py
@@ -20,10 +20,10 @@ class Distrodata(OESelftestTestCase):
20 feature = 'LICENSE_FLAGS_ACCEPTED += " commercial"\n' 20 feature = 'LICENSE_FLAGS_ACCEPTED += " commercial"\n'
21 self.write_config(feature) 21 self.write_config(feature)
22 22
23 pkgs = oe.recipeutils.get_recipe_upgrade_status() 23 pkggroups = oe.recipeutils.get_recipe_upgrade_status()
24 24
25 regressed_failures = [pkg[0] for pkg in pkgs if pkg[1] == 'UNKNOWN_BROKEN'] 25 regressed_failures = [pkg['pn'] for pkgs in pkggroups for pkg in pkgs if pkg['status'] == 'UNKNOWN_BROKEN']
26 regressed_successes = [pkg[0] for pkg in pkgs if pkg[1] == 'KNOWN_BROKEN'] 26 regressed_successes = [pkg['pn'] for pkgs in pkggroups for pkg in pkgs if pkg['status'] == 'KNOWN_BROKEN']
27 msg = "" 27 msg = ""
28 if len(regressed_failures) > 0: 28 if len(regressed_failures) > 0:
29 msg = msg + """ 29 msg = msg + """
@@ -55,7 +55,7 @@ but their recipes claim otherwise by setting UPSTREAM_VERSION_UNKNOWN. Please re
55 return False 55 return False
56 56
57 def is_maintainer_exception(entry): 57 def is_maintainer_exception(entry):
58 exceptions = ["musl", "newlib", "linux-yocto", "linux-dummy", "mesa-gl", "libgfortran", "libx11-compose-data", 58 exceptions = ["musl", "newlib", "picolibc", "linux-yocto", "linux-dummy", "mesa-gl", "libgfortran", "libx11-compose-data",
59 "cve-update-nvd2-native",] 59 "cve-update-nvd2-native",]
60 for i in exceptions: 60 for i in exceptions:
61 if i in entry: 61 if i in entry:
@@ -115,3 +115,15 @@ The list of oe-core recipes with maintainers is empty. This may indicate that th
115 self.fail(""" 115 self.fail("""
116Unable to find recipes for the following entries in maintainers.inc: 116Unable to find recipes for the following entries in maintainers.inc:
117""" + "\n".join(['%s' % i for i in missing_recipes])) 117""" + "\n".join(['%s' % i for i in missing_recipes]))
118
119 def test_common_include_recipes(self):
120 """
121 Summary: Test that obtaining recipes that share includes between them returns a sane result
122 Expected: At least cmake and qemu entries are present in the output
123 Product: oe-core
124 Author: Alexander Kanavin <alex.kanavin@gmail.com>
125 """
126 recipes = oe.recipeutils.get_common_include_recipes()
127
128 self.assertIn({'qemu-system-native', 'qemu', 'qemu-native'}, recipes)
129 self.assertIn({'cmake-native', 'cmake'}, recipes)
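Note on the distrodata.py change above: get_recipe_upgrade_status() now returns groups of per-recipe dicts instead of a flat list of tuples, which is why the updated test flattens the result with a nested comprehension. A minimal sketch of that pattern (the sample data below is hypothetical; only the 'pn'/'status' keys are taken from the test itself):

    pkggroups = [
        [{'pn': 'cmake', 'status': 'MATCH'}, {'pn': 'cmake-native', 'status': 'MATCH'}],
        [{'pn': 'foo', 'status': 'UNKNOWN_BROKEN'}],
    ]
    # Outer loop walks the groups, inner loop the recipes within each group.
    regressed_failures = [pkg['pn'] for pkgs in pkggroups for pkg in pkgs
                          if pkg['status'] == 'UNKNOWN_BROKEN']
    assert regressed_failures == ['foo']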
diff --git a/meta/lib/oeqa/selftest/cases/fitimage.py b/meta/lib/oeqa/selftest/cases/fitimage.py
index 347c065377..0b5f4602fb 100644
--- a/meta/lib/oeqa/selftest/cases/fitimage.py
+++ b/meta/lib/oeqa/selftest/cases/fitimage.py
@@ -11,6 +11,51 @@ import re
11 11
12class FitImageTests(OESelftestTestCase): 12class FitImageTests(OESelftestTestCase):
13 13
14 def _setup_uboot_tools_native(self):
15 """build u-boot-tools-native and return RECIPE_SYSROOT_NATIVE"""
16 bitbake("u-boot-tools-native -c addto_recipe_sysroot")
17 return get_bb_var('RECIPE_SYSROOT_NATIVE', 'u-boot-tools-native')
18
19 def _verify_fit_image_signature(self, uboot_tools_sysroot_native, fitimage_path, dtb_path, conf_name=None):
20 """Verify the signature of a FIT configuration
21
22 The fit_check_sign utility from u-boot-tools-native is called.
23 uboot-fit_check_sign -f fitImage -k $dtb_name -c conf-$dtb_name
24 """
25 fit_check_sign_path = os.path.join(uboot_tools_sysroot_native, 'usr', 'bin', 'uboot-fit_check_sign')
26 cmd = '%s -f %s -k %s' % (fit_check_sign_path, fitimage_path, dtb_path)
27 if conf_name:
28 cmd += ' -c %s' % conf_name
29 result = runCmd(cmd)
30 self.logger.debug("%s\nreturned: %s\n%s", cmd, str(result.status), result.output)
31 self.assertIn("Signature check OK", result.output)
32
33 @staticmethod
34 def _find_string_in_bin_file(file_path, search_string):
35 """Find strings in a binary file
36
37 Shell equivalent: strings "$1" | grep "$2" | wc -l
38 Return the number of matches
39 """
40 found_positions = 0
41 with open(file_path, 'rb') as file:
42 byte = file.read(1)
43 current_position = 0
44 current_match = 0
45 while byte:
46 char = byte.decode('ascii', errors='ignore')
47 if char == search_string[current_match]:
48 current_match += 1
49 if current_match == len(search_string):
50 found_positions += 1
51 current_match = 0
52 else:
53 current_match = 0
54 current_position += 1
55 byte = file.read(1)
56 return found_positions
57
58
14 def test_fit_image(self): 59 def test_fit_image(self):
15 """ 60 """
16 Summary: Check if FIT image and Image Tree Source (its) are built 61 Summary: Check if FIT image and Image Tree Source (its) are built
@@ -53,10 +98,8 @@ FIT_DESC = "A model description"
53 fitimage_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], 98 fitimage_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'],
54 "fitImage-%s-%s" % (bb_vars['INITRAMFS_IMAGE_NAME'], bb_vars['KERNEL_FIT_LINK_NAME'])) 99 "fitImage-%s-%s" % (bb_vars['INITRAMFS_IMAGE_NAME'], bb_vars['KERNEL_FIT_LINK_NAME']))
55 100
56 self.assertTrue(os.path.exists(fitimage_its_path), 101 self.assertExists(fitimage_its_path, "%s image tree source doesn't exist" % (fitimage_its_path))
57 "%s image tree source doesn't exist" % (fitimage_its_path)) 102 self.assertExists(fitimage_path, "%s FIT image doesn't exist" % (fitimage_path))
58 self.assertTrue(os.path.exists(fitimage_path),
59 "%s FIT image doesn't exist" % (fitimage_path))
60 103
61 # Check that the type, load address, entrypoint address and default 104 # Check that the type, load address, entrypoint address and default
62 # values for kernel and ramdisk in Image Tree Source are as expected. 105 # values for kernel and ramdisk in Image Tree Source are as expected.
@@ -108,19 +151,21 @@ FIT_DESC = "A model description"
108 Author: Paul Eggleton <paul.eggleton@microsoft.com> based upon 151 Author: Paul Eggleton <paul.eggleton@microsoft.com> based upon
109 work by Usama Arif <usama.arif@arm.com> 152 work by Usama Arif <usama.arif@arm.com>
110 """ 153 """
154 a_comment = "a smart comment"
111 config = """ 155 config = """
112# Enable creation of fitImage 156# Enable creation of fitImage
113MACHINE = "beaglebone-yocto" 157MACHINE = "beaglebone-yocto"
114KERNEL_IMAGETYPES += " fitImage " 158KERNEL_IMAGETYPES += " fitImage "
115KERNEL_CLASSES = " kernel-fitimage test-mkimage-wrapper " 159KERNEL_CLASSES = " kernel-fitimage "
116UBOOT_SIGN_ENABLE = "1" 160UBOOT_SIGN_ENABLE = "1"
117FIT_GENERATE_KEYS = "1" 161FIT_GENERATE_KEYS = "1"
118UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys" 162UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
119UBOOT_SIGN_IMG_KEYNAME = "img-oe-selftest" 163UBOOT_SIGN_IMG_KEYNAME = "img-oe-selftest"
120UBOOT_SIGN_KEYNAME = "cfg-oe-selftest" 164UBOOT_SIGN_KEYNAME = "cfg-oe-selftest"
121FIT_SIGN_INDIVIDUAL = "1" 165FIT_SIGN_INDIVIDUAL = "1"
122UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'" 166UBOOT_MKIMAGE_SIGN_ARGS = "-c '%s'"
123""" 167""" % a_comment
168
124 self.write_config(config) 169 self.write_config(config)
125 170
126 # fitImage is created as part of linux recipe 171 # fitImage is created as part of linux recipe
@@ -133,10 +178,8 @@ UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'"
133 fitimage_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], 178 fitimage_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'],
134 "fitImage-%s.bin" % (bb_vars['KERNEL_FIT_LINK_NAME'])) 179 "fitImage-%s.bin" % (bb_vars['KERNEL_FIT_LINK_NAME']))
135 180
136 self.assertTrue(os.path.exists(fitimage_its_path), 181 self.assertExists(fitimage_its_path, "%s image tree source doesn't exist" % (fitimage_its_path))
137 "%s image tree source doesn't exist" % (fitimage_its_path)) 182 self.assertExists(fitimage_path, "%s FIT image doesn't exist" % (fitimage_path))
138 self.assertTrue(os.path.exists(fitimage_path),
139 "%s FIT image doesn't exist" % (fitimage_path))
140 183
141 req_itspaths = [ 184 req_itspaths = [
142 ['/', 'images', 'kernel-1'], 185 ['/', 'images', 'kernel-1'],
@@ -195,10 +238,8 @@ UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'"
195 self.assertEqual(value, reqvalue) 238 self.assertEqual(value, reqvalue)
196 239
197 # Dump the image to see if it really got signed 240 # Dump the image to see if it really got signed
198 bitbake("u-boot-tools-native -c addto_recipe_sysroot") 241 uboot_tools_sysroot_native = self._setup_uboot_tools_native()
199 result = runCmd('bitbake -e u-boot-tools-native | grep ^RECIPE_SYSROOT_NATIVE=') 242 dumpimage_path = os.path.join(uboot_tools_sysroot_native, 'usr', 'bin', 'dumpimage')
200 recipe_sysroot_native = result.output.split('=')[1].strip('"')
201 dumpimage_path = os.path.join(recipe_sysroot_native, 'usr', 'bin', 'dumpimage')
202 result = runCmd('%s -l %s' % (dumpimage_path, fitimage_path)) 243 result = runCmd('%s -l %s' % (dumpimage_path, fitimage_path))
203 in_signed = None 244 in_signed = None
204 signed_sections = {} 245 signed_sections = {}
@@ -224,17 +265,15 @@ UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'"
224 value = values.get('Sign value', None) 265 value = values.get('Sign value', None)
225 self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section) 266 self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section)
226 267
227 # Check for UBOOT_MKIMAGE_SIGN_ARGS 268 # Search for the string passed to mkimage: 1 kernel + 3 DTBs + config per DTB = 7 sections
228 result = runCmd('bitbake -e virtual/kernel | grep ^T=') 269 # Looks like mkimage supports to add a comment but does not support to read it back.
229 tempdir = result.output.split('=', 1)[1].strip().strip('') 270 found_comments = FitImageTests._find_string_in_bin_file(fitimage_path, a_comment)
230 result = runCmd('grep "a smart comment" %s/run.do_assemble_fitimage' % tempdir, ignore_status=True) 271 self.assertEqual(found_comments, 7, "Expected 7 signed and commented section in the fitImage.")
231 self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE_SIGN_ARGS value did not get used')
232 272
233 # Check for evidence of test-mkimage-wrapper class 273 # Verify the signature for all configurations = DTBs
234 result = runCmd('grep "### uboot-mkimage wrapper message" %s/log.do_assemble_fitimage' % tempdir, ignore_status=True) 274 for dtb in ['am335x-bone.dtb', 'am335x-boneblack.dtb', 'am335x-bonegreen.dtb']:
235 self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE did not work') 275 self._verify_fit_image_signature(uboot_tools_sysroot_native, fitimage_path,
236 result = runCmd('grep "### uboot-mkimage signing wrapper message" %s/log.do_assemble_fitimage' % tempdir, ignore_status=True) 276 os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], dtb), 'conf-' + dtb)
237 self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE_SIGN did not work')
238 277
239 def test_uboot_fit_image(self): 278 def test_uboot_fit_image(self):
240 """ 279 """
@@ -287,10 +326,8 @@ FIT_SIGN_INDIVIDUAL = "1"
287 fitimage_path = os.path.join(deploy_dir_image, 326 fitimage_path = os.path.join(deploy_dir_image,
288 "u-boot-fitImage-%s" % (machine,)) 327 "u-boot-fitImage-%s" % (machine,))
289 328
290 self.assertTrue(os.path.exists(fitimage_its_path), 329 self.assertExists(fitimage_its_path, "%s image tree source doesn't exist" % (fitimage_its_path))
291 "%s image tree source doesn't exist" % (fitimage_its_path)) 330 self.assertExists(fitimage_path, "%s FIT image doesn't exist" % (fitimage_path))
292 self.assertTrue(os.path.exists(fitimage_path),
293 "%s FIT image doesn't exist" % (fitimage_path))
294 331
295 # Check that the type, load address, entrypoint address and default 332 # Check that the type, load address, entrypoint address and default
296 # values for kernel and ramdisk in Image Tree Source are as expected. 333 # values for kernel and ramdisk in Image Tree Source are as expected.
@@ -351,7 +388,6 @@ UBOOT_ENTRYPOINT = "0x80080000"
351UBOOT_FIT_DESC = "A model description" 388UBOOT_FIT_DESC = "A model description"
352KERNEL_IMAGETYPES += " fitImage " 389KERNEL_IMAGETYPES += " fitImage "
353KERNEL_CLASSES = " kernel-fitimage " 390KERNEL_CLASSES = " kernel-fitimage "
354INHERIT += "test-mkimage-wrapper"
355UBOOT_SIGN_ENABLE = "1" 391UBOOT_SIGN_ENABLE = "1"
356FIT_GENERATE_KEYS = "1" 392FIT_GENERATE_KEYS = "1"
357UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys" 393UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
@@ -372,10 +408,8 @@ UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart U-Boot comment'"
372 fitimage_path = os.path.join(deploy_dir_image, 408 fitimage_path = os.path.join(deploy_dir_image,
373 "u-boot-fitImage-%s" % (machine,)) 409 "u-boot-fitImage-%s" % (machine,))
374 410
375 self.assertTrue(os.path.exists(fitimage_its_path), 411 self.assertExists(fitimage_its_path, "%s image tree source doesn't exist" % (fitimage_its_path))
376 "%s image tree source doesn't exist" % (fitimage_its_path)) 412 self.assertExists(fitimage_path, "%s FIT image doesn't exist" % (fitimage_path))
377 self.assertTrue(os.path.exists(fitimage_path),
378 "%s FIT image doesn't exist" % (fitimage_path))
379 413
380 # Check that the type, load address, entrypoint address and default 414 # Check that the type, load address, entrypoint address and default
381 # values for kernel and ramdisk in Image Tree Source are as expected. 415 # values for kernel and ramdisk in Image Tree Source are as expected.
@@ -425,6 +459,7 @@ UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart U-Boot comment'"
425 work by Paul Eggleton <paul.eggleton@microsoft.com> and 459 work by Paul Eggleton <paul.eggleton@microsoft.com> and
426 Usama Arif <usama.arif@arm.com> 460 Usama Arif <usama.arif@arm.com>
427 """ 461 """
462 a_comment = "a smart U-Boot comment"
428 config = """ 463 config = """
429# There's no U-boot defconfig with CONFIG_FIT_SIGNATURE yet, so we need at 464# There's no U-boot defconfig with CONFIG_FIT_SIGNATURE yet, so we need at
430# least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set 465# least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set
@@ -434,7 +469,6 @@ SPL_BINARY = "MLO"
434# The kernel-fitimage class is a dependency even if we're only 469# The kernel-fitimage class is a dependency even if we're only
435# creating/signing the U-Boot fitImage 470# creating/signing the U-Boot fitImage
436KERNEL_CLASSES = " kernel-fitimage" 471KERNEL_CLASSES = " kernel-fitimage"
437INHERIT += "test-mkimage-wrapper"
438# Enable creation and signing of the U-Boot fitImage 472# Enable creation and signing of the U-Boot fitImage
439UBOOT_FITIMAGE_ENABLE = "1" 473UBOOT_FITIMAGE_ENABLE = "1"
440SPL_SIGN_ENABLE = "1" 474SPL_SIGN_ENABLE = "1"
@@ -446,17 +480,17 @@ UBOOT_LOADADDRESS = "0x80000000"
446UBOOT_DTB_LOADADDRESS = "0x82000000" 480UBOOT_DTB_LOADADDRESS = "0x82000000"
447UBOOT_ARCH = "arm" 481UBOOT_ARCH = "arm"
448SPL_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000" 482SPL_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
449SPL_MKIMAGE_SIGN_ARGS = "-c 'a smart U-Boot comment'" 483SPL_MKIMAGE_SIGN_ARGS = "-c '%s'"
450UBOOT_EXTLINUX = "0" 484UBOOT_EXTLINUX = "0"
451UBOOT_FIT_GENERATE_KEYS = "1" 485UBOOT_FIT_GENERATE_KEYS = "1"
452UBOOT_FIT_HASH_ALG = "sha256" 486UBOOT_FIT_HASH_ALG = "sha256"
453""" 487""" % a_comment
488
454 self.write_config(config) 489 self.write_config(config)
455 490
456 # The U-Boot fitImage is created as part of the U-Boot recipe 491 # The U-Boot fitImage is created as part of the U-Boot recipe
457 bitbake("virtual/bootloader") 492 bitbake("virtual/bootloader")
458 493
459 image_type = "core-image-minimal"
460 deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') 494 deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
461 machine = get_bb_var('MACHINE') 495 machine = get_bb_var('MACHINE')
462 fitimage_its_path = os.path.join(deploy_dir_image, 496 fitimage_its_path = os.path.join(deploy_dir_image,
@@ -464,10 +498,8 @@ UBOOT_FIT_HASH_ALG = "sha256"
464 fitimage_path = os.path.join(deploy_dir_image, 498 fitimage_path = os.path.join(deploy_dir_image,
465 "u-boot-fitImage-%s" % (machine,)) 499 "u-boot-fitImage-%s" % (machine,))
466 500
467 self.assertTrue(os.path.exists(fitimage_its_path), 501 self.assertExists(fitimage_its_path, "%s image tree source doesn't exist" % (fitimage_its_path))
468 "%s image tree source doesn't exist" % (fitimage_its_path)) 502 self.assertExists(fitimage_path, "%s FIT image doesn't exist" % (fitimage_path))
469 self.assertTrue(os.path.exists(fitimage_path),
470 "%s FIT image doesn't exist" % (fitimage_path))
471 503
472 req_itspaths = [ 504 req_itspaths = [
473 ['/', 'images', 'uboot'], 505 ['/', 'images', 'uboot'],
@@ -516,10 +548,8 @@ UBOOT_FIT_HASH_ALG = "sha256"
516 self.assertEqual(value, reqvalue) 548 self.assertEqual(value, reqvalue)
517 549
518 # Dump the image to see if it really got signed 550 # Dump the image to see if it really got signed
519 bitbake("u-boot-tools-native -c addto_recipe_sysroot") 551 uboot_tools_sysroot_native = self._setup_uboot_tools_native()
520 result = runCmd('bitbake -e u-boot-tools-native | grep ^RECIPE_SYSROOT_NATIVE=') 552 dumpimage_path = os.path.join(uboot_tools_sysroot_native, 'usr', 'bin', 'dumpimage')
521 recipe_sysroot_native = result.output.split('=')[1].strip('"')
522 dumpimage_path = os.path.join(recipe_sysroot_native, 'usr', 'bin', 'dumpimage')
523 result = runCmd('%s -l %s' % (dumpimage_path, fitimage_path)) 553 result = runCmd('%s -l %s' % (dumpimage_path, fitimage_path))
524 in_signed = None 554 in_signed = None
525 signed_sections = {} 555 signed_sections = {}
@@ -542,16 +572,14 @@ UBOOT_FIT_HASH_ALG = "sha256"
542 self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section) 572 self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section)
543 573
544 # Check for SPL_MKIMAGE_SIGN_ARGS 574 # Check for SPL_MKIMAGE_SIGN_ARGS
545 result = runCmd('bitbake -e virtual/bootloader | grep ^T=') 575 # Looks like mkimage supports to add a comment but does not support to read it back.
546 tempdir = result.output.split('=', 1)[1].strip().strip('') 576 found_comments = FitImageTests._find_string_in_bin_file(fitimage_path, a_comment)
547 result = runCmd('grep "a smart U-Boot comment" %s/run.do_uboot_assemble_fitimage' % tempdir, ignore_status=True) 577 self.assertEqual(found_comments, 2, "Expected 2 signed and commented section in the fitImage.")
548 self.assertEqual(result.status, 0, 'SPL_MKIMAGE_SIGN_ARGS value did not get used') 578
579 # Verify the signature
580 self._verify_fit_image_signature(uboot_tools_sysroot_native, fitimage_path,
581 os.path.join(deploy_dir_image, 'u-boot-spl.dtb'))
549 582
550 # Check for evidence of test-mkimage-wrapper class
551 result = runCmd('grep "### uboot-mkimage wrapper message" %s/log.do_uboot_assemble_fitimage' % tempdir, ignore_status=True)
552 self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE did not work')
553 result = runCmd('grep "### uboot-mkimage signing wrapper message" %s/log.do_uboot_assemble_fitimage' % tempdir, ignore_status=True)
554 self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE_SIGN did not work')
555 583
556 def test_sign_cascaded_uboot_fit_image(self): 584 def test_sign_cascaded_uboot_fit_image(self):
557 """ 585 """
@@ -573,6 +601,7 @@ UBOOT_FIT_HASH_ALG = "sha256"
573 work by Paul Eggleton <paul.eggleton@microsoft.com> and 601 work by Paul Eggleton <paul.eggleton@microsoft.com> and
574 Usama Arif <usama.arif@arm.com> 602 Usama Arif <usama.arif@arm.com>
575 """ 603 """
604 a_comment = "a smart cascaded U-Boot comment"
576 config = """ 605 config = """
577# There's no U-boot deconfig with CONFIG_FIT_SIGNATURE yet, so we need at 606# There's no U-boot deconfig with CONFIG_FIT_SIGNATURE yet, so we need at
578# least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set 607# least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set
@@ -588,7 +617,7 @@ UBOOT_DTB_BINARY = "u-boot.dtb"
588UBOOT_ENTRYPOINT = "0x80000000" 617UBOOT_ENTRYPOINT = "0x80000000"
589UBOOT_LOADADDRESS = "0x80000000" 618UBOOT_LOADADDRESS = "0x80000000"
590UBOOT_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000" 619UBOOT_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
591UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart cascaded Kernel comment'" 620UBOOT_MKIMAGE_SIGN_ARGS = "-c '%s'"
592UBOOT_DTB_LOADADDRESS = "0x82000000" 621UBOOT_DTB_LOADADDRESS = "0x82000000"
593UBOOT_ARCH = "arm" 622UBOOT_ARCH = "arm"
594SPL_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000" 623SPL_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
@@ -598,20 +627,18 @@ UBOOT_FIT_GENERATE_KEYS = "1"
598UBOOT_FIT_HASH_ALG = "sha256" 627UBOOT_FIT_HASH_ALG = "sha256"
599KERNEL_IMAGETYPES += " fitImage " 628KERNEL_IMAGETYPES += " fitImage "
600KERNEL_CLASSES = " kernel-fitimage " 629KERNEL_CLASSES = " kernel-fitimage "
601INHERIT += "test-mkimage-wrapper"
602UBOOT_SIGN_ENABLE = "1" 630UBOOT_SIGN_ENABLE = "1"
603FIT_GENERATE_KEYS = "1" 631FIT_GENERATE_KEYS = "1"
604UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys" 632UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
605UBOOT_SIGN_IMG_KEYNAME = "img-oe-selftest" 633UBOOT_SIGN_IMG_KEYNAME = "img-oe-selftest"
606UBOOT_SIGN_KEYNAME = "cfg-oe-selftest" 634UBOOT_SIGN_KEYNAME = "cfg-oe-selftest"
607FIT_SIGN_INDIVIDUAL = "1" 635FIT_SIGN_INDIVIDUAL = "1"
608""" 636""" % a_comment
609 self.write_config(config) 637 self.write_config(config)
610 638
611 # The U-Boot fitImage is created as part of the U-Boot recipe 639 # The U-Boot fitImage is created as part of the U-Boot recipe
612 bitbake("virtual/bootloader") 640 bitbake("virtual/bootloader")
613 641
614 image_type = "core-image-minimal"
615 deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') 642 deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
616 machine = get_bb_var('MACHINE') 643 machine = get_bb_var('MACHINE')
617 fitimage_its_path = os.path.join(deploy_dir_image, 644 fitimage_its_path = os.path.join(deploy_dir_image,
@@ -619,10 +646,8 @@ FIT_SIGN_INDIVIDUAL = "1"
619 fitimage_path = os.path.join(deploy_dir_image, 646 fitimage_path = os.path.join(deploy_dir_image,
620 "u-boot-fitImage-%s" % (machine,)) 647 "u-boot-fitImage-%s" % (machine,))
621 648
622 self.assertTrue(os.path.exists(fitimage_its_path), 649 self.assertExists(fitimage_its_path, "%s image tree source doesn't exist" % (fitimage_its_path))
623 "%s image tree source doesn't exist" % (fitimage_its_path)) 650 self.assertExists(fitimage_path, "%s FIT image doesn't exist" % (fitimage_path))
624 self.assertTrue(os.path.exists(fitimage_path),
625 "%s FIT image doesn't exist" % (fitimage_path))
626 651
627 req_itspaths = [ 652 req_itspaths = [
628 ['/', 'images', 'uboot'], 653 ['/', 'images', 'uboot'],
@@ -671,10 +696,8 @@ FIT_SIGN_INDIVIDUAL = "1"
671 self.assertEqual(value, reqvalue) 696 self.assertEqual(value, reqvalue)
672 697
673 # Dump the image to see if it really got signed 698 # Dump the image to see if it really got signed
674 bitbake("u-boot-tools-native -c addto_recipe_sysroot") 699 uboot_tools_sysroot_native = self._setup_uboot_tools_native()
675 result = runCmd('bitbake -e u-boot-tools-native | grep ^RECIPE_SYSROOT_NATIVE=') 700 dumpimage_path = os.path.join(uboot_tools_sysroot_native, 'usr', 'bin', 'dumpimage')
676 recipe_sysroot_native = result.output.split('=')[1].strip('"')
677 dumpimage_path = os.path.join(recipe_sysroot_native, 'usr', 'bin', 'dumpimage')
678 result = runCmd('%s -l %s' % (dumpimage_path, fitimage_path)) 701 result = runCmd('%s -l %s' % (dumpimage_path, fitimage_path))
679 in_signed = None 702 in_signed = None
680 signed_sections = {} 703 signed_sections = {}
@@ -697,17 +720,13 @@ FIT_SIGN_INDIVIDUAL = "1"
697 self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section) 720 self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section)
698 721
699 # Check for SPL_MKIMAGE_SIGN_ARGS 722 # Check for SPL_MKIMAGE_SIGN_ARGS
700 result = runCmd('bitbake -e virtual/bootloader | grep ^T=') 723 # It looks like mkimage supports adding a comment but does not support reading it back.
701 tempdir = result.output.split('=', 1)[1].strip().strip('') 724 found_comments = FitImageTests._find_string_in_bin_file(fitimage_path, a_comment)
702 result = runCmd('grep "a smart cascaded U-Boot comment" %s/run.do_uboot_assemble_fitimage' % tempdir, ignore_status=True) 725 self.assertEqual(found_comments, 2, "Expected 2 signed and commented sections in the fitImage.")
703 self.assertEqual(result.status, 0, 'SPL_MKIMAGE_SIGN_ARGS value did not get used')
704
705 # Check for evidence of test-mkimage-wrapper class
706 result = runCmd('grep "### uboot-mkimage wrapper message" %s/log.do_uboot_assemble_fitimage' % tempdir, ignore_status=True)
707 self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE did not work')
708 result = runCmd('grep "### uboot-mkimage signing wrapper message" %s/log.do_uboot_assemble_fitimage' % tempdir, ignore_status=True)
709 self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE_SIGN did not work')
710 726
727 # Verify the signature
728 self._verify_fit_image_signature(uboot_tools_sysroot_native, fitimage_path,
729 os.path.join(deploy_dir_image, 'u-boot-spl.dtb'))
711 730
712 731
713 def test_initramfs_bundle(self): 732 def test_initramfs_bundle(self):
@@ -755,24 +774,24 @@ FIT_HASH_ALG = "sha256"
755 # fitImage is created as part of linux recipe 774 # fitImage is created as part of linux recipe
756 bitbake("virtual/kernel") 775 bitbake("virtual/kernel")
757 776
758 image_type = get_bb_var('INITRAMFS_IMAGE') 777 bb_vars = get_bb_vars([
759 deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') 778 'DEPLOY_DIR_IMAGE',
760 machine = get_bb_var('MACHINE') 779 'FIT_HASH_ALG',
761 fitimage_its_path = os.path.join(deploy_dir_image, 780 'FIT_KERNEL_COMP_ALG',
762 "fitImage-its-%s-%s-%s" % (image_type, machine, machine)) 781 'INITRAMFS_IMAGE',
763 fitimage_path = os.path.join(deploy_dir_image,"fitImage") 782 'MACHINE',
764 783 'UBOOT_ARCH',
765 self.assertTrue(os.path.exists(fitimage_its_path), 784 'UBOOT_ENTRYPOINT',
766 "%s image tree source doesn't exist" % (fitimage_its_path)) 785 'UBOOT_LOADADDRESS',
767 self.assertTrue(os.path.exists(fitimage_path), 786 'UBOOT_MKIMAGE_KERNEL_TYPE'
768 "%s FIT image doesn't exist" % (fitimage_path)) 787 ],
788 'virtual/kernel')
789 fitimage_its_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'],
790 "fitImage-its-%s-%s-%s" % (bb_vars['INITRAMFS_IMAGE'], bb_vars['MACHINE'], bb_vars['MACHINE']))
791 fitimage_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'],"fitImage")
769 792
770 kernel_load = str(get_bb_var('UBOOT_LOADADDRESS')) 793 self.assertExists(fitimage_its_path, "%s image tree source doesn't exist" % (fitimage_its_path))
771 kernel_entry = str(get_bb_var('UBOOT_ENTRYPOINT')) 794 self.assertExists(fitimage_path, "%s FIT image doesn't exist" % (fitimage_path))
772 kernel_type = str(get_bb_var('UBOOT_MKIMAGE_KERNEL_TYPE'))
773 kernel_compression = str(get_bb_var('FIT_KERNEL_COMP_ALG'))
774 uboot_arch = str(get_bb_var('UBOOT_ARCH'))
775 fit_hash_alg = str(get_bb_var('FIT_HASH_ALG'))
776 795
777 its_file = open(fitimage_its_path) 796 its_file = open(fitimage_its_path)
778 797
@@ -782,31 +801,31 @@ FIT_HASH_ALG = "sha256"
782 'kernel-1 {', 801 'kernel-1 {',
783 'description = "Linux kernel";', 802 'description = "Linux kernel";',
784 'data = /incbin/("linux.bin");', 803 'data = /incbin/("linux.bin");',
785 'type = "' + kernel_type + '";', 804 'type = "' + str(bb_vars['UBOOT_MKIMAGE_KERNEL_TYPE']) + '";',
786 'arch = "' + uboot_arch + '";', 805 'arch = "' + str(bb_vars['UBOOT_ARCH']) + '";',
787 'os = "linux";', 806 'os = "linux";',
788 'compression = "' + kernel_compression + '";', 807 'compression = "' + str(bb_vars['FIT_KERNEL_COMP_ALG']) + '";',
789 'load = <' + kernel_load + '>;', 808 'load = <' + str(bb_vars['UBOOT_LOADADDRESS']) + '>;',
790 'entry = <' + kernel_entry + '>;', 809 'entry = <' + str(bb_vars['UBOOT_ENTRYPOINT']) + '>;',
791 'hash-1 {', 810 'hash-1 {',
792 'algo = "' + fit_hash_alg +'";', 811 'algo = "' + str(bb_vars['FIT_HASH_ALG']) +'";',
793 '};', 812 '};',
794 '};' 813 '};'
795 ] 814 ]
796 815
797 node_str = exp_node_lines[0] 816 node_str = exp_node_lines[0]
798 817
799 test_passed = False
800
801 print ("checking kernel node\n") 818 print ("checking kernel node\n")
819 self.assertIn(node_str, its_lines)
802 820
803 if node_str in its_lines: 821 node_start_idx = its_lines.index(node_str)
804 node_start_idx = its_lines.index(node_str) 822 node = its_lines[node_start_idx:(node_start_idx + len(exp_node_lines))]
805 node = its_lines[node_start_idx:(node_start_idx + len(exp_node_lines))] 823
806 if node == exp_node_lines: 824 # Remove the absolute path. This refers to WORKDIR which is not always predictable.
807 print("kernel node verified") 825 re_data = re.compile(r'^data = /incbin/\(.*/linux\.bin"\);$')
808 else: 826 node = [re.sub(re_data, 'data = /incbin/("linux.bin");', cfg_str) for cfg_str in node]
809 self.assertTrue(test_passed == True,"kernel node does not match expectation") 827
828 self.assertEqual(node, exp_node_lines, "kernel node does not match expectation")
810 829
811 rx_configs = re.compile("^conf-.*") 830 rx_configs = re.compile("^conf-.*")
812 its_configs = list(filter(rx_configs.match, its_lines)) 831 its_configs = list(filter(rx_configs.match, its_lines))
@@ -822,25 +841,14 @@ FIT_HASH_ALG = "sha256"
822 841
823 node = its_lines[cfg_start_idx:line_idx] 842 node = its_lines[cfg_start_idx:line_idx]
824 print("checking configuration " + cfg_str.rstrip(" {")) 843 print("checking configuration " + cfg_str.rstrip(" {"))
825 rx_desc_line = re.compile("^description.*1 Linux kernel.*") 844 rx_desc_line = re.compile(r'^description = ".*Linux kernel.*')
826 if len(list(filter(rx_desc_line.match, node))) != 1: 845 self.assertEqual(len(list(filter(rx_desc_line.match, node))), 1, "kernel keyword not found in the description line")
827 self.assertTrue(test_passed == True,"kernel keyword not found in the description line")
828 break
829 else:
830 print("kernel keyword found in the description line")
831 846
832 if 'kernel = "kernel-1";' not in node: 847 self.assertIn('kernel = "kernel-1";', node)
833 self.assertTrue(test_passed == True,"kernel line not found")
834 break
835 else:
836 print("kernel line found")
837 848
838 rx_sign_line = re.compile("^sign-images.*kernel.*") 849 rx_sign_line = re.compile(r'^sign-images = .*kernel.*')
839 if len(list(filter(rx_sign_line.match, node))) != 1: 850 self.assertEqual(len(list(filter(rx_sign_line.match, node))), 1, "kernel hash not signed")
840 self.assertTrue(test_passed == True,"kernel hash not signed")
841 break
842 else:
843 print("kernel hash signed")
844 851
845 test_passed = True 852 # Verify the signature
846 self.assertTrue(test_passed == True,"Initramfs bundle test success") 853 uboot_tools_sysroot_native = self._setup_uboot_tools_native()
854 self._verify_fit_image_signature(uboot_tools_sysroot_native, fitimage_path, os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], 'am335x-bone.dtb'))
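Note on the fitimage.py changes above: instead of grepping the do_assemble_fitimage run and log files, the tests now count the UBOOT_MKIMAGE_SIGN_ARGS comment directly in the produced binary via _find_string_in_bin_file() and verify signatures with uboot-fit_check_sign. A rough, memory-based equivalent of the byte-by-byte counter, given only as a sketch (it assumes the fitImage fits in memory and counts non-overlapping matches):

    def count_ascii_string(file_path, search_string):
        # Roughly: strings "$file" | grep "$search_string" | wc -l
        with open(file_path, 'rb') as f:
            data = f.read()
        return data.count(search_string.encode('ascii'))

    # Usage mirroring the test expectation: one match per section embedding the comment,
    # e.g. 7 for 1 kernel + 3 DTBs + 3 configurations in the signed kernel fitImage.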
diff --git a/meta/lib/oeqa/selftest/cases/gcc.py b/meta/lib/oeqa/selftest/cases/gcc.py
index 89360178fe..4cc0894d42 100644
--- a/meta/lib/oeqa/selftest/cases/gcc.py
+++ b/meta/lib/oeqa/selftest/cases/gcc.py
@@ -37,7 +37,7 @@ class GccSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
37 features = [] 37 features = []
38 features.append('MAKE_CHECK_TARGETS = "{0}"'.format(" ".join(targets))) 38 features.append('MAKE_CHECK_TARGETS = "{0}"'.format(" ".join(targets)))
39 if ssh is not None: 39 if ssh is not None:
40 features.append('TOOLCHAIN_TEST_TARGET = "ssh"') 40 features.append('TOOLCHAIN_TEST_TARGET = "linux-ssh"')
41 features.append('TOOLCHAIN_TEST_HOST = "{0}"'.format(ssh)) 41 features.append('TOOLCHAIN_TEST_HOST = "{0}"'.format(ssh))
42 features.append('TOOLCHAIN_TEST_HOST_USER = "root"') 42 features.append('TOOLCHAIN_TEST_HOST_USER = "root"')
43 features.append('TOOLCHAIN_TEST_HOST_PORT = "22"') 43 features.append('TOOLCHAIN_TEST_HOST_PORT = "22"')
diff --git a/meta/lib/oeqa/selftest/cases/imagefeatures.py b/meta/lib/oeqa/selftest/cases/imagefeatures.py
index dc88c222bd..94d01ba116 100644
--- a/meta/lib/oeqa/selftest/cases/imagefeatures.py
+++ b/meta/lib/oeqa/selftest/cases/imagefeatures.py
@@ -250,12 +250,7 @@ USERADD_GID_TABLES += "files/static-group"
250DISTRO_FEATURES:append = " pam opengl wayland" 250DISTRO_FEATURES:append = " pam opengl wayland"
251 251
252# Switch to systemd 252# Switch to systemd
253DISTRO_FEATURES:append = " systemd usrmerge" 253INIT_MANAGER = "systemd"
254VIRTUAL-RUNTIME_init_manager = "systemd"
255VIRTUAL-RUNTIME_initscripts = ""
256VIRTUAL-RUNTIME_syslog = ""
257VIRTUAL-RUNTIME_login_manager = "shadow-base"
258DISTRO_FEATURES_BACKFILL_CONSIDERED = "sysvinit"
259 254
260# Replace busybox 255# Replace busybox
261PREFERRED_PROVIDER_virtual/base-utils = "packagegroup-core-base-utils" 256PREFERRED_PROVIDER_virtual/base-utils = "packagegroup-core-base-utils"
@@ -319,7 +314,7 @@ SKIP_RECIPE[busybox] = "Don't build this"
319 """ 314 """
320 config = """ 315 config = """
321DISTRO_FEATURES:append = " api-documentation" 316DISTRO_FEATURES:append = " api-documentation"
322CORE_IMAGE_EXTRA_INSTALL = "man-pages kmod-doc" 317CORE_IMAGE_EXTRA_INSTALL = "man-pages"
323""" 318"""
324 self.write_config(config) 319 self.write_config(config)
325 bitbake("core-image-minimal") 320 bitbake("core-image-minimal")
@@ -330,7 +325,7 @@ CORE_IMAGE_EXTRA_INSTALL = "man-pages kmod-doc"
330 self.assertEqual(status, 1, 'Failed to run apropos: %s' % (output)) 325 self.assertEqual(status, 1, 'Failed to run apropos: %s' % (output))
331 self.assertIn("iso_8859_15", output) 326 self.assertIn("iso_8859_15", output)
332 327
333 # This manpage is provided by kmod 328 # This manpage is provided by man-pages
334 status, output = qemu.run_serial("man --pager=cat modprobe") 329 status, output = qemu.run_serial("man --pager=cat intro")
335 self.assertEqual(status, 1, 'Failed to run man: %s' % (output)) 330 self.assertEqual(status, 1, 'Failed to run man: %s' % (output))
336 self.assertIn("force-modversion", output) 331 self.assertIn("introduction to user commands", output)
diff --git a/meta/lib/oeqa/selftest/cases/incompatible_lic.py b/meta/lib/oeqa/selftest/cases/incompatible_lic.py
index f4af67a239..be5484bca4 100644
--- a/meta/lib/oeqa/selftest/cases/incompatible_lic.py
+++ b/meta/lib/oeqa/selftest/cases/incompatible_lic.py
@@ -114,7 +114,7 @@ INCOMPATIBLE_LICENSE:pn-core-image-minimal = "GPL-3.0* LGPL-3.0*"
114 114
115 def test_bash_and_license(self): 115 def test_bash_and_license(self):
116 self.disable_class("create-spdx") 116 self.disable_class("create-spdx")
117 self.write_config(self.default_config() + '\nLICENSE:append:pn-bash = " & SomeLicense"') 117 self.write_config(self.default_config() + '\nLICENSE:append:pn-bash = " & SomeLicense"\nERROR_QA:remove:pn-bash = "license-exists"')
118 error_msg = "ERROR: core-image-minimal-1.0-r0 do_rootfs: Package bash cannot be installed into the image because it has incompatible license(s): GPL-3.0-or-later" 118 error_msg = "ERROR: core-image-minimal-1.0-r0 do_rootfs: Package bash cannot be installed into the image because it has incompatible license(s): GPL-3.0-or-later"
119 119
120 result = bitbake('core-image-minimal', ignore_status=True) 120 result = bitbake('core-image-minimal', ignore_status=True)
@@ -123,12 +123,12 @@ INCOMPATIBLE_LICENSE:pn-core-image-minimal = "GPL-3.0* LGPL-3.0*"
123 123
124 def test_bash_or_license(self): 124 def test_bash_or_license(self):
125 self.disable_class("create-spdx") 125 self.disable_class("create-spdx")
126 self.write_config(self.default_config() + '\nLICENSE:append:pn-bash = " | SomeLicense"') 126 self.write_config(self.default_config() + '\nLICENSE:append:pn-bash = " | SomeLicense"\nERROR_QA:remove:pn-bash = "license-exists"\nERROR_QA:remove:pn-core-image-minimal = "license-file-missing"')
127 127
128 bitbake('core-image-minimal') 128 bitbake('core-image-minimal')
129 129
130 def test_bash_license_exceptions(self): 130 def test_bash_license_exceptions(self):
131 self.write_config(self.default_config() + '\nINCOMPATIBLE_LICENSE_EXCEPTIONS:pn-core-image-minimal = "bash:GPL-3.0-or-later"') 131 self.write_config(self.default_config() + '\nINCOMPATIBLE_LICENSE_EXCEPTIONS:pn-core-image-minimal = "bash:GPL-3.0-or-later"\nERROR_QA:remove:pn-core-image-minimal = "license-exception"')
132 132
133 bitbake('core-image-minimal') 133 bitbake('core-image-minimal')
134 134
diff --git a/meta/lib/oeqa/selftest/cases/layerappend.py b/meta/lib/oeqa/selftest/cases/layerappend.py
index 379ed589ad..64b17117cc 100644
--- a/meta/lib/oeqa/selftest/cases/layerappend.py
+++ b/meta/lib/oeqa/selftest/cases/layerappend.py
@@ -37,7 +37,7 @@ FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"
37SRC_URI:append = " file://appendtest.txt" 37SRC_URI:append = " file://appendtest.txt"
38 38
39sysroot_stage_all:append() { 39sysroot_stage_all:append() {
40 install -m 644 ${WORKDIR}/appendtest.txt ${SYSROOT_DESTDIR}/ 40 install -m 644 ${UNPACKDIR}/appendtest.txt ${SYSROOT_DESTDIR}/
41} 41}
42 42
43""" 43"""
diff --git a/meta/lib/oeqa/selftest/cases/locales.py b/meta/lib/oeqa/selftest/cases/locales.py
index 4ca8ffb7aa..ac4888ef66 100644
--- a/meta/lib/oeqa/selftest/cases/locales.py
+++ b/meta/lib/oeqa/selftest/cases/locales.py
@@ -14,7 +14,7 @@ class LocalesTest(OESelftestTestCase):
14 features = [] 14 features = []
15 features.append('EXTRA_IMAGE_FEATURES = "empty-root-password allow-empty-password allow-root-login"') 15 features.append('EXTRA_IMAGE_FEATURES = "empty-root-password allow-empty-password allow-root-login"')
16 features.append('IMAGE_INSTALL:append = " glibc-utils localedef"') 16 features.append('IMAGE_INSTALL:append = " glibc-utils localedef"')
17 features.append('GLIBC_GENERATE_LOCALES = "en_US.UTF-8 fr_FR.UTF-8"') 17 features.append('GLIBC_GENERATE_LOCALES = "en_US.UTF-8 fr_FR.UTF-8 en_US.ISO-8859-1 de_DE.UTF-8 fr_FR.ISO-8859-1 zh_HK.BIG5-HKSCS tr_TR.UTF-8"')
18 features.append('IMAGE_LINGUAS:append = " en-us fr-fr"') 18 features.append('IMAGE_LINGUAS:append = " en-us fr-fr"')
19 if binary_enabled: 19 if binary_enabled:
20 features.append('ENABLE_BINARY_LOCALE_GENERATION = "1"') 20 features.append('ENABLE_BINARY_LOCALE_GENERATION = "1"')
diff --git a/meta/lib/oeqa/selftest/cases/meta_ide.py b/meta/lib/oeqa/selftest/cases/meta_ide.py
index ffe0d2604d..5a17ca52ea 100644
--- a/meta/lib/oeqa/selftest/cases/meta_ide.py
+++ b/meta/lib/oeqa/selftest/cases/meta_ide.py
@@ -20,8 +20,8 @@ class MetaIDE(OESelftestTestCase):
20 bitbake('meta-ide-support') 20 bitbake('meta-ide-support')
21 bitbake('build-sysroots -c build_native_sysroot') 21 bitbake('build-sysroots -c build_native_sysroot')
22 bitbake('build-sysroots -c build_target_sysroot') 22 bitbake('build-sysroots -c build_target_sysroot')
23 bb_vars = get_bb_vars(['MULTIMACH_TARGET_SYS', 'DEPLOY_DIR_IMAGE', 'COREBASE']) 23 bb_vars = get_bb_vars(['MACHINE_ARCH', 'TARGET_VENDOR', 'TARGET_OS', 'DEPLOY_DIR_IMAGE', 'COREBASE'])
24 cls.environment_script = 'environment-setup-%s' % bb_vars['MULTIMACH_TARGET_SYS'] 24 cls.environment_script = 'environment-setup-%s%s-%s' % (bb_vars['MACHINE_ARCH'], bb_vars['TARGET_VENDOR'], bb_vars['TARGET_OS'])
25 cls.deploydir = bb_vars['DEPLOY_DIR_IMAGE'] 25 cls.deploydir = bb_vars['DEPLOY_DIR_IMAGE']
26 cls.environment_script_path = '%s/%s' % (cls.deploydir, cls.environment_script) 26 cls.environment_script_path = '%s/%s' % (cls.deploydir, cls.environment_script)
27 cls.corebasedir = bb_vars['COREBASE'] 27 cls.corebasedir = bb_vars['COREBASE']
diff --git a/meta/lib/oeqa/selftest/cases/minidebuginfo.py b/meta/lib/oeqa/selftest/cases/minidebuginfo.py
index 2919f07939..a8923460f9 100644
--- a/meta/lib/oeqa/selftest/cases/minidebuginfo.py
+++ b/meta/lib/oeqa/selftest/cases/minidebuginfo.py
@@ -8,6 +8,7 @@ import subprocess
8import tempfile 8import tempfile
9import shutil 9import shutil
10 10
11from oeqa.core.decorator import OETestTag
11from oeqa.selftest.case import OESelftestTestCase 12from oeqa.selftest.case import OESelftestTestCase
12from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runCmd 13from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runCmd
13 14
@@ -42,3 +43,18 @@ IMAGE_FSTYPES = "tar.bz2"
42 native_sysroot = native_sysroot, target_sys = target_sys) 43 native_sysroot = native_sysroot, target_sys = target_sys)
43 self.assertIn(".gnu_debugdata", r.output) 44 self.assertIn(".gnu_debugdata", r.output)
44 45
46 @OETestTag("runqemu")
47 def test_minidebuginfo_qemu(self):
48 """
49 Test minidebuginfo inside a qemu.
50 This runs test_systemd_coredump_minidebuginfo and other minidebuginfo runtime tests which may be added in the future.
51 """
52
53 self.write_config("""
54DISTRO_FEATURES:append = " minidebuginfo"
55INIT_MANAGER = "systemd"
56IMAGE_CLASSES += "testimage"
57TEST_SUITES = "ping ssh systemd"
58 """)
59 bitbake('core-image-minimal')
60 bitbake('-c testimage core-image-minimal')
diff --git a/meta/lib/oeqa/selftest/cases/oescripts.py b/meta/lib/oeqa/selftest/cases/oescripts.py
index f69efccfee..bfbc33b08d 100644
--- a/meta/lib/oeqa/selftest/cases/oescripts.py
+++ b/meta/lib/oeqa/selftest/cases/oescripts.py
@@ -175,7 +175,7 @@ class OEListPackageconfigTests(OESelftestTestCase):
175 def test_packageconfig_flags_option_all(self): 175 def test_packageconfig_flags_option_all(self):
176 results = runCmd('%s/contrib/list-packageconfig-flags.py -a' % self.scripts_dir) 176 results = runCmd('%s/contrib/list-packageconfig-flags.py -a' % self.scripts_dir)
177 expected_endlines = [] 177 expected_endlines = []
178 expected_endlines.append("pinentry-1.2.1") 178 expected_endlines.append("pinentry-1.3.1")
179 expected_endlines.append("PACKAGECONFIG ncurses") 179 expected_endlines.append("PACKAGECONFIG ncurses")
180 expected_endlines.append("PACKAGECONFIG[qt] --enable-pinentry-qt, --disable-pinentry-qt, qtbase-native qtbase") 180 expected_endlines.append("PACKAGECONFIG[qt] --enable-pinentry-qt, --disable-pinentry-qt, qtbase-native qtbase")
181 expected_endlines.append("PACKAGECONFIG[gtk2] --enable-pinentry-gtk2, --disable-pinentry-gtk2, gtk+ glib-2.0") 181 expected_endlines.append("PACKAGECONFIG[gtk2] --enable-pinentry-gtk2, --disable-pinentry-gtk2, gtk+ glib-2.0")
diff --git a/meta/lib/oeqa/selftest/cases/overlayfs.py b/meta/lib/oeqa/selftest/cases/overlayfs.py
index e31063567b..580fbdcb9c 100644
--- a/meta/lib/oeqa/selftest/cases/overlayfs.py
+++ b/meta/lib/oeqa/selftest/cases/overlayfs.py
@@ -5,7 +5,7 @@
5# 5#
6 6
7from oeqa.selftest.case import OESelftestTestCase 7from oeqa.selftest.case import OESelftestTestCase
8from oeqa.utils.commands import bitbake, runqemu 8from oeqa.utils.commands import bitbake, runqemu, get_bb_vars
9from oeqa.core.decorator import OETestTag 9from oeqa.core.decorator import OETestTag
10from oeqa.core.decorator.data import skipIfNotMachine 10from oeqa.core.decorator.data import skipIfNotMachine
11 11
@@ -466,6 +466,45 @@ IMAGE_INSTALL:append = " overlayfs-user"
466 line = getline_qemu(output, "Read-only file system") 466 line = getline_qemu(output, "Read-only file system")
467 self.assertTrue(line, msg=output) 467 self.assertTrue(line, msg=output)
468 468
469 @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
470 def test_postinst_on_target_for_read_only_rootfs(self):
471 """
472 Summary: The purpose of this test case is to verify that post-installation
473 scripts deferred to the target are executed even when using a read-only rootfs
474 with read-only-rootfs-delayed-postinsts set
475 Expected: The test files are created on first boot
476 """
477
478 import oe.path
479
480 vars = get_bb_vars(("IMAGE_ROOTFS", "sysconfdir"), "core-image-minimal")
481 sysconfdir = vars["sysconfdir"]
482 self.assertIsNotNone(sysconfdir)
483 # Need to use oe.path here as sysconfdir starts with /
484 targettestdir = os.path.join(sysconfdir, "postinst-test")
485
486 config = self.get_working_config()
487
488 args = {
489 'OVERLAYFS_INIT_OPTION': "",
490 'OVERLAYFS_ETC_USE_ORIG_INIT_NAME': 1,
491 'OVERLAYFS_ROOTFS_TYPE': "ext4",
492 'OVERLAYFS_ETC_CREATE_MOUNT_DIRS': 1
493 }
494
495 # read-only-rootfs is already set in get_working_config()
496 config += 'EXTRA_IMAGE_FEATURES += "read-only-rootfs-delayed-postinsts"\n'
497 config += 'CORE_IMAGE_EXTRA_INSTALL = "postinst-delayed-b"\n'
498
499 self.write_config(config.format(**args))
500
501 res = bitbake('core-image-minimal')
502
503 with runqemu('core-image-minimal', image_fstype='wic') as qemu:
504 for filename in ("rootfs", "delayed-a", "delayed-b"):
505 status, output = qemu.run_serial("test -f %s && echo found" % os.path.join(targettestdir, filename))
506 self.assertIn("found", output, "%s was not present on boot" % filename)
507
469 def get_working_config(self): 508 def get_working_config(self):
470 return """ 509 return """
471# Use systemd as init manager 510# Use systemd as init manager
diff --git a/meta/lib/oeqa/selftest/cases/package.py b/meta/lib/oeqa/selftest/cases/package.py
index 1aa6c03f8a..38ed7173fe 100644
--- a/meta/lib/oeqa/selftest/cases/package.py
+++ b/meta/lib/oeqa/selftest/cases/package.py
@@ -103,11 +103,37 @@ class PackageTests(OESelftestTestCase):
103 103
104 dest = get_bb_var('PKGDEST', 'selftest-hardlink') 104 dest = get_bb_var('PKGDEST', 'selftest-hardlink')
105 bindir = get_bb_var('bindir', 'selftest-hardlink') 105 bindir = get_bb_var('bindir', 'selftest-hardlink')
106 libdir = get_bb_var('libdir', 'selftest-hardlink')
107 libexecdir = get_bb_var('libexecdir', 'selftest-hardlink')
106 108
107 def checkfiles(): 109 def checkfiles():
108 # Recipe creates 4 hardlinked files, there is a copy in package/ and a copy in packages-split/ 110 # Recipe creates 4 hardlinked files, there is a copy in package/ and a copy in packages-split/
109 # so expect 8 in total. 111 # so expect 8 in total.
110 self.assertEqual(os.stat(dest + "/selftest-hardlink" + bindir + "/hello1").st_nlink, 8) 112 self.assertEqual(os.stat(dest + "/selftest-hardlink" + bindir + "/hello1").st_nlink, 8)
113 self.assertEqual(os.stat(dest + "/selftest-hardlink" + libexecdir + "/hello3").st_nlink, 8)
114
115 # Check dbg version
116 # 2 items, a copy in both package/packages-split so 4
117 self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + bindir + "/.debug/hello1").st_nlink, 4)
118 self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + libexecdir + "/.debug/hello1").st_nlink, 4)
119
120 # Even though the libexecdir name is 'hello3' or 'hello4', that isn't the debug target name
121 self.assertEqual(os.path.exists(dest + "/selftest-hardlink-dbg" + libexecdir + "/.debug/hello3"), False)
122 self.assertEqual(os.path.exists(dest + "/selftest-hardlink-dbg" + libexecdir + "/.debug/hello4"), False)
123
124 # Check the staticdev libraries
125 # 101 items, a copy in both package/packages-split so 202
126 self.assertEqual(os.stat(dest + "/selftest-hardlink-staticdev" + libdir + "/libhello.a").st_nlink, 202)
127 self.assertEqual(os.stat(dest + "/selftest-hardlink-staticdev" + libdir + "/libhello-25.a").st_nlink, 202)
128 self.assertEqual(os.stat(dest + "/selftest-hardlink-staticdev" + libdir + "/libhello-50.a").st_nlink, 202)
129 self.assertEqual(os.stat(dest + "/selftest-hardlink-staticdev" + libdir + "/libhello-75.a").st_nlink, 202)
130
131 # Check static dbg
132 # 101 items, a copy in both package/packages-split so 202
133 self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + libdir + "/.debug-static/libhello.a").st_nlink, 202)
134 self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + libdir + "/.debug-static/libhello-25.a").st_nlink, 202)
135 self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + libdir + "/.debug-static/libhello-50.a").st_nlink, 202)
136 self.assertEqual(os.stat(dest + "/selftest-hardlink-dbg" + libdir + "/.debug-static/libhello-75.a").st_nlink, 202)
111 137
112 # Test a sparse file remains sparse 138 # Test a sparse file remains sparse
113 sparsestat = os.stat(dest + "/selftest-hardlink" + bindir + "/sparsetest") 139 sparsestat = os.stat(dest + "/selftest-hardlink" + bindir + "/sparsetest")
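Note on the package.py hardlink checks above: do_package keeps one copy of every packaged file under package/ and another under packages-split/, so the link count of each hardlinked file doubles (4 links become 8, 101 become 202). A small self-contained illustration of how st_nlink counts hard links (temporary files, not the selftest-hardlink recipe):

    import os
    import tempfile

    with tempfile.TemporaryDirectory() as d:
        first = os.path.join(d, 'hello1')
        second = os.path.join(d, 'hello1.link')
        with open(first, 'w') as f:
            f.write('data')
        os.link(first, second)          # a second hard link to the same inode
        assert os.stat(first).st_nlink == 2
        assert os.stat(second).st_nlink == 2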
diff --git a/meta/lib/oeqa/selftest/cases/picolibc.py b/meta/lib/oeqa/selftest/cases/picolibc.py
new file mode 100644
index 0000000000..e40b4fc3d3
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/picolibc.py
@@ -0,0 +1,18 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7from oeqa.selftest.case import OESelftestTestCase
8from oeqa.utils.commands import bitbake, get_bb_var
9
10class PicolibcTest(OESelftestTestCase):
11
12 def test_picolibc(self):
13 compatible_machines = ['qemuarm', 'qemuarm64', 'qemuriscv32', 'qemuriscv64']
14 machine = get_bb_var('MACHINE')
15 if machine not in compatible_machines:
16 self.skipTest('This test only works with machines : %s' % ' '.join(compatible_machines))
17 self.write_config('TCLIBC = "picolibc"')
18 bitbake("picolibc-helloworld")
diff --git a/meta/lib/oeqa/selftest/cases/recipetool.py b/meta/lib/oeqa/selftest/cases/recipetool.py
index 126906df50..f742dd4d64 100644
--- a/meta/lib/oeqa/selftest/cases/recipetool.py
+++ b/meta/lib/oeqa/selftest/cases/recipetool.py
@@ -138,7 +138,7 @@ class RecipetoolAppendTests(RecipetoolBase):
138 '\n', 138 '\n',
139 'do_install:append() {\n', 139 'do_install:append() {\n',
140 ' install -d ${D}${%s}\n' % dirname, 140 ' install -d ${D}${%s}\n' % dirname,
141 ' install -m 0755 ${WORKDIR}/%s ${D}${%s}/ls\n' % (testfile2name, dirname), 141 ' install -m 0755 ${UNPACKDIR}/%s ${D}${%s}/ls\n' % (testfile2name, dirname),
142 '}\n'] 142 '}\n']
143 self._try_recipetool_appendfile('coreutils', lspath, testfile2, '-r coreutils', expectedlines, [testfile2name]) 143 self._try_recipetool_appendfile('coreutils', lspath, testfile2, '-r coreutils', expectedlines, [testfile2name])
144 # Now try bbappending the same file again, contents should not change 144 # Now try bbappending the same file again, contents should not change
@@ -164,7 +164,7 @@ class RecipetoolAppendTests(RecipetoolBase):
164 '\n', 164 '\n',
165 'do_install:append() {\n', 165 'do_install:append() {\n',
166 ' install -d ${D}${datadir}\n', 166 ' install -d ${D}${datadir}\n',
167 ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n', 167 ' install -m 0644 ${UNPACKDIR}/testfile ${D}${datadir}/something\n',
168 '}\n'] 168 '}\n']
169 self._try_recipetool_appendfile('netbase', '/usr/share/something', self.testfile, '-r netbase', expectedlines, ['testfile']) 169 self._try_recipetool_appendfile('netbase', '/usr/share/something', self.testfile, '-r netbase', expectedlines, ['testfile'])
170 # Try adding another file, this time where the source file is executable 170 # Try adding another file, this time where the source file is executable
@@ -179,8 +179,8 @@ class RecipetoolAppendTests(RecipetoolBase):
179 '\n', 179 '\n',
180 'do_install:append() {\n', 180 'do_install:append() {\n',
181 ' install -d ${D}${datadir}\n', 181 ' install -d ${D}${datadir}\n',
182 ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n', 182 ' install -m 0644 ${UNPACKDIR}/testfile ${D}${datadir}/something\n',
183 ' install -m 0755 ${WORKDIR}/%s ${D}${datadir}/scriptname\n' % testfile2name, 183 ' install -m 0755 ${UNPACKDIR}/%s ${D}${datadir}/scriptname\n' % testfile2name,
184 '}\n'] 184 '}\n']
185 self._try_recipetool_appendfile('netbase', '/usr/share/scriptname', testfile2, '-r netbase', expectedlines, ['testfile', testfile2name]) 185 self._try_recipetool_appendfile('netbase', '/usr/share/scriptname', testfile2, '-r netbase', expectedlines, ['testfile', testfile2name])
186 186
@@ -192,7 +192,7 @@ class RecipetoolAppendTests(RecipetoolBase):
192 '\n', 192 '\n',
193 'do_install:append() {\n', 193 'do_install:append() {\n',
194 ' install -d ${D}${bindir}\n', 194 ' install -d ${D}${bindir}\n',
195 ' install -m 0755 ${WORKDIR}/testfile ${D}${bindir}/selftest-recipetool-testbin\n', 195 ' install -m 0755 ${UNPACKDIR}/testfile ${D}${bindir}/selftest-recipetool-testbin\n',
196 '}\n'] 196 '}\n']
197 _, output = self._try_recipetool_appendfile('netbase', '/usr/bin/selftest-recipetool-testbin', self.testfile, '-r netbase', expectedlines, ['testfile']) 197 _, output = self._try_recipetool_appendfile('netbase', '/usr/bin/selftest-recipetool-testbin', self.testfile, '-r netbase', expectedlines, ['testfile'])
198 self.assertNotIn('WARNING: ', output) 198 self.assertNotIn('WARNING: ', output)
@@ -207,7 +207,7 @@ class RecipetoolAppendTests(RecipetoolBase):
207 '\n', 207 '\n',
208 'do_install:append:mymachine() {\n', 208 'do_install:append:mymachine() {\n',
209 ' install -d ${D}${datadir}\n', 209 ' install -d ${D}${datadir}\n',
210 ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n', 210 ' install -m 0644 ${UNPACKDIR}/testfile ${D}${datadir}/something\n',
211 '}\n'] 211 '}\n']
212 _, output = self._try_recipetool_appendfile('netbase', '/usr/share/something', self.testfile, '-r netbase -m mymachine', expectedlines, ['mymachine/testfile']) 212 _, output = self._try_recipetool_appendfile('netbase', '/usr/share/something', self.testfile, '-r netbase -m mymachine', expectedlines, ['mymachine/testfile'])
213 self.assertNotIn('WARNING: ', output) 213 self.assertNotIn('WARNING: ', output)
@@ -241,7 +241,7 @@ class RecipetoolAppendTests(RecipetoolBase):
241 '\n', 241 '\n',
242 'do_install:append() {\n', 242 'do_install:append() {\n',
243 ' install -d ${D}${datadir}\n', 243 ' install -d ${D}${datadir}\n',
244 ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-subdir\n', 244 ' install -m 0644 ${UNPACKDIR}/testfile ${D}${datadir}/selftest-replaceme-subdir\n',
245 '}\n'] 245 '}\n']
246 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-subdir', self.testfile, '', expectedlines, ['testfile']) 246 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-subdir', self.testfile, '', expectedlines, ['testfile'])
247 self.assertNotIn('WARNING: ', output) 247 self.assertNotIn('WARNING: ', output)
@@ -268,7 +268,7 @@ class RecipetoolAppendTests(RecipetoolBase):
268 '\n', 268 '\n',
269 'do_install:append() {\n', 269 'do_install:append() {\n',
270 ' install -d ${D}${sysconfdir}\n', 270 ' install -d ${D}${sysconfdir}\n',
271 ' install -m 0644 ${WORKDIR}/testfile ${D}${sysconfdir}/selftest-replaceme-patched\n', 271 ' install -m 0644 ${UNPACKDIR}/testfile ${D}${sysconfdir}/selftest-replaceme-patched\n',
272 '}\n'] 272 '}\n']
273 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/etc/selftest-replaceme-patched', self.testfile, '', expectedlines, ['testfile']) 273 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/etc/selftest-replaceme-patched', self.testfile, '', expectedlines, ['testfile'])
274 for line in output.splitlines(): 274 for line in output.splitlines():
@@ -286,7 +286,7 @@ class RecipetoolAppendTests(RecipetoolBase):
286 '\n', 286 '\n',
287 'do_install:append() {\n', 287 'do_install:append() {\n',
288 ' install -d ${D}${datadir}\n', 288 ' install -d ${D}${datadir}\n',
289 ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-scripted\n', 289 ' install -m 0644 ${UNPACKDIR}/testfile ${D}${datadir}/selftest-replaceme-scripted\n',
290 '}\n'] 290 '}\n']
291 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-scripted', self.testfile, '', expectedlines, ['testfile']) 291 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-scripted', self.testfile, '', expectedlines, ['testfile'])
292 self.assertNotIn('WARNING: ', output) 292 self.assertNotIn('WARNING: ', output)
@@ -309,7 +309,7 @@ class RecipetoolAppendTests(RecipetoolBase):
309 '\n', 309 '\n',
310 'do_install:append() {\n', 310 'do_install:append() {\n',
311 ' install -d ${D}${datadir}\n', 311 ' install -d ${D}${datadir}\n',
312 ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-postinst\n', 312 ' install -m 0644 ${UNPACKDIR}/testfile ${D}${datadir}/selftest-replaceme-postinst\n',
313 '}\n'] 313 '}\n']
314 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-postinst', self.testfile, '-r selftest-recipetool-appendfile', expectedlines, ['testfile']) 314 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-postinst', self.testfile, '-r selftest-recipetool-appendfile', expectedlines, ['testfile'])
315 315
@@ -1068,6 +1068,7 @@ class RecipetoolTests(RecipetoolBase):
1068 1068
1069 d = DataConnectorCopy 1069 d = DataConnectorCopy
1070 d.getVar = Mock(return_value=commonlicdir) 1070 d.getVar = Mock(return_value=commonlicdir)
1071 d.expand = Mock(side_effect=lambda x: x)
1071 1072
1072 srctree = tempfile.mkdtemp(prefix='recipetoolqa') 1073 srctree = tempfile.mkdtemp(prefix='recipetoolqa')
1073 self.track_for_cleanup(srctree) 1074 self.track_for_cleanup(srctree)
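Note on the recipetool.py change above: the mocked datastore now also stubs d.expand so variable expansion becomes a pass-through during the test. A standalone illustration of the same stubbing idea (plain unittest.mock, outside the selftest harness; the directory value is hypothetical):

    from unittest.mock import Mock

    d = Mock()
    d.getVar = Mock(return_value='/srv/licenses/common')   # hypothetical return value
    d.expand = Mock(side_effect=lambda x: x)               # identity: string returned unchanged

    assert d.expand('${S}/COPYING') == '${S}/COPYING'
    assert d.getVar('COMMON_LICENSE_DIR') == '/srv/licenses/common'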
diff --git a/meta/lib/oeqa/selftest/cases/recipeutils.py b/meta/lib/oeqa/selftest/cases/recipeutils.py
index 2cb4445f81..9949737172 100644
--- a/meta/lib/oeqa/selftest/cases/recipeutils.py
+++ b/meta/lib/oeqa/selftest/cases/recipeutils.py
@@ -72,7 +72,7 @@ class RecipeUtilsTests(OESelftestTestCase):
72 expected_patch = """ 72 expected_patch = """
73--- a/recipes-test/recipeutils/recipeutils-test_1.2.bb 73--- a/recipes-test/recipeutils/recipeutils-test_1.2.bb
74+++ b/recipes-test/recipeutils/recipeutils-test_1.2.bb 74+++ b/recipes-test/recipeutils/recipeutils-test_1.2.bb
75@@ -8,6 +8,4 @@ 75@@ -11,6 +11,4 @@
76 76
77 BBCLASSEXTEND = "native nativesdk" 77 BBCLASSEXTEND = "native nativesdk"
78 78
@@ -97,7 +97,7 @@ class RecipeUtilsTests(OESelftestTestCase):
97 expected_patch = """ 97 expected_patch = """
98--- a/recipes-test/recipeutils/recipeutils-test_1.2.bb 98--- a/recipes-test/recipeutils/recipeutils-test_1.2.bb
99+++ b/recipes-test/recipeutils/recipeutils-test_1.2.bb 99+++ b/recipes-test/recipeutils/recipeutils-test_1.2.bb
100@@ -8,6 +8,3 @@ 100@@ -11,6 +11,3 @@
101 101
102 BBCLASSEXTEND = "native nativesdk" 102 BBCLASSEXTEND = "native nativesdk"
103 103
diff --git a/meta/lib/oeqa/selftest/cases/reproducible.py b/meta/lib/oeqa/selftest/cases/reproducible.py
index 80e830136f..3d3f30eebc 100644
--- a/meta/lib/oeqa/selftest/cases/reproducible.py
+++ b/meta/lib/oeqa/selftest/cases/reproducible.py
@@ -133,7 +133,8 @@ class ReproducibleTests(OESelftestTestCase):
133 max_report_size = 250 * 1024 * 1024 133 max_report_size = 250 * 1024 * 1024
134 134
135 # targets are the things we want to test the reproducibility of 135 # targets are the things we want to test the reproducibility of
136 targets = ['core-image-minimal', 'core-image-sato', 'core-image-full-cmdline', 'core-image-weston', 'world'] 136 # Have to add the virtual targets manually for now as builds may or may not include them as they're excluded from world
137 targets = ['core-image-minimal', 'core-image-sato', 'core-image-full-cmdline', 'core-image-weston', 'world', 'virtual/librpc', 'virtual/libsdl2', 'virtual/crypt']
137 138
138 # sstate targets are things to pull from sstate to potentially cut build/debugging time 139 # sstate targets are things to pull from sstate to potentially cut build/debugging time
139 sstate_targets = [] 140 sstate_targets = []
@@ -273,9 +274,13 @@ class ReproducibleTests(OESelftestTestCase):
273 os.chmod(save_dir, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH) 274 os.chmod(save_dir, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
274 self.logger.info('Non-reproducible packages will be copied to %s', save_dir) 275 self.logger.info('Non-reproducible packages will be copied to %s', save_dir)
275 276
 277 # The bug below shows that a few reproducibility issues depend on the build directory path length.
 278 # https://bugzilla.yoctoproject.org/show_bug.cgi?id=15554
 279 # So the reproducibleA & reproducibleB directories are renamed to reproducibleA & reproducibleB-extended so that their path lengths differ.
280
276 vars_A = self.do_test_build('reproducibleA', self.build_from_sstate) 281 vars_A = self.do_test_build('reproducibleA', self.build_from_sstate)
277 282
278 vars_B = self.do_test_build('reproducibleB', False) 283 vars_B = self.do_test_build('reproducibleB-extended', False)
279 284
280 # NOTE: The temp directories from the reproducible build are purposely 285 # NOTE: The temp directories from the reproducible build are purposely
281 # kept after the build so it can be diffed for debugging. 286 # kept after the build so it can be diffed for debugging.
@@ -330,7 +335,7 @@ class ReproducibleTests(OESelftestTestCase):
330 # Copy jquery to improve the diffoscope output usability 335 # Copy jquery to improve the diffoscope output usability
331 self.copy_file(os.path.join(jquery_sysroot, 'usr/share/javascript/jquery/jquery.min.js'), os.path.join(package_html_dir, 'jquery.js')) 336 self.copy_file(os.path.join(jquery_sysroot, 'usr/share/javascript/jquery/jquery.min.js'), os.path.join(package_html_dir, 'jquery.js'))
332 337
333 run_diffoscope('reproducibleA', 'reproducibleB', package_html_dir, max_report_size=self.max_report_size, 338 run_diffoscope('reproducibleA', 'reproducibleB-extended', package_html_dir, max_report_size=self.max_report_size,
334 native_sysroot=diffoscope_sysroot, ignore_status=True, cwd=package_dir) 339 native_sysroot=diffoscope_sysroot, ignore_status=True, cwd=package_dir)
335 340
336 if fails: 341 if fails:
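The renamed build directory is only there to make the two trees differ in path length, which is what the referenced bug is about; a trivial sketch of the property the test now relies on (names taken from the hunk above):

    # The functional point of the rename: the two TMPDIR names no longer have
    # equal lengths, so path-length-dependent reproducibility issues can surface.
    name_a = 'reproducibleA'
    name_b = 'reproducibleB-extended'
    assert len(name_a) != len(name_b)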
diff --git a/meta/lib/oeqa/selftest/cases/retain.py b/meta/lib/oeqa/selftest/cases/retain.py
new file mode 100644
index 0000000000..892be45857
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/retain.py
@@ -0,0 +1,241 @@
1# Tests for retain.bbclass
2#
3# Copyright OpenEmbedded Contributors
4#
5# SPDX-License-Identifier: MIT
6#
7
8import os
9import glob
10import fnmatch
11import oe.path
12import shutil
13import tarfile
14from oeqa.utils.commands import bitbake, get_bb_vars
15from oeqa.selftest.case import OESelftestTestCase
16
17class Retain(OESelftestTestCase):
18
19 def test_retain_always(self):
20 """
21 Summary: Test retain class with RETAIN_DIRS_ALWAYS
22 Expected: Archive written to RETAIN_OUTDIR when build of test recipe completes
23 Product: oe-core
24 Author: Paul Eggleton <paul.eggleton@microsoft.com>
25 """
26
27 test_recipe = 'quilt-native'
28
29 features = 'INHERIT += "retain"\n'
30 features += 'RETAIN_DIRS_ALWAYS = "${T}"\n'
31 self.write_config(features)
32
33 bitbake('-c clean %s' % test_recipe)
34
35 bb_vars = get_bb_vars(['RETAIN_OUTDIR', 'TMPDIR'])
36 retain_outdir = bb_vars['RETAIN_OUTDIR'] or ''
37 tmpdir = bb_vars['TMPDIR']
38 if len(retain_outdir) < 5:
39 self.fail('RETAIN_OUTDIR value "%s" is invalid' % retain_outdir)
40 if not oe.path.is_path_parent(tmpdir, retain_outdir):
41 self.fail('RETAIN_OUTDIR (%s) is not underneath TMPDIR (%s)' % (retain_outdir, tmpdir))
42 try:
43 shutil.rmtree(retain_outdir)
44 except FileNotFoundError:
45 pass
46
47 bitbake(test_recipe)
48 if not glob.glob(os.path.join(retain_outdir, '%s_temp_*.tar.gz' % test_recipe)):
49 self.fail('No output archive for %s created' % test_recipe)
50
51
52 def test_retain_failure(self):
53 """
54 Summary: Test retain class default behaviour
55 Expected: Archive written to RETAIN_OUTDIR only when build of test
56 recipe fails, and archive contents are as expected
57 Product: oe-core
58 Author: Paul Eggleton <paul.eggleton@microsoft.com>
59 """
60
61 test_recipe_fail = 'error'
62
63 features = 'INHERIT += "retain"\n'
64 self.write_config(features)
65
66 bb_vars = get_bb_vars(['RETAIN_OUTDIR', 'TMPDIR', 'RETAIN_DIRS_ALWAYS', 'RETAIN_DIRS_GLOBAL_ALWAYS'])
67 if bb_vars['RETAIN_DIRS_ALWAYS']:
68 self.fail('RETAIN_DIRS_ALWAYS is set, this interferes with the test')
69 if bb_vars['RETAIN_DIRS_GLOBAL_ALWAYS']:
70 self.fail('RETAIN_DIRS_GLOBAL_ALWAYS is set, this interferes with the test')
71 retain_outdir = bb_vars['RETAIN_OUTDIR'] or ''
72 tmpdir = bb_vars['TMPDIR']
73 if len(retain_outdir) < 5:
74 self.fail('RETAIN_OUTDIR value "%s" is invalid' % retain_outdir)
75 if not oe.path.is_path_parent(tmpdir, retain_outdir):
76 self.fail('RETAIN_OUTDIR (%s) is not underneath TMPDIR (%s)' % (retain_outdir, tmpdir))
77
78 try:
79 shutil.rmtree(retain_outdir)
80 except FileNotFoundError:
81 pass
82
83 bitbake('-c clean %s' % test_recipe_fail)
84
85 if os.path.exists(retain_outdir):
86 retain_dirlist = os.listdir(retain_outdir)
87 if retain_dirlist:
88 self.fail('RETAIN_OUTDIR should be empty without failure, contents:\n%s' % '\n'.join(retain_dirlist))
89
90 result = bitbake('-c compile %s' % test_recipe_fail, ignore_status=True)
91 if result.status == 0:
92 self.fail('Build of %s did not fail as expected' % test_recipe_fail)
93
94 archives = glob.glob(os.path.join(retain_outdir, '%s_*.tar.gz' % test_recipe_fail))
95 if not archives:
96 self.fail('No output archive for %s created' % test_recipe_fail)
97 if len(archives) > 1:
98 self.fail('More than one archive for %s created' % test_recipe_fail)
99 for archive in archives:
100 found = False
101 archive_prefix = os.path.basename(archive).split('.tar')[0]
102 expected_prefix_start = '%s_workdir' % test_recipe_fail
103 if not archive_prefix.startswith(expected_prefix_start):
104 self.fail('Archive %s name does not start with expected prefix "%s"' % (os.path.basename(archive), expected_prefix_start))
105 with tarfile.open(archive) as tf:
106 for ti in tf:
107 if not fnmatch.fnmatch(ti.name, '%s/*' % archive_prefix):
108 self.fail('File without tarball-named subdirectory within tarball %s: %s' % (os.path.basename(archive), ti.name))
109 if ti.name.endswith('/temp/log.do_compile'):
110 found = True
111 if not found:
112 self.fail('Did not find log.do_compile in output archive %s' % os.path.basename(archive))
113
114
115 def test_retain_global(self):
116 """
117 Summary: Test retain class RETAIN_DIRS_GLOBAL_* behaviour
118 Expected: Ensure RETAIN_DIRS_GLOBAL_ALWAYS always causes an
119 archive to be created, and RETAIN_DIRS_GLOBAL_FAILURE
120 only causes an archive to be created on failure.
121 Also test archive naming (with : character) as an
122 added bonus.
123 Product: oe-core
124 Author: Paul Eggleton <paul.eggleton@microsoft.com>
125 """
126
127 test_recipe = 'quilt-native'
128 test_recipe_fail = 'error'
129
130 features = 'INHERIT += "retain"\n'
131 features += 'RETAIN_DIRS_GLOBAL_ALWAYS = "${LOG_DIR};prefix=buildlogs"\n'
132 features += 'RETAIN_DIRS_GLOBAL_FAILURE = "${STAMPS_DIR}"\n'
133 self.write_config(features)
134
135 bitbake('-c clean %s' % test_recipe)
136
137 bb_vars = get_bb_vars(['RETAIN_OUTDIR', 'TMPDIR', 'STAMPS_DIR'])
138 retain_outdir = bb_vars['RETAIN_OUTDIR'] or ''
139 tmpdir = bb_vars['TMPDIR']
140 if len(retain_outdir) < 5:
141 self.fail('RETAIN_OUTDIR value "%s" is invalid' % retain_outdir)
142 if not oe.path.is_path_parent(tmpdir, retain_outdir):
143 self.fail('RETAIN_OUTDIR (%s) is not underneath TMPDIR (%s)' % (retain_outdir, tmpdir))
144 try:
145 shutil.rmtree(retain_outdir)
146 except FileNotFoundError:
147 pass
148
149 # Test success case
150 bitbake(test_recipe)
151 if not glob.glob(os.path.join(retain_outdir, 'buildlogs_*.tar.gz')):
152 self.fail('No output archive for LOG_DIR created')
153 stamps_dir = bb_vars['STAMPS_DIR']
154 if glob.glob(os.path.join(retain_outdir, '%s_*.tar.gz' % os.path.basename(stamps_dir))):
155 self.fail('Output archive for STAMPS_DIR created when it should not have been')
156
157 # Test failure case
158 result = bitbake('-c compile %s' % test_recipe_fail, ignore_status=True)
159 if result.status == 0:
160 self.fail('Build of %s did not fail as expected' % test_recipe_fail)
161 if not glob.glob(os.path.join(retain_outdir, '%s_*.tar.gz' % os.path.basename(stamps_dir))):
162 self.fail('Output archive for STAMPS_DIR not created')
163 if len(glob.glob(os.path.join(retain_outdir, 'buildlogs_*.tar.gz'))) != 2:
164 self.fail('Should be exactly two buildlogs archives in output dir')
165
166
167 def test_retain_misc(self):
168 """
169 Summary: Test retain class with RETAIN_ENABLED and RETAIN_TARBALL_SUFFIX
170 Expected: Archive written to RETAIN_OUTDIR only when RETAIN_ENABLED is set
171 and archive contents are as expected. Also test archive naming
172 (with : character) as an added bonus.
173 Product: oe-core
174 Author: Paul Eggleton <paul.eggleton@microsoft.com>
175 """
176
177 test_recipe_fail = 'error'
178
179 features = 'INHERIT += "retain"\n'
180 features += 'RETAIN_DIRS_ALWAYS = "${T}"\n'
181 features += 'RETAIN_ENABLED = "0"\n'
182 self.write_config(features)
183
184 bb_vars = get_bb_vars(['RETAIN_OUTDIR', 'TMPDIR'])
185 retain_outdir = bb_vars['RETAIN_OUTDIR'] or ''
186 tmpdir = bb_vars['TMPDIR']
187 if len(retain_outdir) < 5:
188 self.fail('RETAIN_OUTDIR value "%s" is invalid' % retain_outdir)
189 if not oe.path.is_path_parent(tmpdir, retain_outdir):
190 self.fail('RETAIN_OUTDIR (%s) is not underneath TMPDIR (%s)' % (retain_outdir, tmpdir))
191
192 try:
193 shutil.rmtree(retain_outdir)
194 except FileNotFoundError:
195 pass
196
197 bitbake('-c clean %s' % test_recipe_fail)
198 result = bitbake('-c compile %s' % test_recipe_fail, ignore_status=True)
199 if result.status == 0:
200 self.fail('Build of %s did not fail as expected' % test_recipe_fail)
201
202 if os.path.exists(retain_outdir) and os.listdir(retain_outdir):
203 self.fail('RETAIN_OUTDIR should be empty with RETAIN_ENABLED = "0"')
204
205 features = 'INHERIT += "retain"\n'
206 features += 'RETAIN_DIRS_ALWAYS = "${T};prefix=recipelogs"\n'
207 features += 'RETAIN_TARBALL_SUFFIX = "${DATETIME}-testsuffix.tar.bz2"\n'
208 features += 'RETAIN_ENABLED = "1"\n'
209 self.write_config(features)
210
211 result = bitbake('-c compile %s' % test_recipe_fail, ignore_status=True)
212 if result.status == 0:
213 self.fail('Build of %s did not fail as expected' % test_recipe_fail)
214
215 archives = glob.glob(os.path.join(retain_outdir, '%s_*-testsuffix.tar.bz2' % test_recipe_fail))
216 if not archives:
217 self.fail('No output archive for %s created' % test_recipe_fail)
218 if len(archives) != 2:
219 self.fail('Two archives for %s expected, but %d exist' % (test_recipe_fail, len(archives)))
220 recipelogs_found = False
221 workdir_found = False
222 for archive in archives:
223 contents_found = False
224 archive_prefix = os.path.basename(archive).split('.tar')[0]
225 if archive_prefix.startswith('%s_recipelogs' % test_recipe_fail):
226 recipelogs_found = True
227 if archive_prefix.startswith('%s_workdir' % test_recipe_fail):
228 workdir_found = True
229 with tarfile.open(archive, 'r:bz2') as tf:
230 for ti in tf:
231 if not fnmatch.fnmatch(ti.name, '%s/*' % (archive_prefix)):
232 self.fail('File without tarball-named subdirectory within tarball %s: %s' % (os.path.basename(archive), ti.name))
233 if ti.name.endswith('/log.do_compile'):
234 contents_found = True
235 if not contents_found:
236 # Both archives should contain this file
237 self.fail('Did not find log.do_compile in output archive %s' % os.path.basename(archive))
238 if not recipelogs_found:
239 self.fail('No archive with expected "recipelogs" prefix found')
240 if not workdir_found:
241 self.fail('No archive with expected "workdir" prefix found')
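The new test file drives retain.bbclass entirely through configuration fragments written with write_config(); a condensed sketch of the configuration the tests assemble (the values are the ones used above, RETAIN_OUTDIR is left at its default location under TMPDIR):

    # Illustrative config fragment in the same style the tests use.
    features = 'INHERIT += "retain"\n'
    features += 'RETAIN_DIRS_ALWAYS = "${T}"\n'                               # always archive the recipe temp dir
    features += 'RETAIN_DIRS_GLOBAL_ALWAYS = "${LOG_DIR};prefix=buildlogs"\n' # archive build logs on every build
    features += 'RETAIN_DIRS_GLOBAL_FAILURE = "${STAMPS_DIR}"\n'              # archive stamps only when a task fails
    self.write_config(features)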
diff --git a/meta/lib/oeqa/selftest/cases/runtime_test.py b/meta/lib/oeqa/selftest/cases/runtime_test.py
index 12000aac16..27090ae5cd 100644
--- a/meta/lib/oeqa/selftest/cases/runtime_test.py
+++ b/meta/lib/oeqa/selftest/cases/runtime_test.py
@@ -273,7 +273,7 @@ TEST_RUNQEMUPARAMS += " slirp"
273 import subprocess, os 273 import subprocess, os
274 274
275 distro = oe.lsb.distro_identifier() 275 distro = oe.lsb.distro_identifier()
276 if distro and (distro in ['debian-9', 'debian-10', 'centos-7', 'centos-8', 'ubuntu-16.04', 'ubuntu-18.04'] or 276 if distro and (distro in ['debian-9', 'debian-10', 'centos-7', 'centos-8', 'centos-9', 'ubuntu-16.04', 'ubuntu-18.04'] or
277 distro.startswith('almalinux') or distro.startswith('rocky')): 277 distro.startswith('almalinux') or distro.startswith('rocky')):
278 self.skipTest('virgl headless cannot be tested with %s' %(distro)) 278 self.skipTest('virgl headless cannot be tested with %s' %(distro))
279 279
@@ -310,10 +310,7 @@ class Postinst(OESelftestTestCase):
310 features += 'IMAGE_FEATURES += "package-management empty-root-password"\n' 310 features += 'IMAGE_FEATURES += "package-management empty-root-password"\n'
311 features += 'PACKAGE_CLASSES = "%s"\n' % classes 311 features += 'PACKAGE_CLASSES = "%s"\n' % classes
312 if init_manager == "systemd": 312 if init_manager == "systemd":
313 features += 'DISTRO_FEATURES:append = " systemd usrmerge"\n' 313 features += 'INIT_MANAGER = "systemd"\n'
314 features += 'VIRTUAL-RUNTIME_init_manager = "systemd"\n'
315 features += 'DISTRO_FEATURES_BACKFILL_CONSIDERED = "sysvinit"\n'
316 features += 'VIRTUAL-RUNTIME_initscripts = ""\n'
317 self.write_config(features) 314 self.write_config(features)
318 315
319 bitbake('core-image-minimal') 316 bitbake('core-image-minimal')
diff --git a/meta/lib/oeqa/selftest/cases/rust.py b/meta/lib/oeqa/selftest/cases/rust.py
index ad14189c6d..cbe6366f75 100644
--- a/meta/lib/oeqa/selftest/cases/rust.py
+++ b/meta/lib/oeqa/selftest/cases/rust.py
@@ -66,132 +66,45 @@ class RustSelfTestSystemEmulated(OESelftestTestCase, OEPTestResultTestCase):
66 # bless: First runs rustfmt to format the codebase, 66 # bless: First runs rustfmt to format the codebase,
67 # then runs tidy checks. 67 # then runs tidy checks.
68 exclude_list = [ 68 exclude_list = [
69 'compiler/rustc', 69 'src/bootstrap',
70 'compiler/rustc_interface/src/tests.rs',
71 'library/panic_abort',
72 'library/panic_unwind',
73 'library/test/src/stats/tests.rs',
74 'src/bootstrap/builder/tests.rs',
75 'src/doc/rustc', 70 'src/doc/rustc',
76 'src/doc/rustdoc', 71 'src/doc/rustdoc',
77 'src/doc/unstable-book', 72 'src/doc/unstable-book',
78 'src/librustdoc', 73 'src/librustdoc',
79 'src/rustdoc-json-types', 74 'src/rustdoc-json-types',
80 'src/tools/compiletest/src/common.rs', 75 'src/tools/compiletest/src/common.rs',
76 'src/tools/jsondoclint',
81 'src/tools/lint-docs', 77 'src/tools/lint-docs',
78 'src/tools/replace-version-placeholder',
82 'src/tools/rust-analyzer', 79 'src/tools/rust-analyzer',
83 'src/tools/rustdoc-themes', 80 'src/tools/rustdoc-themes',
84 'src/tools/tidy', 81 'src/tools/rust-installer',
82 'src/tools/suggest-tests',
83 'src/tools/tidy/src/',
85 'tests/assembly/asm/aarch64-outline-atomics.rs', 84 'tests/assembly/asm/aarch64-outline-atomics.rs',
86 'tests/codegen/abi-main-signature-32bit-c-int.rs', 85 'tests/codegen/abi-main-signature-32bit-c-int.rs',
87 'tests/codegen/abi-repr-ext.rs', 86 'tests/codegen/i128-x86-align.rs',
88 'tests/codegen/abi-x86-interrupt.rs', 87 'tests/codegen/issues/issue-122805.rs',
89 'tests/codegen/branch-protection.rs',
90 'tests/codegen/catch-unwind.rs',
91 'tests/codegen/cf-protection.rs',
92 'tests/codegen/enum-bounds-check-derived-idx.rs',
93 'tests/codegen/force-unwind-tables.rs',
94 'tests/codegen/intrinsic-no-unnamed-attr.rs',
95 'tests/codegen/issues/issue-103840.rs',
96 'tests/codegen/issues/issue-47278.rs',
97 'tests/codegen/issues/issue-73827-bounds-check-index-in-subexpr.rs',
98 'tests/codegen/lifetime_start_end.rs',
99 'tests/codegen/local-generics-in-exe-internalized.rs',
100 'tests/codegen/match-unoptimized.rs',
101 'tests/codegen/noalias-rwlockreadguard.rs',
102 'tests/codegen/non-terminate/nonempty-infinite-loop.rs',
103 'tests/codegen/noreturn-uninhabited.rs',
104 'tests/codegen/repr-transparent-aggregates-3.rs',
105 'tests/codegen/riscv-abi/call-llvm-intrinsics.rs',
106 'tests/codegen/riscv-abi/riscv64-lp64f-lp64d-abi.rs',
107 'tests/codegen/riscv-abi/riscv64-lp64d-abi.rs',
108 'tests/codegen/sse42-implies-crc32.rs',
109 'tests/codegen/thread-local.rs', 88 'tests/codegen/thread-local.rs',
110 'tests/codegen/uninit-consts.rs', 89 'tests/mir-opt/',
111 'tests/pretty/raw-str-nonexpr.rs',
112 'tests/run-make', 90 'tests/run-make',
113 'tests/run-make-fulldeps', 91 'tests/run-make-fulldeps',
114 'tests/rustdoc', 92 'tests/rustdoc',
115 'tests/rustdoc-json', 93 'tests/rustdoc-json',
116 'tests/rustdoc-js-std', 94 'tests/rustdoc-js-std',
117 'tests/rustdoc-ui/cfg-test.rs',
118 'tests/rustdoc-ui/check-cfg-test.rs',
119 'tests/rustdoc-ui/display-output.rs',
120 'tests/rustdoc-ui/doc-comment-multi-line-attr.rs',
121 'tests/rustdoc-ui/doc-comment-multi-line-cfg-attr.rs',
122 'tests/rustdoc-ui/doc-test-doctest-feature.rs',
123 'tests/rustdoc-ui/doctest-multiline-crate-attribute.rs',
124 'tests/rustdoc-ui/doctest-output.rs',
125 'tests/rustdoc-ui/doc-test-rustdoc-feature.rs',
126 'tests/rustdoc-ui/failed-doctest-compile-fail.rs',
127 'tests/rustdoc-ui/issue-80992.rs',
128 'tests/rustdoc-ui/issue-91134.rs',
129 'tests/rustdoc-ui/nocapture-fail.rs',
130 'tests/rustdoc-ui/nocapture.rs',
131 'tests/rustdoc-ui/no-run-flag.rs',
132 'tests/rustdoc-ui/run-directory.rs',
133 'tests/rustdoc-ui/test-no_std.rs',
134 'tests/rustdoc-ui/test-type.rs',
135 'tests/rustdoc/unit-return.rs',
136 'tests/ui/abi/stack-probes-lto.rs', 95 'tests/ui/abi/stack-probes-lto.rs',
137 'tests/ui/abi/stack-probes.rs', 96 'tests/ui/abi/stack-probes.rs',
138 'tests/ui/array-slice-vec/subslice-patterns-const-eval-match.rs', 97 'tests/ui/codegen/mismatched-data-layouts.rs',
139 'tests/ui/asm/x86_64/sym.rs',
140 'tests/ui/associated-type-bounds/fn-apit.rs',
141 'tests/ui/associated-type-bounds/fn-dyn-apit.rs',
142 'tests/ui/associated-type-bounds/fn-wrap-apit.rs',
143 'tests/ui/debuginfo/debuginfo-emit-llvm-ir-and-split-debuginfo.rs', 98 'tests/ui/debuginfo/debuginfo-emit-llvm-ir-and-split-debuginfo.rs',
144 'tests/ui/drop/dynamic-drop.rs', 99 'tests/ui-fulldeps/',
145 'tests/ui/empty_global_asm.rs',
146 'tests/ui/functions-closures/fn-help-with-err.rs',
147 'tests/ui/linkage-attr/issue-10755.rs',
148 'tests/ui/macros/restricted-shadowing-legacy.rs',
149 'tests/ui/process/nofile-limit.rs', 100 'tests/ui/process/nofile-limit.rs',
150 'tests/ui/process/process-panic-after-fork.rs',
151 'tests/ui/process/process-sigpipe.rs',
152 'tests/ui/simd/target-feature-mixup.rs',
153 'tests/ui/structs-enums/multiple-reprs.rs', 101 'tests/ui/structs-enums/multiple-reprs.rs',
154 'src/tools/jsondoclint', 102 'tidyselftest'
155 'src/tools/replace-version-placeholder',
156 'tests/codegen/abi-efiapi.rs',
157 'tests/codegen/abi-sysv64.rs',
158 'tests/codegen/align-byval.rs',
159 'tests/codegen/align-fn.rs',
160 'tests/codegen/asm-powerpc-clobbers.rs',
161 'tests/codegen/async-fn-debug-awaitee-field.rs',
162 'tests/codegen/binary-search-index-no-bound-check.rs',
163 'tests/codegen/call-metadata.rs',
164 'tests/codegen/debug-column.rs',
165 'tests/codegen/debug-limited.rs',
166 'tests/codegen/debuginfo-generic-closure-env-names.rs',
167 'tests/codegen/drop.rs',
168 'tests/codegen/dst-vtable-align-nonzero.rs',
169 'tests/codegen/enable-lto-unit-splitting.rs',
170 'tests/codegen/enum/enum-u128.rs',
171 'tests/codegen/fn-impl-trait-self.rs',
172 'tests/codegen/inherit_overflow.rs',
173 'tests/codegen/inline-function-args-debug-info.rs',
174 'tests/codegen/intrinsics/mask.rs',
175 'tests/codegen/intrinsics/transmute-niched.rs',
176 'tests/codegen/issues/issue-73258.rs',
177 'tests/codegen/issues/issue-75546.rs',
178 'tests/codegen/issues/issue-77812.rs',
179 'tests/codegen/issues/issue-98156-const-arg-temp-lifetime.rs',
180 'tests/codegen/llvm-ident.rs',
181 'tests/codegen/mainsubprogram.rs',
182 'tests/codegen/move-operands.rs',
183 'tests/codegen/repr/transparent-mips64.rs',
184 'tests/mir-opt/',
185 'tests/rustdoc-json',
186 'tests/rustdoc-ui/doc-test-rustdoc-feature.rs',
187 'tests/rustdoc-ui/no-run-flag.rs',
188 'tests/ui-fulldeps/',
189 'tests/ui/numbers-arithmetic/u128.rs'
190 ] 103 ]
191 104
192 exclude_fail_tests = " ".join([" --exclude " + item for item in exclude_list]) 105 exclude_fail_tests = " ".join([" --exclude " + item for item in exclude_list])
193 # Add exclude_fail_tests with other test arguments 106 # Add exclude_fail_tests with other test arguments
194 testargs = exclude_fail_tests + " --doc --no-fail-fast --bless" 107 testargs = exclude_fail_tests + " --no-fail-fast --bless"
195 108
196 # wrap the execution with a qemu instance. 109 # wrap the execution with a qemu instance.
197 # Tests are run with 512 tasks in parallel to execute all tests very quickly 110 # Tests are run with 512 tasks in parallel to execute all tests very quickly
@@ -210,9 +123,8 @@ class RustSelfTestSystemEmulated(OESelftestTestCase, OEPTestResultTestCase):
210 tmpdir = get_bb_var("TMPDIR", "rust") 123 tmpdir = get_bb_var("TMPDIR", "rust")
211 124
212 # Set path for target-poky-linux-gcc, RUST_TARGET_PATH and hosttools. 125 # Set path for target-poky-linux-gcc, RUST_TARGET_PATH and hosttools.
213 cmd = " export PATH=%s/recipe-sysroot-native/usr/bin:$PATH;" % rustlibpath 126 cmd = "export TARGET_VENDOR=\"-poky\";"
214 cmd = cmd + " export TARGET_VENDOR=\"-poky\";" 127 cmd = cmd + " export PATH=%s/recipe-sysroot-native/usr/bin/python3-native:%s/recipe-sysroot-native/usr/bin:%s/recipe-sysroot-native/usr/bin/%s:%s/hosttools:$PATH;" % (rustlibpath, rustlibpath, rustlibpath, tcpath, tmpdir)
215 cmd = cmd + " export PATH=%s/recipe-sysroot-native/usr/bin/%s:%s/hosttools:$PATH;" % (rustlibpath, tcpath, tmpdir)
216 cmd = cmd + " export RUST_TARGET_PATH=%s/rust-targets;" % rustlibpath 128 cmd = cmd + " export RUST_TARGET_PATH=%s/rust-targets;" % rustlibpath
217 # Trigger testing. 129 # Trigger testing.
218 cmd = cmd + " export TEST_DEVICE_ADDR=\"%s:12345\";" % qemu.ip 130 cmd = cmd + " export TEST_DEVICE_ADDR=\"%s:12345\";" % qemu.ip
diff --git a/meta/lib/oeqa/selftest/cases/spdx.py b/meta/lib/oeqa/selftest/cases/spdx.py
index 05fc4e390b..be595babb3 100644
--- a/meta/lib/oeqa/selftest/cases/spdx.py
+++ b/meta/lib/oeqa/selftest/cases/spdx.py
@@ -6,29 +6,37 @@
6 6
7import json 7import json
8import os 8import os
9import textwrap
10from pathlib import Path
9from oeqa.selftest.case import OESelftestTestCase 11from oeqa.selftest.case import OESelftestTestCase
10from oeqa.utils.commands import bitbake, get_bb_var, runCmd 12from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runCmd
11 13
12class SPDXCheck(OESelftestTestCase):
13 14
15class SPDX22Check(OESelftestTestCase):
14 @classmethod 16 @classmethod
15 def setUpClass(cls): 17 def setUpClass(cls):
16 super(SPDXCheck, cls).setUpClass() 18 super().setUpClass()
17 bitbake("python3-spdx-tools-native") 19 bitbake("python3-spdx-tools-native")
18 bitbake("-c addto_recipe_sysroot python3-spdx-tools-native") 20 bitbake("-c addto_recipe_sysroot python3-spdx-tools-native")
19 21
20 def check_recipe_spdx(self, high_level_dir, spdx_file, target_name): 22 def check_recipe_spdx(self, high_level_dir, spdx_file, target_name):
21 config = """ 23 config = textwrap.dedent(
22INHERIT += "create-spdx" 24 """\
23""" 25 INHERIT:remove = "create-spdx"
26 INHERIT += "create-spdx-2.2"
27 """
28 )
24 self.write_config(config) 29 self.write_config(config)
25 30
26 deploy_dir = get_bb_var("DEPLOY_DIR") 31 deploy_dir = get_bb_var("DEPLOY_DIR")
27 machine_var = get_bb_var("MACHINE") 32 machine_var = get_bb_var("MACHINE")
33 spdx_version = get_bb_var("SPDX_VERSION")
28 # qemux86-64 creates the directory qemux86_64 34 # qemux86-64 creates the directory qemux86_64
29 machine_dir = machine_var.replace("-", "_") 35 machine_dir = machine_var.replace("-", "_")
30 36
31 full_file_path = os.path.join(deploy_dir, "spdx", machine_dir, high_level_dir, spdx_file) 37 full_file_path = os.path.join(
38 deploy_dir, "spdx", spdx_version, machine_dir, high_level_dir, spdx_file
39 )
32 40
33 try: 41 try:
34 os.remove(full_file_path) 42 os.remove(full_file_path)
@@ -43,8 +51,13 @@ INHERIT += "create-spdx"
43 self.assertNotEqual(report, None) 51 self.assertNotEqual(report, None)
44 self.assertNotEqual(report["SPDXID"], None) 52 self.assertNotEqual(report["SPDXID"], None)
45 53
46 python = os.path.join(get_bb_var('STAGING_BINDIR', 'python3-spdx-tools-native'), 'nativepython3') 54 python = os.path.join(
47 validator = os.path.join(get_bb_var('STAGING_BINDIR', 'python3-spdx-tools-native'), 'pyspdxtools') 55 get_bb_var("STAGING_BINDIR", "python3-spdx-tools-native"),
56 "nativepython3",
57 )
58 validator = os.path.join(
59 get_bb_var("STAGING_BINDIR", "python3-spdx-tools-native"), "pyspdxtools"
60 )
48 result = runCmd("{} {} -i {}".format(python, validator, filename)) 61 result = runCmd("{} {} -i {}".format(python, validator, filename))
49 62
50 self.assertExists(full_file_path) 63 self.assertExists(full_file_path)
@@ -52,3 +65,106 @@ INHERIT += "create-spdx"
52 65
53 def test_spdx_base_files(self): 66 def test_spdx_base_files(self):
54 self.check_recipe_spdx("packages", "base-files.spdx.json", "base-files") 67 self.check_recipe_spdx("packages", "base-files.spdx.json", "base-files")
68
69
70class SPDX3CheckBase(object):
71 """
72 Base class for checking SPDX 3 based tests
73 """
74
75 def check_spdx_file(self, filename):
76 import oe.spdx30
77
78 self.assertExists(filename)
79
80 # Read the file
81 objset = oe.spdx30.SHACLObjectSet()
82 with open(filename, "r") as f:
83 d = oe.spdx30.JSONLDDeserializer()
84 d.read(f, objset)
85
86 return objset
87
88 def check_recipe_spdx(self, target_name, spdx_path, *, task=None, extraconf=""):
89 config = textwrap.dedent(
90 f"""\
91 INHERIT:remove = "create-spdx"
92 INHERIT += "{self.SPDX_CLASS}"
93 {extraconf}
94 """
95 )
96 self.write_config(config)
97
98 if task:
99 bitbake(f"-c {task} {target_name}")
100 else:
101 bitbake(target_name)
102
103 filename = spdx_path.format(
104 **get_bb_vars(
105 [
106 "DEPLOY_DIR_IMAGE",
107 "DEPLOY_DIR_SPDX",
108 "MACHINE",
109 "MACHINE_ARCH",
110 "SDKMACHINE",
111 "SDK_DEPLOY",
112 "SPDX_VERSION",
113 "TOOLCHAIN_OUTPUTNAME",
114 ],
115 target_name,
116 )
117 )
118
119 return self.check_spdx_file(filename)
120
121 def check_objset_missing_ids(self, objset):
122 if objset.missing_ids:
123 self.assertTrue(
124 False,
125 "The following SPDXIDs are unresolved:\n "
126 + "\n ".join(objset.missing_ids),
127 )
128
129
130class SPDX30Check(SPDX3CheckBase, OESelftestTestCase):
131 SPDX_CLASS = "create-spdx-3.0"
132
133 def test_base_files(self):
134 self.check_recipe_spdx(
135 "base-files",
136 "{DEPLOY_DIR_SPDX}/{MACHINE_ARCH}/packages/base-files.spdx.json",
137 )
138
139 def test_core_image_minimal(self):
140 objset = self.check_recipe_spdx(
141 "core-image-minimal",
142 "{DEPLOY_DIR_IMAGE}/core-image-minimal-{MACHINE}.rootfs.spdx.json",
143 )
144
145 # Document should be fully linked
146 self.check_objset_missing_ids(objset)
147
148 def test_core_image_minimal_sdk(self):
149 objset = self.check_recipe_spdx(
150 "core-image-minimal",
151 "{SDK_DEPLOY}/{TOOLCHAIN_OUTPUTNAME}.spdx.json",
152 task="populate_sdk",
153 )
154
155 # Document should be fully linked
156 self.check_objset_missing_ids(objset)
157
158 def test_baremetal_helloworld(self):
159 objset = self.check_recipe_spdx(
160 "baremetal-helloworld",
161 "{DEPLOY_DIR_IMAGE}/baremetal-helloworld-image-{MACHINE}.spdx.json",
162 extraconf=textwrap.dedent(
163 """\
164 TCLIBC = "baremetal"
165 """
166 ),
167 )
168
169 # Document should be fully linked
170 self.check_objset_missing_ids(objset)
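The SPDX 3 checks above deserialise the generated JSON-LD through the oe.spdx30 module; a minimal standalone sketch of that read path, assuming an oe-core environment where oe.spdx30 is importable and 'base-files.spdx.json' was produced by create-spdx-3.0:

    import oe.spdx30

    # Mirror SPDX3CheckBase.check_spdx_file(): load the document into an object
    # set and inspect which SPDXIDs could not be resolved (empty for a fully
    # linked document).
    objset = oe.spdx30.SHACLObjectSet()
    with open("base-files.spdx.json", "r") as f:
        oe.spdx30.JSONLDDeserializer().read(f, objset)

    print(sorted(objset.missing_ids))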
diff --git a/meta/lib/oeqa/selftest/cases/sstatetests.py b/meta/lib/oeqa/selftest/cases/sstatetests.py
index 86d6cd7464..ae295bef5f 100644
--- a/meta/lib/oeqa/selftest/cases/sstatetests.py
+++ b/meta/lib/oeqa/selftest/cases/sstatetests.py
@@ -378,7 +378,6 @@ class SStateHashSameSigs(SStateBase):
378 self.write_config(""" 378 self.write_config("""
379MACHINE = "qemux86" 379MACHINE = "qemux86"
380TMPDIR = "${TOPDIR}/tmp-sstatesamehash" 380TMPDIR = "${TOPDIR}/tmp-sstatesamehash"
381TCLIBCAPPEND = ""
382BUILD_ARCH = "x86_64" 381BUILD_ARCH = "x86_64"
383BUILD_OS = "linux" 382BUILD_OS = "linux"
384SDKMACHINE = "x86_64" 383SDKMACHINE = "x86_64"
@@ -390,7 +389,6 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
390 self.write_config(""" 389 self.write_config("""
391MACHINE = "qemux86" 390MACHINE = "qemux86"
392TMPDIR = "${TOPDIR}/tmp-sstatesamehash2" 391TMPDIR = "${TOPDIR}/tmp-sstatesamehash2"
393TCLIBCAPPEND = ""
394BUILD_ARCH = "i686" 392BUILD_ARCH = "i686"
395BUILD_OS = "linux" 393BUILD_OS = "linux"
396SDKMACHINE = "i686" 394SDKMACHINE = "i686"
@@ -426,7 +424,6 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
426 424
427 self.write_config(""" 425 self.write_config("""
428TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\" 426TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\"
429TCLIBCAPPEND = \"\"
430NATIVELSBSTRING = \"DistroA\" 427NATIVELSBSTRING = \"DistroA\"
431BB_SIGNATURE_HANDLER = "OEBasicHash" 428BB_SIGNATURE_HANDLER = "OEBasicHash"
432""") 429""")
@@ -434,7 +431,6 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
434 bitbake("core-image-weston -S none") 431 bitbake("core-image-weston -S none")
435 self.write_config(""" 432 self.write_config("""
436TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" 433TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
437TCLIBCAPPEND = \"\"
438NATIVELSBSTRING = \"DistroB\" 434NATIVELSBSTRING = \"DistroB\"
439BB_SIGNATURE_HANDLER = "OEBasicHash" 435BB_SIGNATURE_HANDLER = "OEBasicHash"
440""") 436""")
@@ -463,17 +459,17 @@ class SStateHashSameSigs2(SStateBase):
463 459
464 configA = """ 460 configA = """
465TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\" 461TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\"
466TCLIBCAPPEND = \"\"
467MACHINE = \"qemux86-64\" 462MACHINE = \"qemux86-64\"
468BB_SIGNATURE_HANDLER = "OEBasicHash" 463BB_SIGNATURE_HANDLER = "OEBasicHash"
469""" 464"""
470 #OLDEST_KERNEL is arch specific so set to a different value here for testing 465 #OLDEST_KERNEL is arch specific so set to a different value here for testing
471 configB = """ 466 configB = """
472TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" 467TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
473TCLIBCAPPEND = \"\"
474MACHINE = \"qemuarm\" 468MACHINE = \"qemuarm\"
475OLDEST_KERNEL = \"3.3.0\" 469OLDEST_KERNEL = \"3.3.0\"
476BB_SIGNATURE_HANDLER = "OEBasicHash" 470BB_SIGNATURE_HANDLER = "OEBasicHash"
471ERROR_QA:append = " somenewoption"
472WARN_QA:append = " someotheroption"
477""" 473"""
478 self.sstate_common_samesigs(configA, configB, allarch=True) 474 self.sstate_common_samesigs(configA, configB, allarch=True)
479 475
@@ -484,7 +480,6 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
484 480
485 configA = """ 481 configA = """
486TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\" 482TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\"
487TCLIBCAPPEND = \"\"
488MACHINE = \"qemux86-64\" 483MACHINE = \"qemux86-64\"
489require conf/multilib.conf 484require conf/multilib.conf
490MULTILIBS = \"multilib:lib32\" 485MULTILIBS = \"multilib:lib32\"
@@ -493,7 +488,6 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
493""" 488"""
494 configB = """ 489 configB = """
495TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" 490TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
496TCLIBCAPPEND = \"\"
497MACHINE = \"qemuarm\" 491MACHINE = \"qemuarm\"
498require conf/multilib.conf 492require conf/multilib.conf
499MULTILIBS = \"\" 493MULTILIBS = \"\"
@@ -511,7 +505,6 @@ class SStateHashSameSigs3(SStateBase):
511 505
512 self.write_config(""" 506 self.write_config("""
513TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\" 507TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\"
514TCLIBCAPPEND = \"\"
515MACHINE = \"qemux86\" 508MACHINE = \"qemux86\"
516require conf/multilib.conf 509require conf/multilib.conf
517MULTILIBS = "multilib:lib32" 510MULTILIBS = "multilib:lib32"
@@ -522,7 +515,6 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
522 bitbake("world meta-toolchain -S none") 515 bitbake("world meta-toolchain -S none")
523 self.write_config(""" 516 self.write_config("""
524TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" 517TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
525TCLIBCAPPEND = \"\"
526MACHINE = \"qemux86copy\" 518MACHINE = \"qemux86copy\"
527require conf/multilib.conf 519require conf/multilib.conf
528MULTILIBS = "multilib:lib32" 520MULTILIBS = "multilib:lib32"
@@ -559,7 +551,6 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
559 551
560 self.write_config(""" 552 self.write_config("""
561TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\" 553TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\"
562TCLIBCAPPEND = \"\"
563MACHINE = \"qemux86\" 554MACHINE = \"qemux86\"
564require conf/multilib.conf 555require conf/multilib.conf
565MULTILIBS = "multilib:lib32" 556MULTILIBS = "multilib:lib32"
@@ -570,7 +561,6 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
570 bitbake("binutils-native -S none") 561 bitbake("binutils-native -S none")
571 self.write_config(""" 562 self.write_config("""
572TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" 563TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
573TCLIBCAPPEND = \"\"
574MACHINE = \"qemux86copy\" 564MACHINE = \"qemux86copy\"
575BB_SIGNATURE_HANDLER = "OEBasicHash" 565BB_SIGNATURE_HANDLER = "OEBasicHash"
576""") 566""")
@@ -598,7 +588,6 @@ class SStateHashSameSigs4(SStateBase):
598 588
599 self.write_config(""" 589 self.write_config("""
600TMPDIR = "${TOPDIR}/tmp-sstatesamehash" 590TMPDIR = "${TOPDIR}/tmp-sstatesamehash"
601TCLIBCAPPEND = ""
602BB_NUMBER_THREADS = "${@oe.utils.cpu_count()}" 591BB_NUMBER_THREADS = "${@oe.utils.cpu_count()}"
603PARALLEL_MAKE = "-j 1" 592PARALLEL_MAKE = "-j 1"
604DL_DIR = "${TOPDIR}/download1" 593DL_DIR = "${TOPDIR}/download1"
@@ -613,7 +602,6 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
613 bitbake("world meta-toolchain -S none") 602 bitbake("world meta-toolchain -S none")
614 self.write_config(""" 603 self.write_config("""
615TMPDIR = "${TOPDIR}/tmp-sstatesamehash2" 604TMPDIR = "${TOPDIR}/tmp-sstatesamehash2"
616TCLIBCAPPEND = ""
617BB_NUMBER_THREADS = "${@oe.utils.cpu_count()+1}" 605BB_NUMBER_THREADS = "${@oe.utils.cpu_count()+1}"
618PARALLEL_MAKE = "-j 2" 606PARALLEL_MAKE = "-j 2"
619DL_DIR = "${TOPDIR}/download2" 607DL_DIR = "${TOPDIR}/download2"
@@ -724,7 +712,6 @@ class SStateFindSiginfo(SStateBase):
724 """ 712 """
725 self.write_config(""" 713 self.write_config("""
726TMPDIR = \"${TOPDIR}/tmp-sstates-findsiginfo\" 714TMPDIR = \"${TOPDIR}/tmp-sstates-findsiginfo\"
727TCLIBCAPPEND = \"\"
728MACHINE = \"qemux86-64\" 715MACHINE = \"qemux86-64\"
729require conf/multilib.conf 716require conf/multilib.conf
730MULTILIBS = "multilib:lib32" 717MULTILIBS = "multilib:lib32"
@@ -917,15 +904,24 @@ INHERIT += "base-do-configure-modified"
917""", 904""",
918expected_sametmp_output, expected_difftmp_output) 905expected_sametmp_output, expected_difftmp_output)
919 906
920@OETestTag("yocto-mirrors") 907class SStateCheckObjectPresence(SStateBase):
921class SStateMirrors(SStateBase): 908 def check_bb_output(self, output, targets, exceptions, check_cdn):
922 def check_bb_output(self, output, exceptions, check_cdn):
923 def is_exception(object, exceptions): 909 def is_exception(object, exceptions):
924 for e in exceptions: 910 for e in exceptions:
925 if re.search(e, object): 911 if re.search(e, object):
926 return True 912 return True
927 return False 913 return False
928 914
915 # sstate is checked for existence of these, but they never get written out to begin with
916 exceptions += ["{}.*image_qa".format(t) for t in targets.split()]
917 exceptions += ["{}.*deploy_source_date_epoch".format(t) for t in targets.split()]
918 exceptions += ["{}.*image_complete".format(t) for t in targets.split()]
919 exceptions += ["linux-yocto.*shared_workdir"]
 920 # these get influenced by IMAGE_FSTYPES tweaks in yocto-autobuilder-helper's config.json (on x86-64)
921 # additionally, they depend on noexec (thus, absent stamps) package, install, etc. image tasks,
922 # which makes tracing other changes difficult
923 exceptions += ["{}.*create_.*spdx".format(t) for t in targets.split()]
924
929 output_l = output.splitlines() 925 output_l = output.splitlines()
930 for l in output_l: 926 for l in output_l:
931 if l.startswith("Sstate summary"): 927 if l.startswith("Sstate summary"):
@@ -960,18 +956,9 @@ class SStateMirrors(SStateBase):
960 self.assertEqual(len(failed_urls), missing_objects, "Amount of reported missing objects does not match failed URLs: {}\nFailed URLs:\n{}\nFetcher diagnostics:\n{}".format(missing_objects, "\n".join(failed_urls), "\n".join(failed_urls_extrainfo))) 956 self.assertEqual(len(failed_urls), missing_objects, "Amount of reported missing objects does not match failed URLs: {}\nFailed URLs:\n{}\nFetcher diagnostics:\n{}".format(missing_objects, "\n".join(failed_urls), "\n".join(failed_urls_extrainfo)))
961 self.assertEqual(len(failed_urls), 0, "Missing objects in the cache:\n{}\nFetcher diagnostics:\n{}".format("\n".join(failed_urls), "\n".join(failed_urls_extrainfo))) 957 self.assertEqual(len(failed_urls), 0, "Missing objects in the cache:\n{}\nFetcher diagnostics:\n{}".format("\n".join(failed_urls), "\n".join(failed_urls_extrainfo)))
962 958
959@OETestTag("yocto-mirrors")
960class SStateMirrors(SStateCheckObjectPresence):
963 def run_test(self, machine, targets, exceptions, check_cdn = True, ignore_errors = False): 961 def run_test(self, machine, targets, exceptions, check_cdn = True, ignore_errors = False):
964 # sstate is checked for existence of these, but they never get written out to begin with
965 exceptions += ["{}.*image_qa".format(t) for t in targets.split()]
966 exceptions += ["{}.*deploy_source_date_epoch".format(t) for t in targets.split()]
967 exceptions += ["{}.*image_complete".format(t) for t in targets.split()]
968 exceptions += ["linux-yocto.*shared_workdir"]
969 # these get influnced by IMAGE_FSTYPES tweaks in yocto-autobuilder-helper's config.json (on x86-64)
970 # additionally, they depend on noexec (thus, absent stamps) package, install, etc. image tasks,
971 # which makes tracing other changes difficult
972 exceptions += ["{}.*create_spdx".format(t) for t in targets.split()]
973 exceptions += ["{}.*create_runtime_spdx".format(t) for t in targets.split()]
974
975 if check_cdn: 962 if check_cdn:
976 self.config_sstate(True) 963 self.config_sstate(True)
977 self.append_config(""" 964 self.append_config("""
@@ -987,7 +974,7 @@ MACHINE = "{}"
987 bitbake("-S none {}".format(targets)) 974 bitbake("-S none {}".format(targets))
988 if ignore_errors: 975 if ignore_errors:
989 return 976 return
990 self.check_bb_output(result.output, exceptions, check_cdn) 977 self.check_bb_output(result.output, targets, exceptions, check_cdn)
991 978
992 def test_cdn_mirror_qemux86_64(self): 979 def test_cdn_mirror_qemux86_64(self):
993 exceptions = [] 980 exceptions = []
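The refactored check_bb_output() treats each entry in 'exceptions' as a regular expression matched against sstate object names; a small illustration of the filter it applies (the object names below are made up for the example):

    import re

    exceptions = ["core-image-minimal.*image_qa", "linux-yocto.*shared_workdir"]

    def is_exception(obj, exceptions):
        # Same helper as in check_bb_output(): any regex hit excludes the object.
        return any(re.search(e, obj) for e in exceptions)

    print(is_exception("core-image-minimal:do_image_qa_setscene", exceptions))    # True
    print(is_exception("quilt-native:do_populate_sysroot_setscene", exceptions))  # False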
diff --git a/meta/lib/oeqa/selftest/context.py b/meta/lib/oeqa/selftest/context.py
index 99186175e5..acc3b073bd 100644
--- a/meta/lib/oeqa/selftest/context.py
+++ b/meta/lib/oeqa/selftest/context.py
@@ -117,8 +117,11 @@ class OESelftestTestContext(OETestContext):
117 newbblayers += 'BBLAYERS = "%s"\n' % ' '.join(bblayers_abspath) 117 newbblayers += 'BBLAYERS = "%s"\n' % ' '.join(bblayers_abspath)
118 f.write(newbblayers) 118 f.write(newbblayers)
119 119
120 # Rewrite builddir paths seen in environment variables
120 for e in os.environ: 121 for e in os.environ:
 121 if builddir + "/" in os.environ[e]: 122 # Rewrite paths that resolve to locations inside builddir
 123 # (e.g. $builddir/conf/ would be rewritten but not $builddir/../bitbake/)
124 if builddir + "/" in os.environ[e] and builddir + "/" in os.path.abspath(os.environ[e]):
122 os.environ[e] = os.environ[e].replace(builddir + "/", newbuilddir + "/") 125 os.environ[e] = os.environ[e].replace(builddir + "/", newbuilddir + "/")
123 if os.environ[e].endswith(builddir): 126 if os.environ[e].endswith(builddir):
124 os.environ[e] = os.environ[e].replace(builddir, newbuilddir) 127 os.environ[e] = os.environ[e].replace(builddir, newbuilddir)
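The extra os.path.abspath() check added above stops the builddir substitution from touching values that only pass through builddir on their way elsewhere; a small sketch of the difference (paths are illustrative):

    import os.path

    builddir = "/home/user/build"

    def points_inside(value):
        # Same condition as in the hunk: the raw value and its normalised form
        # must both sit under builddir before any rewrite happens.
        return builddir + "/" in value and builddir + "/" in os.path.abspath(value)

    print(points_inside(builddir + "/conf/local.conf"))  # True: gets rewritten
    print(points_inside(builddir + "/../bitbake/bin"))   # False: left alone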
diff --git a/meta/lib/oeqa/utils/__init__.py b/meta/lib/oeqa/utils/__init__.py
index 53bdcbf266..e03f7e33bb 100644
--- a/meta/lib/oeqa/utils/__init__.py
+++ b/meta/lib/oeqa/utils/__init__.py
@@ -96,4 +96,10 @@ def get_json_result_dir(d):
96 custom_json_result_dir = d.getVar("OEQA_JSON_RESULT_DIR") 96 custom_json_result_dir = d.getVar("OEQA_JSON_RESULT_DIR")
97 if custom_json_result_dir: 97 if custom_json_result_dir:
98 json_result_dir = custom_json_result_dir 98 json_result_dir = custom_json_result_dir
99 return json_result_dir \ No newline at end of file 99 return json_result_dir
100
101def get_artefact_dir(d):
102 custom_json_result_dir = d.getVar("OEQA_ARTEFACT_DIR")
103 if custom_json_result_dir:
104 return custom_json_result_dir
105 return os.path.join(d.getVar("LOG_DIR"), 'oeqa-artefacts')
diff --git a/meta/lib/oeqa/utils/commands.py b/meta/lib/oeqa/utils/commands.py
index 575e380017..bf2f49d0c0 100644
--- a/meta/lib/oeqa/utils/commands.py
+++ b/meta/lib/oeqa/utils/commands.py
@@ -314,7 +314,23 @@ def create_temp_layer(templayerdir, templayername, priority=999, recipepathspec=
314@contextlib.contextmanager 314@contextlib.contextmanager
315def runqemu(pn, ssh=True, runqemuparams='', image_fstype=None, launch_cmd=None, qemuparams=None, overrides={}, discard_writes=True): 315def runqemu(pn, ssh=True, runqemuparams='', image_fstype=None, launch_cmd=None, qemuparams=None, overrides={}, discard_writes=True):
316 """ 316 """
 317 launch_cmd means directly run the command, don't need set rootfs or env vars. 317 Starts a context manager for an 'oeqa.targetcontrol.QemuTarget' resource.
 318 The underlying QEMU instance will be booted into a shell when the generator yields
319 and stopped when the 'with' block exits.
320
321 Usage:
322
323 with runqemu('core-image-minimal') as qemu:
324 qemu.run_serial('cat /proc/cpuinfo')
325
326 Args:
327 pn (str): (image) recipe to run on
328 ssh (boolean): whether or not to enable SSH (network access)
 329 runqemuparams (str): space-separated list of params to pass to 'runqemu' script (like 'nographic', 'ovmf', etc.)
330 image_fstype (str): IMAGE_FSTYPE to use
331 launch_cmd (str): directly run this command and bypass automatic runqemu parameter generation
 332 overrides (dict): dict of "'<bitbake-variable>': value" pairs that allow overriding bitbake variables
333 discard_writes (boolean): enables qemu -snapshot feature to prevent modifying original image
318 """ 334 """
319 335
320 import bb.tinfoil 336 import bb.tinfoil
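Beyond the bare usage shown in the docstring, the named parameters can be combined; a hedged example of a typical selftest-style invocation (the image name, fstype and runqemu params below are illustrative, not mandated by the API):

    from oeqa.utils.commands import runqemu

    # Boot a snapshot of the image without networking and poke it over the
    # serial console; the VM is torn down when the 'with' block exits.
    with runqemu('core-image-minimal', ssh=False,
                 runqemuparams='nographic', image_fstype='ext4') as qemu:
        qemu.run_serial('uname -a')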
diff --git a/meta/lib/oeqa/utils/postactions.py b/meta/lib/oeqa/utils/postactions.py
index ecdddd2d40..8f787838b9 100644
--- a/meta/lib/oeqa/utils/postactions.py
+++ b/meta/lib/oeqa/utils/postactions.py
@@ -7,23 +7,20 @@
7# Run a set of actions after tests. The runner provides internal data 7# Run a set of actions after tests. The runner provides internal data
8# dictionary as well as test context to any action to run. 8# dictionary as well as test context to any action to run.
9 9
10from oeqa.utils import get_json_result_dir 10import datetime
11 11import io
12def create_artifacts_directory(d, tc): 12import os
13 import shutil 13import stat
14 14import subprocess
15 local_artifacts_dir = os.path.join(get_json_result_dir(d), "artifacts") 15import tempfile
16 if os.path.isdir(local_artifacts_dir): 16from oeqa.utils import get_artefact_dir
17 shutil.rmtree(local_artifacts_dir)
18
19 os.makedirs(local_artifacts_dir)
20 17
21################################################################## 18##################################################################
22# Host/target statistics 19# Host/target statistics
23################################################################## 20##################################################################
24 21
25def get_target_disk_usage(d, tc): 22def get_target_disk_usage(d, tc, artifacts_list, outputdir):
26 output_file = os.path.join(get_json_result_dir(d), "artifacts", "target_disk_usage.txt") 23 output_file = os.path.join(outputdir, "target_disk_usage.txt")
27 try: 24 try:
28 (status, output) = tc.target.run('df -h') 25 (status, output) = tc.target.run('df -h')
29 with open(output_file, 'w') as f: 26 with open(output_file, 'w') as f:
@@ -32,10 +29,10 @@ def get_target_disk_usage(d, tc):
32 except Exception as e: 29 except Exception as e:
33 bb.warn(f"Can not get target disk usage: {e}") 30 bb.warn(f"Can not get target disk usage: {e}")
34 31
35def get_host_disk_usage(d, tc): 32def get_host_disk_usage(d, tc, artifacts_list, outputdir):
36 import subprocess 33 import subprocess
37 34
38 output_file = os.path.join(get_json_result_dir(d), "artifacts", "host_disk_usage.txt") 35 output_file = os.path.join(outputdir, "host_disk_usage.txt")
39 try: 36 try:
40 with open(output_file, 'w') as f: 37 with open(output_file, 'w') as f:
41 output = subprocess.run(['df', '-hl'], check=True, text=True, stdout=f, env={}) 38 output = subprocess.run(['df', '-hl'], check=True, text=True, stdout=f, env={})
@@ -61,25 +58,21 @@ def get_artifacts_list(target, raw_list):
61 58
62 return result 59 return result
63 60
64def retrieve_test_artifacts(target, artifacts_list, target_dir): 61def list_and_fetch_failed_tests_artifacts(d, tc, artifacts_list, outputdir):
65 local_artifacts_dir = os.path.join(target_dir, "artifacts") 62 artifacts_list = get_artifacts_list(tc.target, artifacts_list)
66 for artifact_path in artifacts_list:
67 if not os.path.isabs(artifact_path):
68 bb.warn(f"{artifact_path} is not an absolute path")
69 continue
70 try:
71 dest_dir = os.path.join(local_artifacts_dir, os.path.dirname(artifact_path[1:]))
72 os.makedirs(dest_dir, exist_ok=True)
73 target.copyFrom(artifact_path, dest_dir)
74 except Exception as e:
75 bb.warn(f"Can not retrieve {artifact_path} from test target: {e}")
76
77def list_and_fetch_failed_tests_artifacts(d, tc):
78 artifacts_list = get_artifacts_list(tc.target, d.getVar("TESTIMAGE_FAILED_QA_ARTIFACTS"))
79 if not artifacts_list: 63 if not artifacts_list:
80 bb.warn("Could not load artifacts list, skip artifacts retrieval") 64 bb.warn("Could not load artifacts list, skip artifacts retrieval")
81 else: 65 return
82 retrieve_test_artifacts(tc.target, artifacts_list, get_json_result_dir(d)) 66 try:
67 cmd = "tar zcf - " + " ".join(artifacts_list)
68 (status, output) = tc.target.run(cmd, raw = True)
69 if status != 0 or not output:
70 raise Exception("Error while fetching compressed artifacts")
71 archive_name = os.path.join(outputdir, "tests_artifacts.tar.gz")
72 with open(archive_name, "wb") as f:
73 f.write(output)
74 except Exception as e:
75 bb.warn(f"Can not retrieve artifacts from test target: {e}")
83 76
84 77
85################################################################## 78##################################################################
@@ -87,12 +80,22 @@ def list_and_fetch_failed_tests_artifacts(d, tc):
87################################################################## 80##################################################################
88 81
89def run_failed_tests_post_actions(d, tc): 82def run_failed_tests_post_actions(d, tc):
83 artifacts = d.getVar("TESTIMAGE_FAILED_QA_ARTIFACTS")
84 # Allow all the code to be disabled by having no artifacts set, e.g. for systems with no ssh support
85 if not artifacts:
86 return
87
88 outputdir = get_artefact_dir(d)
89 os.makedirs(outputdir, exist_ok=True)
90 datestr = datetime.datetime.now().strftime('%Y%m%d')
91 outputdir = tempfile.mkdtemp(prefix='oeqa-target-artefacts-%s-' % datestr, dir=outputdir)
92 os.chmod(outputdir, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
93
90 post_actions=[ 94 post_actions=[
91 create_artifacts_directory,
92 list_and_fetch_failed_tests_artifacts, 95 list_and_fetch_failed_tests_artifacts,
93 get_target_disk_usage, 96 get_target_disk_usage,
94 get_host_disk_usage 97 get_host_disk_usage
95 ] 98 ]
96 99
97 for action in post_actions: 100 for action in post_actions:
98 action(d, tc) 101 action(d, tc, artifacts, outputdir)
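With this change every post action shares the (d, tc, artifacts_list, outputdir) signature and writes into the per-run output directory created by run_failed_tests_post_actions(); a sketch of what an additional action would look like under that contract (the dmesg action itself is hypothetical, not part of the patch, and assumes the same bitbake execution environment that provides the bb module):

    def get_target_kernel_log(d, tc, artifacts_list, outputdir):
        # Follows the same pattern as get_target_disk_usage(): run a command on
        # the target and store its output under the artefact directory for this run.
        output_file = os.path.join(outputdir, "target_dmesg.txt")
        try:
            (status, output) = tc.target.run('dmesg')
            with open(output_file, 'w') as f:
                f.write(output)
        except Exception as e:
            bb.warn(f"Can not get target kernel log: {e}")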