summaryrefslogtreecommitdiffstats
path: root/meta/lib/oeqa
diff options
context:
space:
mode:
Diffstat (limited to 'meta/lib/oeqa')
-rw-r--r--meta/lib/oeqa/buildperf/base.py2
-rw-r--r--meta/lib/oeqa/controllers/__init__.py2
-rw-r--r--meta/lib/oeqa/controllers/controllerimage.py (renamed from meta/lib/oeqa/controllers/masterimage.py)44
-rw-r--r--meta/lib/oeqa/controllers/testtargetloader.py2
-rw-r--r--meta/lib/oeqa/core/case.py9
-rw-r--r--meta/lib/oeqa/core/context.py2
-rw-r--r--meta/lib/oeqa/core/decorator/__init__.py11
-rw-r--r--meta/lib/oeqa/core/decorator/data.py76
-rw-r--r--meta/lib/oeqa/core/decorator/oetimeout.py5
-rw-r--r--meta/lib/oeqa/core/loader.py12
-rw-r--r--meta/lib/oeqa/core/runner.py12
-rw-r--r--meta/lib/oeqa/core/target/qemu.py40
-rw-r--r--meta/lib/oeqa/core/target/ssh.py81
-rw-r--r--meta/lib/oeqa/core/tests/cases/timeout.py13
-rwxr-xr-xmeta/lib/oeqa/core/tests/test_data.py2
-rwxr-xr-xmeta/lib/oeqa/core/tests/test_decorators.py6
-rw-r--r--meta/lib/oeqa/core/utils/concurrencytest.py68
-rw-r--r--meta/lib/oeqa/core/utils/misc.py47
-rw-r--r--meta/lib/oeqa/files/maturin/guessing-game/Cargo.toml20
-rw-r--r--meta/lib/oeqa/files/maturin/guessing-game/LICENSE-APACHE201
-rw-r--r--meta/lib/oeqa/files/maturin/guessing-game/LICENSE-MIT25
-rw-r--r--meta/lib/oeqa/files/maturin/guessing-game/pyproject.toml8
-rw-r--r--meta/lib/oeqa/files/maturin/guessing-game/src/lib.rs48
-rw-r--r--meta/lib/oeqa/files/test.rs2
-rw-r--r--meta/lib/oeqa/files/testresults/testresults.json2
-rw-r--r--meta/lib/oeqa/manual/bsp-hw.json280
-rw-r--r--meta/lib/oeqa/manual/build-appliance.json2
-rw-r--r--meta/lib/oeqa/manual/eclipse-plugin.json6
-rw-r--r--meta/lib/oeqa/manual/sdk.json2
-rw-r--r--meta/lib/oeqa/manual/toaster-managed-mode.json16
-rw-r--r--meta/lib/oeqa/oetest.py24
-rw-r--r--meta/lib/oeqa/runtime/cases/_qemutiny.py13
-rw-r--r--meta/lib/oeqa/runtime/cases/apt.py40
-rw-r--r--meta/lib/oeqa/runtime/cases/boot.py4
-rw-r--r--meta/lib/oeqa/runtime/cases/buildcpio.py7
-rw-r--r--meta/lib/oeqa/runtime/cases/buildgalculator.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/buildlzip.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/connman.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/date.py15
-rw-r--r--meta/lib/oeqa/runtime/cases/df.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/dnf.py88
-rw-r--r--meta/lib/oeqa/runtime/cases/ethernet_ip_connman.py8
-rw-r--r--meta/lib/oeqa/runtime/cases/gcc.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/gi.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/go.py21
-rw-r--r--meta/lib/oeqa/runtime/cases/gstreamer.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/kernelmodule.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/ksample.py4
-rw-r--r--meta/lib/oeqa/runtime/cases/ldd.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/login.py116
-rw-r--r--meta/lib/oeqa/runtime/cases/logrotate.py16
-rw-r--r--meta/lib/oeqa/runtime/cases/ltp.py28
-rw-r--r--meta/lib/oeqa/runtime/cases/ltp_stress.py3
-rw-r--r--meta/lib/oeqa/runtime/cases/maturin.py58
-rw-r--r--meta/lib/oeqa/runtime/cases/multilib.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/oe_syslog.py15
-rw-r--r--meta/lib/oeqa/runtime/cases/opkg.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/pam.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/parselogs-ignores-common.txt62
-rw-r--r--meta/lib/oeqa/runtime/cases/parselogs-ignores-mipsarch.txt2
-rw-r--r--meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuall.txt27
-rw-r--r--meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuarm64.txt6
-rw-r--r--meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuarmv5.txt19
-rw-r--r--meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuppc.txt6
-rw-r--r--meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuppc64.txt4
-rw-r--r--meta/lib/oeqa/runtime/cases/parselogs-ignores-qemux86.txt2
-rw-r--r--meta/lib/oeqa/runtime/cases/parselogs-ignores-x86.txt10
l---------meta/lib/oeqa/runtime/cases/parselogs-ignores-x86_64.txt1
-rw-r--r--meta/lib/oeqa/runtime/cases/parselogs.py382
-rw-r--r--meta/lib/oeqa/runtime/cases/perl.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/ping.py25
-rw-r--r--meta/lib/oeqa/runtime/cases/ptest.py14
-rw-r--r--meta/lib/oeqa/runtime/cases/python.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/rpm.py58
-rw-r--r--meta/lib/oeqa/runtime/cases/rt.py19
-rw-r--r--meta/lib/oeqa/runtime/cases/rtc.py17
-rw-r--r--meta/lib/oeqa/runtime/cases/runlevel.py5
-rw-r--r--meta/lib/oeqa/runtime/cases/rust.py64
-rw-r--r--meta/lib/oeqa/runtime/cases/scons.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/scp.py4
-rw-r--r--meta/lib/oeqa/runtime/cases/skeletoninit.py6
-rw-r--r--meta/lib/oeqa/runtime/cases/ssh.py9
-rw-r--r--meta/lib/oeqa/runtime/cases/stap.py41
-rw-r--r--meta/lib/oeqa/runtime/cases/storage.py18
-rw-r--r--meta/lib/oeqa/runtime/cases/suspend.py7
-rw-r--r--meta/lib/oeqa/runtime/cases/systemd.py30
-rw-r--r--meta/lib/oeqa/runtime/cases/terminal.py5
-rw-r--r--meta/lib/oeqa/runtime/cases/usb_hid.py7
-rw-r--r--meta/lib/oeqa/runtime/cases/weston.py22
-rw-r--r--meta/lib/oeqa/runtime/cases/x32lib.py2
-rw-r--r--meta/lib/oeqa/runtime/cases/xorg.py2
-rw-r--r--meta/lib/oeqa/runtime/context.py48
-rw-r--r--meta/lib/oeqa/runtime/decorator/package.py18
-rw-r--r--meta/lib/oeqa/runtime/files/hello.stp1
-rw-r--r--meta/lib/oeqa/sdk/buildtools-cases/README2
-rw-r--r--meta/lib/oeqa/sdk/buildtools-cases/build.py32
-rw-r--r--meta/lib/oeqa/sdk/buildtools-cases/gcc.py31
-rw-r--r--meta/lib/oeqa/sdk/buildtools-cases/https.py22
-rw-r--r--meta/lib/oeqa/sdk/buildtools-cases/sanity.py24
-rw-r--r--meta/lib/oeqa/sdk/buildtools-docs-cases/README2
-rw-r--r--meta/lib/oeqa/sdk/buildtools-docs-cases/build.py19
-rw-r--r--meta/lib/oeqa/sdk/cases/assimp.py13
-rw-r--r--meta/lib/oeqa/sdk/cases/buildcpio.py9
-rw-r--r--meta/lib/oeqa/sdk/cases/buildepoxy.py9
-rw-r--r--meta/lib/oeqa/sdk/cases/buildgalculator.py5
-rw-r--r--meta/lib/oeqa/sdk/cases/buildlzip.py2
-rw-r--r--meta/lib/oeqa/sdk/cases/gcc.py2
-rw-r--r--meta/lib/oeqa/sdk/cases/maturin.py79
-rw-r--r--meta/lib/oeqa/sdk/cases/perl.py2
-rw-r--r--meta/lib/oeqa/sdk/cases/python.py13
-rw-r--r--meta/lib/oeqa/sdk/cases/rust.py57
-rw-r--r--meta/lib/oeqa/sdk/files/rust/hello/Cargo.toml6
-rw-r--r--meta/lib/oeqa/sdk/files/rust/hello/build.rs3
-rw-r--r--meta/lib/oeqa/sdk/files/rust/hello/src/main.rs3
-rw-r--r--meta/lib/oeqa/sdk/testmetaidesupport.py45
-rw-r--r--meta/lib/oeqa/sdk/testsdk.py14
-rw-r--r--meta/lib/oeqa/sdkext/cases/devtool.py2
-rw-r--r--meta/lib/oeqa/sdkext/testsdk.py7
-rw-r--r--meta/lib/oeqa/selftest/case.py19
-rw-r--r--meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py10
-rw-r--r--meta/lib/oeqa/selftest/cases/archiver.py56
-rw-r--r--meta/lib/oeqa/selftest/cases/baremetal.py14
-rw-r--r--meta/lib/oeqa/selftest/cases/bblayers.py130
-rw-r--r--meta/lib/oeqa/selftest/cases/bblock.py203
-rw-r--r--meta/lib/oeqa/selftest/cases/bblogging.py182
-rw-r--r--meta/lib/oeqa/selftest/cases/bbtests.py129
-rw-r--r--meta/lib/oeqa/selftest/cases/binutils.py16
-rw-r--r--meta/lib/oeqa/selftest/cases/buildhistory.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/buildoptions.py70
-rw-r--r--meta/lib/oeqa/selftest/cases/c_cpp.py60
-rw-r--r--meta/lib/oeqa/selftest/cases/containerimage.py8
-rw-r--r--meta/lib/oeqa/selftest/cases/cve_check.py208
-rw-r--r--meta/lib/oeqa/selftest/cases/debuginfod.py158
-rw-r--r--meta/lib/oeqa/selftest/cases/devtool.py1254
-rw-r--r--meta/lib/oeqa/selftest/cases/distrodata.py17
-rw-r--r--meta/lib/oeqa/selftest/cases/efibootpartition.py47
-rw-r--r--meta/lib/oeqa/selftest/cases/esdk.py (renamed from meta/lib/oeqa/selftest/cases/eSDK.py)10
-rw-r--r--meta/lib/oeqa/selftest/cases/externalsrc.py44
-rw-r--r--meta/lib/oeqa/selftest/cases/fetch.py69
-rw-r--r--meta/lib/oeqa/selftest/cases/fitimage.py537
-rw-r--r--meta/lib/oeqa/selftest/cases/gcc.py21
-rw-r--r--meta/lib/oeqa/selftest/cases/gdbserver.py67
-rw-r--r--meta/lib/oeqa/selftest/cases/gitarchivetests.py136
-rw-r--r--meta/lib/oeqa/selftest/cases/glibc.py24
-rw-r--r--meta/lib/oeqa/selftest/cases/gotoolchain.py5
-rw-r--r--meta/lib/oeqa/selftest/cases/image_typedep.py6
-rw-r--r--meta/lib/oeqa/selftest/cases/imagefeatures.py174
-rw-r--r--meta/lib/oeqa/selftest/cases/incompatible_lic.py139
-rw-r--r--meta/lib/oeqa/selftest/cases/intercept.py21
-rw-r--r--meta/lib/oeqa/selftest/cases/kerneldevelopment.py9
-rw-r--r--meta/lib/oeqa/selftest/cases/layerappend.py14
-rw-r--r--meta/lib/oeqa/selftest/cases/liboe.py4
-rw-r--r--meta/lib/oeqa/selftest/cases/lic_checksum.py25
-rw-r--r--meta/lib/oeqa/selftest/cases/locales.py54
-rw-r--r--meta/lib/oeqa/selftest/cases/manifest.py4
-rw-r--r--meta/lib/oeqa/selftest/cases/meta_ide.py21
-rw-r--r--meta/lib/oeqa/selftest/cases/minidebuginfo.py44
-rw-r--r--meta/lib/oeqa/selftest/cases/multiconfig.py21
-rw-r--r--meta/lib/oeqa/selftest/cases/newlib.py13
-rw-r--r--meta/lib/oeqa/selftest/cases/oelib/buildhistory.py26
-rw-r--r--meta/lib/oeqa/selftest/cases/oelib/elf.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/oelib/license.py24
-rw-r--r--meta/lib/oeqa/selftest/cases/oelib/path.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/oelib/types.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/oelib/utils.py5
-rw-r--r--meta/lib/oeqa/selftest/cases/oescripts.py47
-rw-r--r--meta/lib/oeqa/selftest/cases/overlayfs.py502
-rw-r--r--meta/lib/oeqa/selftest/cases/package.py46
-rw-r--r--meta/lib/oeqa/selftest/cases/pkgdata.py11
-rw-r--r--meta/lib/oeqa/selftest/cases/prservice.py35
-rw-r--r--meta/lib/oeqa/selftest/cases/pseudo.py2
-rw-r--r--meta/lib/oeqa/selftest/cases/recipetool.py857
-rw-r--r--meta/lib/oeqa/selftest/cases/recipeutils.py16
-rw-r--r--meta/lib/oeqa/selftest/cases/reproducible.py77
-rw-r--r--meta/lib/oeqa/selftest/cases/resulttooltests.py279
-rw-r--r--meta/lib/oeqa/selftest/cases/rootfspostcommandstests.py97
-rw-r--r--meta/lib/oeqa/selftest/cases/rpmtests.py14
-rw-r--r--meta/lib/oeqa/selftest/cases/runcmd.py10
-rw-r--r--meta/lib/oeqa/selftest/cases/runqemu.py83
-rw-r--r--meta/lib/oeqa/selftest/cases/runtime_test.py164
-rw-r--r--meta/lib/oeqa/selftest/cases/rust.py231
-rw-r--r--meta/lib/oeqa/selftest/cases/selftest.py3
-rw-r--r--meta/lib/oeqa/selftest/cases/signing.py16
-rw-r--r--meta/lib/oeqa/selftest/cases/spdx.py54
-rw-r--r--meta/lib/oeqa/selftest/cases/sstate.py67
-rw-r--r--meta/lib/oeqa/selftest/cases/sstatetests.py711
-rw-r--r--meta/lib/oeqa/selftest/cases/sysroot.py59
-rw-r--r--meta/lib/oeqa/selftest/cases/tinfoil.py54
-rw-r--r--meta/lib/oeqa/selftest/cases/usergrouptests.py57
-rw-r--r--meta/lib/oeqa/selftest/cases/wic.py762
-rw-r--r--meta/lib/oeqa/selftest/cases/wrapper.py16
-rw-r--r--meta/lib/oeqa/selftest/cases/yoctotestresultsquerytests.py39
-rw-r--r--meta/lib/oeqa/selftest/context.py148
-rw-r--r--meta/lib/oeqa/targetcontrol.py21
-rw-r--r--meta/lib/oeqa/utils/__init__.py9
-rw-r--r--meta/lib/oeqa/utils/buildproject.py3
-rw-r--r--meta/lib/oeqa/utils/commands.py39
-rw-r--r--meta/lib/oeqa/utils/decorators.py85
-rw-r--r--meta/lib/oeqa/utils/dump.py89
-rw-r--r--meta/lib/oeqa/utils/ftools.py2
-rw-r--r--meta/lib/oeqa/utils/gitarchive.py56
-rw-r--r--meta/lib/oeqa/utils/httpserver.py29
-rw-r--r--meta/lib/oeqa/utils/logparser.py98
-rw-r--r--meta/lib/oeqa/utils/metadata.py6
-rw-r--r--meta/lib/oeqa/utils/network.py2
-rw-r--r--meta/lib/oeqa/utils/nfs.py10
-rw-r--r--meta/lib/oeqa/utils/package_manager.py2
-rw-r--r--meta/lib/oeqa/utils/postactions.py98
-rw-r--r--meta/lib/oeqa/utils/qemurunner.py447
-rw-r--r--meta/lib/oeqa/utils/qemutinyrunner.py6
-rw-r--r--meta/lib/oeqa/utils/subprocesstweak.py2
-rw-r--r--meta/lib/oeqa/utils/targetbuild.py4
212 files changed, 9824 insertions, 2629 deletions
diff --git a/meta/lib/oeqa/buildperf/base.py b/meta/lib/oeqa/buildperf/base.py
index 5f1805d86c..5d656c781a 100644
--- a/meta/lib/oeqa/buildperf/base.py
+++ b/meta/lib/oeqa/buildperf/base.py
@@ -444,7 +444,7 @@ class BuildPerfTestCase(unittest.TestCase):
444 buildstats = [] 444 buildstats = []
445 for fname in os.listdir(bs_dir): 445 for fname in os.listdir(bs_dir):
446 recipe_dir = os.path.join(bs_dir, fname) 446 recipe_dir = os.path.join(bs_dir, fname)
447 if not os.path.isdir(recipe_dir): 447 if not os.path.isdir(recipe_dir) or fname == "reduced_proc_pressure":
448 continue 448 continue
449 name, epoch, version, revision = split_nevr(fname) 449 name, epoch, version, revision = split_nevr(fname)
450 recipe_bs = OrderedDict((('name', name), 450 recipe_bs = OrderedDict((('name', name),
diff --git a/meta/lib/oeqa/controllers/__init__.py b/meta/lib/oeqa/controllers/__init__.py
index cc3836c4bf..0fc905be9a 100644
--- a/meta/lib/oeqa/controllers/__init__.py
+++ b/meta/lib/oeqa/controllers/__init__.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4# Enable other layers to have modules in the same named directory 6# Enable other layers to have modules in the same named directory
diff --git a/meta/lib/oeqa/controllers/masterimage.py b/meta/lib/oeqa/controllers/controllerimage.py
index 0bf5917e48..78a4aaff87 100644
--- a/meta/lib/oeqa/controllers/masterimage.py
+++ b/meta/lib/oeqa/controllers/controllerimage.py
@@ -3,13 +3,13 @@
3# SPDX-License-Identifier: MIT 3# SPDX-License-Identifier: MIT
4# 4#
5# This module adds support to testimage.bbclass to deploy images and run 5# This module adds support to testimage.bbclass to deploy images and run
6# tests using a "master image" - this is a "known good" image that is 6# tests using a "controller image" - this is a "known good" image that is
7# installed onto the device as part of initial setup and will be booted into 7# installed onto the device as part of initial setup and will be booted into
8# with no interaction; we can then use it to deploy the image to be tested 8# with no interaction; we can then use it to deploy the image to be tested
9# to a second partition before running the tests. 9# to a second partition before running the tests.
10# 10#
11# For an example master image, see core-image-testmaster 11# For an example controller image, see core-image-testcontroller
12# (meta/recipes-extended/images/core-image-testmaster.bb) 12# (meta/recipes-extended/images/core-image-testcontroller.bb)
13 13
14import os 14import os
15import bb 15import bb
@@ -24,12 +24,12 @@ from oeqa.utils import CommandError
24 24
25from abc import ABCMeta, abstractmethod 25from abc import ABCMeta, abstractmethod
26 26
27class MasterImageHardwareTarget(oeqa.targetcontrol.BaseTarget, metaclass=ABCMeta): 27class ControllerImageHardwareTarget(oeqa.targetcontrol.BaseTarget, metaclass=ABCMeta):
28 28
29 supported_image_fstypes = ['tar.gz', 'tar.bz2'] 29 supported_image_fstypes = ['tar.gz', 'tar.bz2']
30 30
31 def __init__(self, d): 31 def __init__(self, d):
32 super(MasterImageHardwareTarget, self).__init__(d) 32 super(ControllerImageHardwareTarget, self).__init__(d)
33 33
34 # target ip 34 # target ip
35 addr = d.getVar("TEST_TARGET_IP") or bb.fatal('Please set TEST_TARGET_IP with the IP address of the machine you want to run the tests on.') 35 addr = d.getVar("TEST_TARGET_IP") or bb.fatal('Please set TEST_TARGET_IP with the IP address of the machine you want to run the tests on.')
@@ -61,8 +61,8 @@ class MasterImageHardwareTarget(oeqa.targetcontrol.BaseTarget, metaclass=ABCMeta
61 if not os.path.isfile(self.kernel): 61 if not os.path.isfile(self.kernel):
62 bb.fatal("No kernel found. Expected path: %s" % self.kernel) 62 bb.fatal("No kernel found. Expected path: %s" % self.kernel)
63 63
64 # master ssh connection 64 # controller ssh connection
65 self.master = None 65 self.controller = None
66 # if the user knows what they are doing, then by all means... 66 # if the user knows what they are doing, then by all means...
67 self.user_cmds = d.getVar("TEST_DEPLOY_CMDS") 67 self.user_cmds = d.getVar("TEST_DEPLOY_CMDS")
68 self.deploy_cmds = None 68 self.deploy_cmds = None
@@ -119,19 +119,19 @@ class MasterImageHardwareTarget(oeqa.targetcontrol.BaseTarget, metaclass=ABCMeta
119 119
120 def deploy(self): 120 def deploy(self):
121 # base class just sets the ssh log file for us 121 # base class just sets the ssh log file for us
122 super(MasterImageHardwareTarget, self).deploy() 122 super(ControllerImageHardwareTarget, self).deploy()
123 self.master = sshcontrol.SSHControl(ip=self.ip, logfile=self.sshlog, timeout=600, port=self.port) 123 self.controller = sshcontrol.SSHControl(ip=self.ip, logfile=self.sshlog, timeout=600, port=self.port)
124 status, output = self.master.run("cat /etc/masterimage") 124 status, output = self.controller.run("cat /etc/controllerimage")
125 if status != 0: 125 if status != 0:
126 # We're not booted into the master image, so try rebooting 126 # We're not booted into the controller image, so try rebooting
127 bb.plain("%s - booting into the master image" % self.pn) 127 bb.plain("%s - booting into the controller image" % self.pn)
128 self.power_ctl("cycle") 128 self.power_ctl("cycle")
129 self._wait_until_booted() 129 self._wait_until_booted()
130 130
131 bb.plain("%s - deploying image on target" % self.pn) 131 bb.plain("%s - deploying image on target" % self.pn)
132 status, output = self.master.run("cat /etc/masterimage") 132 status, output = self.controller.run("cat /etc/controllerimage")
133 if status != 0: 133 if status != 0:
134 bb.fatal("No ssh connectivity or target isn't running a master image.\n%s" % output) 134 bb.fatal("No ssh connectivity or target isn't running a controller image.\n%s" % output)
135 if self.user_cmds: 135 if self.user_cmds:
136 self.deploy_cmds = self.user_cmds.split("\n") 136 self.deploy_cmds = self.user_cmds.split("\n")
137 try: 137 try:
@@ -156,10 +156,10 @@ class MasterImageHardwareTarget(oeqa.targetcontrol.BaseTarget, metaclass=ABCMeta
156 156
157 def stop(self): 157 def stop(self):
158 bb.plain("%s - reboot/powercycle target" % self.pn) 158 bb.plain("%s - reboot/powercycle target" % self.pn)
159 self.power_cycle(self.master) 159 self.power_cycle(self.controller)
160 160
161 161
162class SystemdbootTarget(MasterImageHardwareTarget): 162class SystemdbootTarget(ControllerImageHardwareTarget):
163 163
164 def __init__(self, d): 164 def __init__(self, d):
165 super(SystemdbootTarget, self).__init__(d) 165 super(SystemdbootTarget, self).__init__(d)
@@ -184,16 +184,16 @@ class SystemdbootTarget(MasterImageHardwareTarget):
184 184
185 def _deploy(self): 185 def _deploy(self):
186 # make sure these aren't mounted 186 # make sure these aren't mounted
187 self.master.run("umount /boot; umount /mnt/testrootfs; umount /sys/firmware/efi/efivars;") 187 self.controller.run("umount /boot; umount /mnt/testrootfs; umount /sys/firmware/efi/efivars;")
188 # from now on, every deploy cmd should return 0 188 # from now on, every deploy cmd should return 0
189 # else an exception will be thrown by sshcontrol 189 # else an exception will be thrown by sshcontrol
190 self.master.ignore_status = False 190 self.controller.ignore_status = False
191 self.master.copy_to(self.rootfs, "~/test-rootfs." + self.image_fstype) 191 self.controller.copy_to(self.rootfs, "~/test-rootfs." + self.image_fstype)
192 self.master.copy_to(self.kernel, "~/test-kernel") 192 self.controller.copy_to(self.kernel, "~/test-kernel")
193 for cmd in self.deploy_cmds: 193 for cmd in self.deploy_cmds:
194 self.master.run(cmd) 194 self.controller.run(cmd)
195 195
196 def _start(self, params=None): 196 def _start(self, params=None):
197 self.power_cycle(self.master) 197 self.power_cycle(self.controller)
198 # there are better ways than a timeout but this should work for now 198 # there are better ways than a timeout but this should work for now
199 time.sleep(120) 199 time.sleep(120)
diff --git a/meta/lib/oeqa/controllers/testtargetloader.py b/meta/lib/oeqa/controllers/testtargetloader.py
index 23101c7371..209ff7061a 100644
--- a/meta/lib/oeqa/controllers/testtargetloader.py
+++ b/meta/lib/oeqa/controllers/testtargetloader.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
diff --git a/meta/lib/oeqa/core/case.py b/meta/lib/oeqa/core/case.py
index aae451fef2..bc4446a938 100644
--- a/meta/lib/oeqa/core/case.py
+++ b/meta/lib/oeqa/core/case.py
@@ -43,8 +43,13 @@ class OETestCase(unittest.TestCase):
43 clss.tearDownClassMethod() 43 clss.tearDownClassMethod()
44 44
45 def _oeSetUp(self): 45 def _oeSetUp(self):
46 for d in self.decorators: 46 try:
47 d.setUpDecorator() 47 for d in self.decorators:
48 d.setUpDecorator()
49 except:
50 for d in self.decorators:
51 d.tearDownDecorator()
52 raise
48 self.setUpMethod() 53 self.setUpMethod()
49 54
50 def _oeTearDown(self): 55 def _oeTearDown(self):
diff --git a/meta/lib/oeqa/core/context.py b/meta/lib/oeqa/core/context.py
index 2abe353d27..9313271f58 100644
--- a/meta/lib/oeqa/core/context.py
+++ b/meta/lib/oeqa/core/context.py
@@ -81,7 +81,7 @@ class OETestContext(object):
81 def runTests(self, processes=None, skips=[]): 81 def runTests(self, processes=None, skips=[]):
82 self.runner = self.runnerClass(self, descriptions=False, verbosity=2) 82 self.runner = self.runnerClass(self, descriptions=False, verbosity=2)
83 83
84 # Dinamically skip those tests specified though arguments 84 # Dynamically skip those tests specified though arguments
85 self.skipTests(skips) 85 self.skipTests(skips)
86 86
87 self._run_start_time = time.time() 87 self._run_start_time = time.time()
diff --git a/meta/lib/oeqa/core/decorator/__init__.py b/meta/lib/oeqa/core/decorator/__init__.py
index 1a82518ab6..93efd30e1d 100644
--- a/meta/lib/oeqa/core/decorator/__init__.py
+++ b/meta/lib/oeqa/core/decorator/__init__.py
@@ -5,8 +5,7 @@
5# 5#
6 6
7from functools import wraps 7from functools import wraps
8from abc import abstractmethod, ABCMeta 8from abc import ABCMeta
9from oeqa.core.utils.misc import strToList
10 9
11decoratorClasses = set() 10decoratorClasses = set()
12 11
@@ -65,15 +64,11 @@ class OETestDiscover(OETestDecorator):
65 return registry['cases'] 64 return registry['cases']
66 65
67def OETestTag(*tags): 66def OETestTag(*tags):
68 expandedtags = []
69 for tag in tags:
70 expandedtags += strToList(tag)
71 def decorator(item): 67 def decorator(item):
72 if hasattr(item, "__oeqa_testtags"): 68 if hasattr(item, "__oeqa_testtags"):
73 # do not append, create a new list (to handle classes with inheritance) 69 # do not append, create a new list (to handle classes with inheritance)
74 item.__oeqa_testtags = list(item.__oeqa_testtags) + expandedtags 70 item.__oeqa_testtags = list(item.__oeqa_testtags) + list(tags)
75 else: 71 else:
76 item.__oeqa_testtags = expandedtags 72 item.__oeqa_testtags = tags
77 return item 73 return item
78 return decorator 74 return decorator
79
diff --git a/meta/lib/oeqa/core/decorator/data.py b/meta/lib/oeqa/core/decorator/data.py
index bc4939e87c..5444b2cb75 100644
--- a/meta/lib/oeqa/core/decorator/data.py
+++ b/meta/lib/oeqa/core/decorator/data.py
@@ -13,8 +13,8 @@ def has_feature(td, feature):
13 Checks for feature in DISTRO_FEATURES or IMAGE_FEATURES. 13 Checks for feature in DISTRO_FEATURES or IMAGE_FEATURES.
14 """ 14 """
15 15
16 if (feature in td.get('DISTRO_FEATURES', '') or 16 if (feature in td.get('DISTRO_FEATURES', '').split() or
17 feature in td.get('IMAGE_FEATURES', '')): 17 feature in td.get('IMAGE_FEATURES', '').split()):
18 return True 18 return True
19 return False 19 return False
20 20
@@ -23,18 +23,7 @@ def has_machine(td, machine):
23 Checks for MACHINE. 23 Checks for MACHINE.
24 """ 24 """
25 25
26 if (machine in td.get('MACHINE', '')): 26 if (machine == td.get('MACHINE', '')):
27 return True
28 return False
29
30def is_qemu(td, qemu):
31 """
32 Checks if MACHINE is qemu.
33 """
34
35 machine = td.get('MACHINE', '')
36 if (qemu in td.get('MACHINE', '') or
37 machine.startswith('qemu')):
38 return True 27 return True
39 return False 28 return False
40 29
@@ -189,34 +178,53 @@ class skipIfMachine(OETestDecorator):
189@registerDecorator 178@registerDecorator
190class skipIfNotQemu(OETestDecorator): 179class skipIfNotQemu(OETestDecorator):
191 """ 180 """
192 Skip test based on MACHINE. 181 Skip test if MACHINE is not qemu*
193
194 value must be a qemu MACHINE or it will skip the test
195 with msg as the reason.
196 """ 182 """
183 def setUpDecorator(self):
184 self.logger.debug("Checking if not qemu MACHINE")
185 if not self.case.td.get('MACHINE', '').startswith('qemu'):
186 self.case.skipTest('Test only runs on qemu machines')
197 187
198 attrs = ('value', 'msg') 188@registerDecorator
199 189class skipIfNotQemuUsermode(OETestDecorator):
190 """
191 Skip test if MACHINE_FEATURES does not contain qemu-usermode
192 """
200 def setUpDecorator(self): 193 def setUpDecorator(self):
201 msg = ('Checking if %s is not this MACHINE' % self.value) 194 self.logger.debug("Checking if MACHINE_FEATURES does not contain qemu-usermode")
202 self.logger.debug(msg) 195 if 'qemu-usermode' not in self.case.td.get('MACHINE_FEATURES', '').split():
203 if not is_qemu(self.case.td, self.value): 196 self.case.skipTest('Test requires qemu-usermode in MACHINE_FEATURES')
204 self.case.skipTest(self.msg)
205 197
206@registerDecorator 198@registerDecorator
207class skipIfQemu(OETestDecorator): 199class skipIfQemu(OETestDecorator):
208 """ 200 """
209 Skip test based on Qemu Machine. 201 Skip test if MACHINE is qemu*
210 202 """
211 value must not be a qemu machine or it will skip the test 203 def setUpDecorator(self):
212 with msg as the reason. 204 self.logger.debug("Checking if qemu MACHINE")
213 """ 205 if self.case.td.get('MACHINE', '').startswith('qemu'):
206 self.case.skipTest('Test only runs on real hardware')
214 207
215 attrs = ('value', 'msg') 208@registerDecorator
209class skipIfArch(OETestDecorator):
210 """
211 Skip test if HOST_ARCH is present in the tuple specified.
212 """
216 213
214 attrs = ('archs',)
217 def setUpDecorator(self): 215 def setUpDecorator(self):
218 msg = ('Checking if %s is this MACHINE' % self.value) 216 arch = self.case.td['HOST_ARCH']
219 self.logger.debug(msg) 217 if arch in self.archs:
220 if is_qemu(self.case.td, self.value): 218 self.case.skipTest('Test skipped on %s' % arch)
221 self.case.skipTest(self.msg) 219
220@registerDecorator
221class skipIfNotArch(OETestDecorator):
222 """
223 Skip test if HOST_ARCH is not present in the tuple specified.
224 """
222 225
226 attrs = ('archs',)
227 def setUpDecorator(self):
228 arch = self.case.td['HOST_ARCH']
229 if arch not in self.archs:
230 self.case.skipTest('Test skipped on %s' % arch)
diff --git a/meta/lib/oeqa/core/decorator/oetimeout.py b/meta/lib/oeqa/core/decorator/oetimeout.py
index df90d1c798..5e6873ad48 100644
--- a/meta/lib/oeqa/core/decorator/oetimeout.py
+++ b/meta/lib/oeqa/core/decorator/oetimeout.py
@@ -24,5 +24,6 @@ class OETimeout(OETestDecorator):
24 24
25 def tearDownDecorator(self): 25 def tearDownDecorator(self):
26 signal.alarm(0) 26 signal.alarm(0)
27 signal.signal(signal.SIGALRM, self.alarmSignal) 27 if hasattr(self, 'alarmSignal'):
28 self.logger.debug("Removed SIGALRM handler") 28 signal.signal(signal.SIGALRM, self.alarmSignal)
29 self.logger.debug("Removed SIGALRM handler")
diff --git a/meta/lib/oeqa/core/loader.py b/meta/lib/oeqa/core/loader.py
index 11978213b8..d12d5a055c 100644
--- a/meta/lib/oeqa/core/loader.py
+++ b/meta/lib/oeqa/core/loader.py
@@ -37,7 +37,7 @@ def _find_duplicated_modules(suite, directory):
37 if path: 37 if path:
38 raise ImportError("Duplicated %s module found in %s" % (module, path)) 38 raise ImportError("Duplicated %s module found in %s" % (module, path))
39 39
40def _built_modules_dict(modules): 40def _built_modules_dict(modules, logger):
41 modules_dict = {} 41 modules_dict = {}
42 42
43 if modules == None: 43 if modules == None:
@@ -48,6 +48,9 @@ def _built_modules_dict(modules):
48 # characters, whereas class names do 48 # characters, whereas class names do
49 m = re.match(r'^([0-9a-z_.]+)(?:\.(\w[^.]*)(?:\.([^.]+))?)?$', module, flags=re.ASCII) 49 m = re.match(r'^([0-9a-z_.]+)(?:\.(\w[^.]*)(?:\.([^.]+))?)?$', module, flags=re.ASCII)
50 if not m: 50 if not m:
51 logger.warn("module '%s' was skipped from selected modules, "\
52 "because it doesn't match with module name assumptions: "\
53 "package and module names do not contain upper case characters, whereas class names do" % module)
51 continue 54 continue
52 55
53 module_name, class_name, test_name = m.groups() 56 module_name, class_name, test_name = m.groups()
@@ -58,6 +61,8 @@ def _built_modules_dict(modules):
58 modules_dict[module_name][class_name] = [] 61 modules_dict[module_name][class_name] = []
59 if test_name and test_name not in modules_dict[module_name][class_name]: 62 if test_name and test_name not in modules_dict[module_name][class_name]:
60 modules_dict[module_name][class_name].append(test_name) 63 modules_dict[module_name][class_name].append(test_name)
64 if modules and not modules_dict:
65 raise OEQATestNotFound("All selected modules were skipped, this would trigger selftest with all tests and -r ignored.")
61 66
62 return modules_dict 67 return modules_dict
63 68
@@ -71,7 +76,7 @@ class OETestLoader(unittest.TestLoader):
71 *args, **kwargs): 76 *args, **kwargs):
72 self.tc = tc 77 self.tc = tc
73 78
74 self.modules = _built_modules_dict(modules) 79 self.modules = _built_modules_dict(modules, tc.logger)
75 80
76 self.tests = tests 81 self.tests = tests
77 self.modules_required = modules_required 82 self.modules_required = modules_required
@@ -311,6 +316,9 @@ class OETestLoader(unittest.TestLoader):
311 module_name_small in self.modules) \ 316 module_name_small in self.modules) \
312 else False 317 else False
313 318
319 if any(c.isupper() for c in module.__name__):
320 raise SystemExit("Module '%s' contains uppercase characters and this isn't supported. Please fix the module name." % module.__name__)
321
314 return (load_module, load_underscore) 322 return (load_module, load_underscore)
315 323
316 324
diff --git a/meta/lib/oeqa/core/runner.py b/meta/lib/oeqa/core/runner.py
index d50690ab37..a86a706bd9 100644
--- a/meta/lib/oeqa/core/runner.py
+++ b/meta/lib/oeqa/core/runner.py
@@ -44,6 +44,7 @@ class OETestResult(_TestResult):
44 self.endtime = {} 44 self.endtime = {}
45 self.progressinfo = {} 45 self.progressinfo = {}
46 self.extraresults = {} 46 self.extraresults = {}
47 self.shownmsg = []
47 48
48 # Inject into tc so that TestDepends decorator can see results 49 # Inject into tc so that TestDepends decorator can see results
49 tc.results = self 50 tc.results = self
@@ -74,6 +75,7 @@ class OETestResult(_TestResult):
74 for (scase, msg) in getattr(self, t): 75 for (scase, msg) in getattr(self, t):
75 if test.id() == scase.id(): 76 if test.id() == scase.id():
76 self.tc.logger.info(str(msg)) 77 self.tc.logger.info(str(msg))
78 self.shownmsg.append(test.id())
77 break 79 break
78 80
79 def logSummary(self, component, context_msg=''): 81 def logSummary(self, component, context_msg=''):
@@ -169,7 +171,6 @@ class OETestResult(_TestResult):
169 171
170 def logDetails(self, json_file_dir=None, configuration=None, result_id=None, 172 def logDetails(self, json_file_dir=None, configuration=None, result_id=None,
171 dump_streams=False): 173 dump_streams=False):
172 self.tc.logger.info("RESULTS:")
173 174
174 result = self.extraresults 175 result = self.extraresults
175 logs = {} 176 logs = {}
@@ -193,6 +194,10 @@ class OETestResult(_TestResult):
193 report = {'status': status} 194 report = {'status': status}
194 if log: 195 if log:
195 report['log'] = log 196 report['log'] = log
197 # Class setup failures wouldn't enter stopTest so would never display
198 if case.id() not in self.shownmsg:
199 self.tc.logger.info("Failure (%s) for %s:\n" % (status, case.id()) + log)
200
196 if duration: 201 if duration:
197 report['duration'] = duration 202 report['duration'] = duration
198 203
@@ -215,6 +220,7 @@ class OETestResult(_TestResult):
215 report['stderr'] = stderr 220 report['stderr'] = stderr
216 result[case.id()] = report 221 result[case.id()] = report
217 222
223 self.tc.logger.info("RESULTS:")
218 for i in ['PASSED', 'SKIPPED', 'EXPECTEDFAIL', 'ERROR', 'FAILED', 'UNKNOWN']: 224 for i in ['PASSED', 'SKIPPED', 'EXPECTEDFAIL', 'ERROR', 'FAILED', 'UNKNOWN']:
219 if i not in logs: 225 if i not in logs:
220 continue 226 continue
@@ -229,6 +235,10 @@ class OETestResult(_TestResult):
229 # Override as we unexpected successes aren't failures for us 235 # Override as we unexpected successes aren't failures for us
230 return (len(self.failures) == len(self.errors) == 0) 236 return (len(self.failures) == len(self.errors) == 0)
231 237
238 def hasAnyFailingTest(self):
239 # Account for expected failures
240 return not self.wasSuccessful() or len(self.expectedFailures)
241
232class OEListTestsResult(object): 242class OEListTestsResult(object):
233 def wasSuccessful(self): 243 def wasSuccessful(self):
234 return True 244 return True
diff --git a/meta/lib/oeqa/core/target/qemu.py b/meta/lib/oeqa/core/target/qemu.py
index 0f29414df5..d93b3ac94a 100644
--- a/meta/lib/oeqa/core/target/qemu.py
+++ b/meta/lib/oeqa/core/target/qemu.py
@@ -8,20 +8,21 @@ import os
8import sys 8import sys
9import signal 9import signal
10import time 10import time
11import glob
12import subprocess
11from collections import defaultdict 13from collections import defaultdict
12 14
13from .ssh import OESSHTarget 15from .ssh import OESSHTarget
14from oeqa.utils.qemurunner import QemuRunner 16from oeqa.utils.qemurunner import QemuRunner
15from oeqa.utils.dump import TargetDumper
16 17
17supported_fstypes = ['ext3', 'ext4', 'cpio.gz', 'wic'] 18supported_fstypes = ['ext3', 'ext4', 'cpio.gz', 'wic']
18 19
19class OEQemuTarget(OESSHTarget): 20class OEQemuTarget(OESSHTarget):
20 def __init__(self, logger, server_ip, timeout=300, user='root', 21 def __init__(self, logger, server_ip, timeout=300, user='root',
21 port=None, machine='', rootfs='', kernel='', kvm=False, slirp=False, 22 port=None, machine='', rootfs='', kernel='', kvm=False, slirp=False,
22 dump_dir='', dump_host_cmds='', display='', bootlog='', 23 dump_dir='', display='', bootlog='',
23 tmpdir='', dir_image='', boottime=60, serial_ports=2, 24 tmpdir='', dir_image='', boottime=60, serial_ports=2,
24 boot_patterns = defaultdict(str), ovmf=False, **kwargs): 25 boot_patterns = defaultdict(str), ovmf=False, tmpfsdir=None, **kwargs):
25 26
26 super(OEQemuTarget, self).__init__(logger, None, server_ip, timeout, 27 super(OEQemuTarget, self).__init__(logger, None, server_ip, timeout,
27 user, port) 28 user, port)
@@ -35,17 +36,15 @@ class OEQemuTarget(OESSHTarget):
35 self.ovmf = ovmf 36 self.ovmf = ovmf
36 self.use_slirp = slirp 37 self.use_slirp = slirp
37 self.boot_patterns = boot_patterns 38 self.boot_patterns = boot_patterns
39 self.dump_dir = dump_dir
40 self.bootlog = bootlog
38 41
39 self.runner = QemuRunner(machine=machine, rootfs=rootfs, tmpdir=tmpdir, 42 self.runner = QemuRunner(machine=machine, rootfs=rootfs, tmpdir=tmpdir,
40 deploy_dir_image=dir_image, display=display, 43 deploy_dir_image=dir_image, display=display,
41 logfile=bootlog, boottime=boottime, 44 logfile=bootlog, boottime=boottime,
42 use_kvm=kvm, use_slirp=slirp, dump_dir=dump_dir, 45 use_kvm=kvm, use_slirp=slirp, dump_dir=dump_dir, logger=logger,
43 dump_host_cmds=dump_host_cmds, logger=logger,
44 serial_ports=serial_ports, boot_patterns = boot_patterns, 46 serial_ports=serial_ports, boot_patterns = boot_patterns,
45 use_ovmf=ovmf) 47 use_ovmf=ovmf, tmpfsdir=tmpfsdir)
46 dump_target_cmds = kwargs.get("testimage_dump_target")
47 self.target_dumper = TargetDumper(dump_target_cmds, dump_dir, self.runner)
48 self.target_dumper.create_dir("qemu")
49 48
50 def start(self, params=None, extra_bootparams=None, runqemuparams=''): 49 def start(self, params=None, extra_bootparams=None, runqemuparams=''):
51 if self.use_slirp and not self.server_ip: 50 if self.use_slirp and not self.server_ip:
@@ -68,7 +67,28 @@ class OEQemuTarget(OESSHTarget):
68 self.server_ip = self.runner.server_ip 67 self.server_ip = self.runner.server_ip
69 else: 68 else:
70 self.stop() 69 self.stop()
71 raise RuntimeError("FAILED to start qemu - check the task log and the boot log") 70 # Display the first 20 lines of top and
71 # last 20 lines of the bootlog when the
72 # target is not being booted up.
73 topfile = glob.glob(self.dump_dir + "/*_qemu/host_*_top")
74 msg = "\n\n===== start: snippet =====\n\n"
75 for f in topfile:
76 msg += "file: %s\n\n" % f
77 with open(f) as tf:
78 for x in range(20):
79 msg += next(tf)
80 msg += "\n\n===== end: snippet =====\n\n"
81 blcmd = ["tail", "-20", self.bootlog]
82 msg += "===== start: snippet =====\n\n"
83 try:
84 out = subprocess.check_output(blcmd, stderr=subprocess.STDOUT, timeout=1).decode('utf-8')
85 msg += "file: %s\n\n" % self.bootlog
86 msg += out
87 except (subprocess.CalledProcessError, subprocess.TimeoutExpired, FileNotFoundError) as err:
88 msg += "Error running command: %s\n%s\n" % (blcmd, err)
89 msg += "\n\n===== end: snippet =====\n"
90
91 raise RuntimeError("FAILED to start qemu - check the task log and the boot log %s" % (msg))
72 92
73 def stop(self): 93 def stop(self):
74 self.runner.stop() 94 self.runner.stop()
diff --git a/meta/lib/oeqa/core/target/ssh.py b/meta/lib/oeqa/core/target/ssh.py
index 461448dbc5..09cdd14c75 100644
--- a/meta/lib/oeqa/core/target/ssh.py
+++ b/meta/lib/oeqa/core/target/ssh.py
@@ -34,12 +34,17 @@ class OESSHTarget(OETarget):
34 self.timeout = timeout 34 self.timeout = timeout
35 self.user = user 35 self.user = user
36 ssh_options = [ 36 ssh_options = [
37 '-o', 'ServerAliveCountMax=2',
38 '-o', 'ServerAliveInterval=30',
37 '-o', 'UserKnownHostsFile=/dev/null', 39 '-o', 'UserKnownHostsFile=/dev/null',
38 '-o', 'StrictHostKeyChecking=no', 40 '-o', 'StrictHostKeyChecking=no',
39 '-o', 'LogLevel=ERROR' 41 '-o', 'LogLevel=ERROR'
40 ] 42 ]
43 scp_options = [
44 '-r'
45 ]
41 self.ssh = ['ssh', '-l', self.user ] + ssh_options 46 self.ssh = ['ssh', '-l', self.user ] + ssh_options
42 self.scp = ['scp'] + ssh_options 47 self.scp = ['scp'] + ssh_options + scp_options
43 if port: 48 if port:
44 self.ssh = self.ssh + [ '-p', port ] 49 self.ssh = self.ssh + [ '-p', port ]
45 self.scp = self.scp + [ '-P', port ] 50 self.scp = self.scp + [ '-P', port ]
@@ -67,7 +72,7 @@ class OESSHTarget(OETarget):
67 72
68 return (status, output) 73 return (status, output)
69 74
70 def run(self, command, timeout=None): 75 def run(self, command, timeout=None, ignore_status=True):
71 """ 76 """
72 Runs command in target. 77 Runs command in target.
73 78
@@ -86,10 +91,9 @@ class OESSHTarget(OETarget):
86 else: 91 else:
87 processTimeout = self.timeout 92 processTimeout = self.timeout
88 93
89 status, output = self._run(sshCmd, processTimeout, True) 94 status, output = self._run(sshCmd, processTimeout, ignore_status)
90 self.logger.debug('Command: %s\nOutput: %s\n' % (command, output)) 95 self.logger.debug('Command: %s\nStatus: %d Output: %s\n' % (command, status, output))
91 if (status == 255) and (('No route to host') in output): 96
92 self.target_dumper.dump_target()
93 return (status, output) 97 return (status, output)
94 98
95 def copyTo(self, localSrc, remoteDst): 99 def copyTo(self, localSrc, remoteDst):
@@ -207,27 +211,41 @@ def SSHCall(command, logger, timeout=None, **opts):
207 def run(): 211 def run():
208 nonlocal output 212 nonlocal output
209 nonlocal process 213 nonlocal process
214 output_raw = b''
210 starttime = time.time() 215 starttime = time.time()
211 process = subprocess.Popen(command, **options) 216 process = subprocess.Popen(command, **options)
217 has_timeout = False
212 if timeout: 218 if timeout:
213 endtime = starttime + timeout 219 endtime = starttime + timeout
214 eof = False 220 eof = False
215 while time.time() < endtime and not eof: 221 os.set_blocking(process.stdout.fileno(), False)
216 logger.debug('time: %s, endtime: %s' % (time.time(), endtime)) 222 while not has_timeout and not eof:
217 try: 223 try:
224 logger.debug('Waiting for process output: time: %s, endtime: %s' % (time.time(), endtime))
218 if select.select([process.stdout], [], [], 5)[0] != []: 225 if select.select([process.stdout], [], [], 5)[0] != []:
219 reader = codecs.getreader('utf-8')(process.stdout, 'ignore') 226 # wait a bit for more data, tries to avoid reading single characters
220 data = reader.read(1024, 4096) 227 time.sleep(0.2)
228 data = process.stdout.read()
221 if not data: 229 if not data:
222 process.stdout.close()
223 eof = True 230 eof = True
224 else: 231 else:
225 output += data 232 output_raw += data
226 logger.debug('Partial data from SSH call: %s' % data) 233 # ignore errors to capture as much as possible
234 logger.debug('Partial data from SSH call:\n%s' % data.decode('utf-8', errors='ignore'))
227 endtime = time.time() + timeout 235 endtime = time.time() + timeout
228 except InterruptedError: 236 except InterruptedError:
237 logger.debug('InterruptedError')
238 continue
239 except BlockingIOError:
240 logger.debug('BlockingIOError')
229 continue 241 continue
230 242
243 if time.time() >= endtime:
244 logger.debug('SSHCall has timeout! Time: %s, endtime: %s' % (time.time(), endtime))
245 has_timeout = True
246
247 process.stdout.close()
248
231 # process hasn't returned yet 249 # process hasn't returned yet
232 if not eof: 250 if not eof:
233 process.terminate() 251 process.terminate()
@@ -235,16 +253,42 @@ def SSHCall(command, logger, timeout=None, **opts):
235 try: 253 try:
236 process.kill() 254 process.kill()
237 except OSError: 255 except OSError:
256 logger.debug('OSError when killing process')
238 pass 257 pass
239 endtime = time.time() - starttime 258 endtime = time.time() - starttime
240 lastline = ("\nProcess killed - no output for %d seconds. Total" 259 lastline = ("\nProcess killed - no output for %d seconds. Total"
241 " running time: %d seconds." % (timeout, endtime)) 260 " running time: %d seconds." % (timeout, endtime))
242 logger.debug('Received data from SSH call %s ' % lastline) 261 logger.debug('Received data from SSH call:\n%s ' % lastline)
243 output += lastline 262 output += lastline
263 process.wait()
244 264
245 else: 265 else:
246 output = process.communicate()[0].decode('utf-8', errors='ignore') 266 output_raw = process.communicate()[0]
247 logger.debug('Data from SSH call: %s' % output.rstrip()) 267
268 output = output_raw.decode('utf-8', errors='ignore')
269 logger.debug('Data from SSH call:\n%s' % output.rstrip())
270
271 # timout or not, make sure process exits and is not hanging
272 if process.returncode == None:
273 try:
274 process.wait(timeout=5)
275 except TimeoutExpired:
276 try:
277 process.kill()
278 except OSError:
279 logger.debug('OSError')
280 pass
281 process.wait()
282
283 if has_timeout:
284 # Version of openssh before 8.6_p1 returns error code 0 when killed
285 # by a signal, when the timeout occurs we will receive a 0 error
286 # code because the process is been terminated and it's wrong because
287 # that value means success, but the process timed out.
288 # Afterwards, from version 8.6_p1 onwards, the returned code is 255.
289 # Fix this behaviour by checking the return code
290 if process.returncode == 0:
291 process.returncode = 255
248 292
249 options = { 293 options = {
250 "stdout": subprocess.PIPE, 294 "stdout": subprocess.PIPE,
@@ -271,6 +315,9 @@ def SSHCall(command, logger, timeout=None, **opts):
271 # whilst running and ensure we don't leave a process behind. 315 # whilst running and ensure we don't leave a process behind.
272 if process.poll() is None: 316 if process.poll() is None:
273 process.kill() 317 process.kill()
318 if process.returncode == None:
319 process.wait()
274 logger.debug('Something went wrong, killing SSH process') 320 logger.debug('Something went wrong, killing SSH process')
275 raise 321 raise
276 return (process.wait(), output.rstrip()) 322
323 return (process.returncode, output.rstrip())
diff --git a/meta/lib/oeqa/core/tests/cases/timeout.py b/meta/lib/oeqa/core/tests/cases/timeout.py
index 5dfecc7b7c..69cf969a67 100644
--- a/meta/lib/oeqa/core/tests/cases/timeout.py
+++ b/meta/lib/oeqa/core/tests/cases/timeout.py
@@ -8,6 +8,7 @@ from time import sleep
8 8
9from oeqa.core.case import OETestCase 9from oeqa.core.case import OETestCase
10from oeqa.core.decorator.oetimeout import OETimeout 10from oeqa.core.decorator.oetimeout import OETimeout
11from oeqa.core.decorator.depends import OETestDepends
11 12
12class TimeoutTest(OETestCase): 13class TimeoutTest(OETestCase):
13 14
@@ -19,3 +20,15 @@ class TimeoutTest(OETestCase):
19 def testTimeoutFail(self): 20 def testTimeoutFail(self):
20 sleep(2) 21 sleep(2)
21 self.assertTrue(True, msg='How is this possible?') 22 self.assertTrue(True, msg='How is this possible?')
23
24
25 def testTimeoutSkip(self):
26 self.skipTest("This test needs to be skipped, so that testTimeoutDepends()'s OETestDepends kicks in")
27
28 @OETestDepends(["timeout.TimeoutTest.testTimeoutSkip"])
29 @OETimeout(3)
30 def testTimeoutDepends(self):
31 self.assertTrue(False, msg='How is this possible?')
32
33 def testTimeoutUnrelated(self):
34 sleep(6)
diff --git a/meta/lib/oeqa/core/tests/test_data.py b/meta/lib/oeqa/core/tests/test_data.py
index ac74098b78..acd726f3a0 100755
--- a/meta/lib/oeqa/core/tests/test_data.py
+++ b/meta/lib/oeqa/core/tests/test_data.py
@@ -33,7 +33,7 @@ class TestData(TestBase):
33 33
34 def test_data_fail_wrong_variable(self): 34 def test_data_fail_wrong_variable(self):
35 expectedError = 'AssertionError' 35 expectedError = 'AssertionError'
36 d = {'IMAGE' : 'core-image-sato', 'ARCH' : 'arm'} 36 d = {'IMAGE' : 'core-image-weston', 'ARCH' : 'arm'}
37 37
38 tc = self._testLoader(d=d, modules=self.modules) 38 tc = self._testLoader(d=d, modules=self.modules)
39 results = tc.runTests() 39 results = tc.runTests()
diff --git a/meta/lib/oeqa/core/tests/test_decorators.py b/meta/lib/oeqa/core/tests/test_decorators.py
index b798bf7d33..5095f39948 100755
--- a/meta/lib/oeqa/core/tests/test_decorators.py
+++ b/meta/lib/oeqa/core/tests/test_decorators.py
@@ -133,5 +133,11 @@ class TestTimeoutDecorator(TestBase):
133 msg = "OETestTimeout didn't restore SIGALRM" 133 msg = "OETestTimeout didn't restore SIGALRM"
134 self.assertIs(alarm_signal, signal.getsignal(signal.SIGALRM), msg=msg) 134 self.assertIs(alarm_signal, signal.getsignal(signal.SIGALRM), msg=msg)
135 135
136 def test_timeout_cancel(self):
137 tests = ['timeout.TimeoutTest.testTimeoutSkip', 'timeout.TimeoutTest.testTimeoutDepends', 'timeout.TimeoutTest.testTimeoutUnrelated']
138 msg = 'Unrelated test failed to complete'
139 tc = self._testLoader(modules=self.modules, tests=tests)
140 self.assertTrue(tc.runTests().wasSuccessful(), msg=msg)
141
136if __name__ == '__main__': 142if __name__ == '__main__':
137 unittest.main() 143 unittest.main()
diff --git a/meta/lib/oeqa/core/utils/concurrencytest.py b/meta/lib/oeqa/core/utils/concurrencytest.py
index b2eb68fb02..d10f8f7f04 100644
--- a/meta/lib/oeqa/core/utils/concurrencytest.py
+++ b/meta/lib/oeqa/core/utils/concurrencytest.py
@@ -1,5 +1,7 @@
1#!/usr/bin/env python3 1#!/usr/bin/env python3
2# 2#
3# Copyright OpenEmbedded Contributors
4#
3# SPDX-License-Identifier: GPL-2.0-or-later 5# SPDX-License-Identifier: GPL-2.0-or-later
4# 6#
5# Modified for use in OE by Richard Purdie, 2018 7# Modified for use in OE by Richard Purdie, 2018
@@ -48,11 +50,16 @@ _all__ = [
48# 50#
49class BBThreadsafeForwardingResult(ThreadsafeForwardingResult): 51class BBThreadsafeForwardingResult(ThreadsafeForwardingResult):
50 52
51 def __init__(self, target, semaphore, threadnum, totalinprocess, totaltests): 53 def __init__(self, target, semaphore, threadnum, totalinprocess, totaltests, output, finalresult):
52 super(BBThreadsafeForwardingResult, self).__init__(target, semaphore) 54 super(BBThreadsafeForwardingResult, self).__init__(target, semaphore)
53 self.threadnum = threadnum 55 self.threadnum = threadnum
54 self.totalinprocess = totalinprocess 56 self.totalinprocess = totalinprocess
55 self.totaltests = totaltests 57 self.totaltests = totaltests
58 self.buffer = True
59 self.outputbuf = output
60 self.finalresult = finalresult
61 self.finalresult.buffer = True
62 self.target = target
56 63
57 def _add_result_with_semaphore(self, method, test, *args, **kwargs): 64 def _add_result_with_semaphore(self, method, test, *args, **kwargs):
58 self.semaphore.acquire() 65 self.semaphore.acquire()
@@ -61,16 +68,19 @@ class BBThreadsafeForwardingResult(ThreadsafeForwardingResult):
61 self.result.starttime[test.id()] = self._test_start.timestamp() 68 self.result.starttime[test.id()] = self._test_start.timestamp()
62 self.result.threadprogress[self.threadnum].append(test.id()) 69 self.result.threadprogress[self.threadnum].append(test.id())
63 totalprogress = sum(len(x) for x in self.result.threadprogress.values()) 70 totalprogress = sum(len(x) for x in self.result.threadprogress.values())
64 self.result.progressinfo[test.id()] = "%s: %s/%s %s/%s (%ss) (%s)" % ( 71 self.result.progressinfo[test.id()] = "%s: %s/%s %s/%s (%ss) (%s failed) (%s)" % (
65 self.threadnum, 72 self.threadnum,
66 len(self.result.threadprogress[self.threadnum]), 73 len(self.result.threadprogress[self.threadnum]),
67 self.totalinprocess, 74 self.totalinprocess,
68 totalprogress, 75 totalprogress,
69 self.totaltests, 76 self.totaltests,
70 "{0:.2f}".format(time.time()-self._test_start.timestamp()), 77 "{0:.2f}".format(time.time()-self._test_start.timestamp()),
78 self.target.failed_tests,
71 test.id()) 79 test.id())
72 finally: 80 finally:
73 self.semaphore.release() 81 self.semaphore.release()
82 self.finalresult._stderr_buffer = io.StringIO(initial_value=self.outputbuf.getvalue().decode("utf-8"))
83 self.finalresult._stdout_buffer = io.StringIO()
74 super(BBThreadsafeForwardingResult, self)._add_result_with_semaphore(method, test, *args, **kwargs) 84 super(BBThreadsafeForwardingResult, self)._add_result_with_semaphore(method, test, *args, **kwargs)
75 85
76class ProxyTestResult: 86class ProxyTestResult:
@@ -183,35 +193,28 @@ class dummybuf(object):
183# 193#
184class ConcurrentTestSuite(unittest.TestSuite): 194class ConcurrentTestSuite(unittest.TestSuite):
185 195
186 def __init__(self, suite, processes, setupfunc, removefunc): 196 def __init__(self, suite, processes, setupfunc, removefunc, bb_vars):
187 super(ConcurrentTestSuite, self).__init__([suite]) 197 super(ConcurrentTestSuite, self).__init__([suite])
188 self.processes = processes 198 self.processes = processes
189 self.setupfunc = setupfunc 199 self.setupfunc = setupfunc
190 self.removefunc = removefunc 200 self.removefunc = removefunc
201 self.bb_vars = bb_vars
191 202
192 def run(self, result): 203 def run(self, result):
193 tests, totaltests = fork_for_tests(self.processes, self) 204 testservers, totaltests = fork_for_tests(self.processes, self)
194 try: 205 try:
195 threads = {} 206 threads = {}
196 queue = Queue() 207 queue = Queue()
197 semaphore = threading.Semaphore(1) 208 semaphore = threading.Semaphore(1)
198 result.threadprogress = {} 209 result.threadprogress = {}
199 for i, (test, testnum) in enumerate(tests): 210 for i, (testserver, testnum, output) in enumerate(testservers):
200 result.threadprogress[i] = [] 211 result.threadprogress[i] = []
201 process_result = BBThreadsafeForwardingResult( 212 process_result = BBThreadsafeForwardingResult(
202 ExtraResultsDecoderTestResult(result), 213 ExtraResultsDecoderTestResult(result),
203 semaphore, i, testnum, totaltests) 214 semaphore, i, testnum, totaltests, output, result)
204 # Force buffering of stdout/stderr so the console doesn't get corrupted by test output
205 # as per default in parent code
206 process_result.buffer = True
207 # We have to add a buffer object to stdout to keep subunit happy
208 process_result._stderr_buffer = io.StringIO()
209 process_result._stderr_buffer.buffer = dummybuf(process_result._stderr_buffer)
210 process_result._stdout_buffer = io.StringIO()
211 process_result._stdout_buffer.buffer = dummybuf(process_result._stdout_buffer)
212 reader_thread = threading.Thread( 215 reader_thread = threading.Thread(
213 target=self._run_test, args=(test, process_result, queue)) 216 target=self._run_test, args=(testserver, process_result, queue))
214 threads[test] = reader_thread, process_result 217 threads[testserver] = reader_thread, process_result
215 reader_thread.start() 218 reader_thread.start()
216 while threads: 219 while threads:
217 finished_test = queue.get() 220 finished_test = queue.get()
@@ -222,13 +225,13 @@ class ConcurrentTestSuite(unittest.TestSuite):
222 process_result.stop() 225 process_result.stop()
223 raise 226 raise
224 finally: 227 finally:
225 for test in tests: 228 for testserver in testservers:
226 test[0]._stream.close() 229 testserver[0]._stream.close()
227 230
228 def _run_test(self, test, process_result, queue): 231 def _run_test(self, testserver, process_result, queue):
229 try: 232 try:
230 try: 233 try:
231 test.run(process_result) 234 testserver.run(process_result)
232 except Exception: 235 except Exception:
233 # The run logic itself failed 236 # The run logic itself failed
234 case = testtools.ErrorHolder( 237 case = testtools.ErrorHolder(
@@ -236,12 +239,12 @@ class ConcurrentTestSuite(unittest.TestSuite):
236 error=sys.exc_info()) 239 error=sys.exc_info())
237 case.run(process_result) 240 case.run(process_result)
238 finally: 241 finally:
239 queue.put(test) 242 queue.put(testserver)
240 243
241def fork_for_tests(concurrency_num, suite): 244def fork_for_tests(concurrency_num, suite):
242 result = [] 245 testservers = []
243 if 'BUILDDIR' in os.environ: 246 if 'BUILDDIR' in os.environ:
244 selftestdir = get_test_layer() 247 selftestdir = get_test_layer(suite.bb_vars['BBLAYERS'])
245 248
246 test_blocks = partition_tests(suite, concurrency_num) 249 test_blocks = partition_tests(suite, concurrency_num)
247 # Clear the tests from the original suite so it doesn't keep them alive 250 # Clear the tests from the original suite so it doesn't keep them alive
@@ -261,7 +264,7 @@ def fork_for_tests(concurrency_num, suite):
261 ourpid = os.getpid() 264 ourpid = os.getpid()
262 try: 265 try:
263 newbuilddir = None 266 newbuilddir = None
264 stream = os.fdopen(c2pwrite, 'wb', 1) 267 stream = os.fdopen(c2pwrite, 'wb')
265 os.close(c2pread) 268 os.close(c2pread)
266 269
267 (builddir, newbuilddir) = suite.setupfunc("-st-" + str(ourpid), selftestdir, process_suite) 270 (builddir, newbuilddir) = suite.setupfunc("-st-" + str(ourpid), selftestdir, process_suite)
@@ -273,10 +276,11 @@ def fork_for_tests(concurrency_num, suite):
273 newsi = os.open(os.devnull, os.O_RDWR) 276 newsi = os.open(os.devnull, os.O_RDWR)
274 os.dup2(newsi, sys.stdin.fileno()) 277 os.dup2(newsi, sys.stdin.fileno())
275 278
279 # Send stdout/stderr over the stream
280 os.dup2(c2pwrite, sys.stdout.fileno())
281 os.dup2(c2pwrite, sys.stderr.fileno())
282
276 subunit_client = TestProtocolClient(stream) 283 subunit_client = TestProtocolClient(stream)
277 # Force buffering of stdout/stderr so the console doesn't get corrupted by test output
278 # as per default in parent code
279 subunit_client.buffer = True
280 subunit_result = AutoTimingTestResultDecorator(subunit_client) 284 subunit_result = AutoTimingTestResultDecorator(subunit_client)
281 unittest_result = process_suite.run(ExtraResultsEncoderTestResult(subunit_result)) 285 unittest_result = process_suite.run(ExtraResultsEncoderTestResult(subunit_result))
282 if ourpid != os.getpid(): 286 if ourpid != os.getpid():
@@ -305,10 +309,12 @@ def fork_for_tests(concurrency_num, suite):
305 os._exit(0) 309 os._exit(0)
306 else: 310 else:
307 os.close(c2pwrite) 311 os.close(c2pwrite)
308 stream = os.fdopen(c2pread, 'rb', 1) 312 stream = os.fdopen(c2pread, 'rb')
309 test = ProtocolTestCase(stream) 313 # Collect stdout/stderr into an io buffer
310 result.append((test, numtests)) 314 output = io.BytesIO()
311 return result, totaltests 315 testserver = ProtocolTestCase(stream, passthrough=output)
316 testservers.append((testserver, numtests, output))
317 return testservers, totaltests
312 318
313def partition_tests(suite, count): 319def partition_tests(suite, count):
314 # Keep tests from the same class together but allow tests from modules 320 # Keep tests from the same class together but allow tests from modules
diff --git a/meta/lib/oeqa/core/utils/misc.py b/meta/lib/oeqa/core/utils/misc.py
deleted file mode 100644
index e1a59588eb..0000000000
--- a/meta/lib/oeqa/core/utils/misc.py
+++ /dev/null
@@ -1,47 +0,0 @@
1#
2# Copyright (C) 2016 Intel Corporation
3#
4# SPDX-License-Identifier: MIT
5#
6
7def toList(obj, obj_type, obj_name="Object"):
8 if isinstance(obj, obj_type):
9 return [obj]
10 elif isinstance(obj, list):
11 return obj
12 else:
13 raise TypeError("%s must be %s or list" % (obj_name, obj_type))
14
15def toSet(obj, obj_type, obj_name="Object"):
16 if isinstance(obj, obj_type):
17 return {obj}
18 elif isinstance(obj, list):
19 return set(obj)
20 elif isinstance(obj, set):
21 return obj
22 else:
23 raise TypeError("%s must be %s or set" % (obj_name, obj_type))
24
25def strToList(obj, obj_name="Object"):
26 return toList(obj, str, obj_name)
27
28def strToSet(obj, obj_name="Object"):
29 return toSet(obj, str, obj_name)
30
31def intToList(obj, obj_name="Object"):
32 return toList(obj, int, obj_name)
33
34def dataStoteToDict(d, variables):
35 data = {}
36
37 for v in variables:
38 data[v] = d.getVar(v)
39
40 return data
41
42def updateTestData(d, td, variables):
43 """
44 Updates variables with values of data store to test data.
45 """
46 for var in variables:
47 td[var] = d.getVar(var)
diff --git a/meta/lib/oeqa/files/maturin/guessing-game/Cargo.toml b/meta/lib/oeqa/files/maturin/guessing-game/Cargo.toml
new file mode 100644
index 0000000000..de95025e86
--- /dev/null
+++ b/meta/lib/oeqa/files/maturin/guessing-game/Cargo.toml
@@ -0,0 +1,20 @@
1[package]
2name = "guessing-game"
3version = "0.1.0"
4edition = "2021"
5
6# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
7
8[lib]
9name = "guessing_game"
10# "cdylib" is necessary to produce a shared library for Python to import from.
11crate-type = ["cdylib"]
12
13[dependencies]
14rand = "0.8.4"
15
16[dependencies.pyo3]
17version = "0.19.0"
18# "abi3-py38" tells pyo3 (and maturin) to build using the stable ABI with minimum Python version 3.8
19features = ["abi3-py38"]
20
diff --git a/meta/lib/oeqa/files/maturin/guessing-game/LICENSE-APACHE b/meta/lib/oeqa/files/maturin/guessing-game/LICENSE-APACHE
new file mode 100644
index 0000000000..16fe87b06e
--- /dev/null
+++ b/meta/lib/oeqa/files/maturin/guessing-game/LICENSE-APACHE
@@ -0,0 +1,201 @@
1 Apache License
2 Version 2.0, January 2004
3 http://www.apache.org/licenses/
4
5TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6
71. Definitions.
8
9 "License" shall mean the terms and conditions for use, reproduction,
10 and distribution as defined by Sections 1 through 9 of this document.
11
12 "Licensor" shall mean the copyright owner or entity authorized by
13 the copyright owner that is granting the License.
14
15 "Legal Entity" shall mean the union of the acting entity and all
16 other entities that control, are controlled by, or are under common
17 control with that entity. For the purposes of this definition,
18 "control" means (i) the power, direct or indirect, to cause the
19 direction or management of such entity, whether by contract or
20 otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 outstanding shares, or (iii) beneficial ownership of such entity.
22
23 "You" (or "Your") shall mean an individual or Legal Entity
24 exercising permissions granted by this License.
25
26 "Source" form shall mean the preferred form for making modifications,
27 including but not limited to software source code, documentation
28 source, and configuration files.
29
30 "Object" form shall mean any form resulting from mechanical
31 transformation or translation of a Source form, including but
32 not limited to compiled object code, generated documentation,
33 and conversions to other media types.
34
35 "Work" shall mean the work of authorship, whether in Source or
36 Object form, made available under the License, as indicated by a
37 copyright notice that is included in or attached to the work
38 (an example is provided in the Appendix below).
39
40 "Derivative Works" shall mean any work, whether in Source or Object
41 form, that is based on (or derived from) the Work and for which the
42 editorial revisions, annotations, elaborations, or other modifications
43 represent, as a whole, an original work of authorship. For the purposes
44 of this License, Derivative Works shall not include works that remain
45 separable from, or merely link (or bind by name) to the interfaces of,
46 the Work and Derivative Works thereof.
47
48 "Contribution" shall mean any work of authorship, including
49 the original version of the Work and any modifications or additions
50 to that Work or Derivative Works thereof, that is intentionally
51 submitted to Licensor for inclusion in the Work by the copyright owner
52 or by an individual or Legal Entity authorized to submit on behalf of
53 the copyright owner. For the purposes of this definition, "submitted"
54 means any form of electronic, verbal, or written communication sent
55 to the Licensor or its representatives, including but not limited to
56 communication on electronic mailing lists, source code control systems,
57 and issue tracking systems that are managed by, or on behalf of, the
58 Licensor for the purpose of discussing and improving the Work, but
59 excluding communication that is conspicuously marked or otherwise
60 designated in writing by the copyright owner as "Not a Contribution."
61
62 "Contributor" shall mean Licensor and any individual or Legal Entity
63 on behalf of whom a Contribution has been received by Licensor and
64 subsequently incorporated within the Work.
65
662. Grant of Copyright License. Subject to the terms and conditions of
67 this License, each Contributor hereby grants to You a perpetual,
68 worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 copyright license to reproduce, prepare Derivative Works of,
70 publicly display, publicly perform, sublicense, and distribute the
71 Work and such Derivative Works in Source or Object form.
72
733. Grant of Patent License. Subject to the terms and conditions of
74 this License, each Contributor hereby grants to You a perpetual,
75 worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 (except as stated in this section) patent license to make, have made,
77 use, offer to sell, sell, import, and otherwise transfer the Work,
78 where such license applies only to those patent claims licensable
79 by such Contributor that are necessarily infringed by their
80 Contribution(s) alone or by combination of their Contribution(s)
81 with the Work to which such Contribution(s) was submitted. If You
82 institute patent litigation against any entity (including a
83 cross-claim or counterclaim in a lawsuit) alleging that the Work
84 or a Contribution incorporated within the Work constitutes direct
85 or contributory patent infringement, then any patent licenses
86 granted to You under this License for that Work shall terminate
87 as of the date such litigation is filed.
88
894. Redistribution. You may reproduce and distribute copies of the
90 Work or Derivative Works thereof in any medium, with or without
91 modifications, and in Source or Object form, provided that You
92 meet the following conditions:
93
94 (a) You must give any other recipients of the Work or
95 Derivative Works a copy of this License; and
96
97 (b) You must cause any modified files to carry prominent notices
98 stating that You changed the files; and
99
100 (c) You must retain, in the Source form of any Derivative Works
101 that You distribute, all copyright, patent, trademark, and
102 attribution notices from the Source form of the Work,
103 excluding those notices that do not pertain to any part of
104 the Derivative Works; and
105
106 (d) If the Work includes a "NOTICE" text file as part of its
107 distribution, then any Derivative Works that You distribute must
108 include a readable copy of the attribution notices contained
109 within such NOTICE file, excluding those notices that do not
110 pertain to any part of the Derivative Works, in at least one
111 of the following places: within a NOTICE text file distributed
112 as part of the Derivative Works; within the Source form or
113 documentation, if provided along with the Derivative Works; or,
114 within a display generated by the Derivative Works, if and
115 wherever such third-party notices normally appear. The contents
116 of the NOTICE file are for informational purposes only and
117 do not modify the License. You may add Your own attribution
118 notices within Derivative Works that You distribute, alongside
119 or as an addendum to the NOTICE text from the Work, provided
120 that such additional attribution notices cannot be construed
121 as modifying the License.
122
123 You may add Your own copyright statement to Your modifications and
124 may provide additional or different license terms and conditions
125 for use, reproduction, or distribution of Your modifications, or
126 for any such Derivative Works as a whole, provided Your use,
127 reproduction, and distribution of the Work otherwise complies with
128 the conditions stated in this License.
129
1305. Submission of Contributions. Unless You explicitly state otherwise,
131 any Contribution intentionally submitted for inclusion in the Work
132 by You to the Licensor shall be under the terms and conditions of
133 this License, without any additional terms or conditions.
134 Notwithstanding the above, nothing herein shall supersede or modify
135 the terms of any separate license agreement you may have executed
136 with Licensor regarding such Contributions.
137
1386. Trademarks. This License does not grant permission to use the trade
139 names, trademarks, service marks, or product names of the Licensor,
140 except as required for reasonable and customary use in describing the
141 origin of the Work and reproducing the content of the NOTICE file.
142
1437. Disclaimer of Warranty. Unless required by applicable law or
144 agreed to in writing, Licensor provides the Work (and each
145 Contributor provides its Contributions) on an "AS IS" BASIS,
146 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 implied, including, without limitation, any warranties or conditions
148 of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 PARTICULAR PURPOSE. You are solely responsible for determining the
150 appropriateness of using or redistributing the Work and assume any
151 risks associated with Your exercise of permissions under this License.
152
1538. Limitation of Liability. In no event and under no legal theory,
154 whether in tort (including negligence), contract, or otherwise,
155 unless required by applicable law (such as deliberate and grossly
156 negligent acts) or agreed to in writing, shall any Contributor be
157 liable to You for damages, including any direct, indirect, special,
158 incidental, or consequential damages of any character arising as a
159 result of this License or out of the use or inability to use the
160 Work (including but not limited to damages for loss of goodwill,
161 work stoppage, computer failure or malfunction, or any and all
162 other commercial damages or losses), even if such Contributor
163 has been advised of the possibility of such damages.
164
1659. Accepting Warranty or Additional Liability. While redistributing
166 the Work or Derivative Works thereof, You may choose to offer,
167 and charge a fee for, acceptance of support, warranty, indemnity,
168 or other liability obligations and/or rights consistent with this
169 License. However, in accepting such obligations, You may act only
170 on Your own behalf and on Your sole responsibility, not on behalf
171 of any other Contributor, and only if You agree to indemnify,
172 defend, and hold each Contributor harmless for any liability
173 incurred by, or claims asserted against, such Contributor by reason
174 of your accepting any such warranty or additional liability.
175
176END OF TERMS AND CONDITIONS
177
178APPENDIX: How to apply the Apache License to your work.
179
180 To apply the Apache License to your work, attach the following
181 boilerplate notice, with the fields enclosed by brackets "[]"
182 replaced with your own identifying information. (Don't include
183 the brackets!) The text should be enclosed in the appropriate
184 comment syntax for the file format. We also recommend that a
185 file or class name and description of purpose be included on the
186 same "printed page" as the copyright notice for easier
187 identification within third-party archives.
188
189Copyright [yyyy] [name of copyright owner]
190
191Licensed under the Apache License, Version 2.0 (the "License");
192you may not use this file except in compliance with the License.
193You may obtain a copy of the License at
194
195 http://www.apache.org/licenses/LICENSE-2.0
196
197Unless required by applicable law or agreed to in writing, software
198distributed under the License is distributed on an "AS IS" BASIS,
199WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200See the License for the specific language governing permissions and
201limitations under the License.
diff --git a/meta/lib/oeqa/files/maturin/guessing-game/LICENSE-MIT b/meta/lib/oeqa/files/maturin/guessing-game/LICENSE-MIT
new file mode 100644
index 0000000000..c4a9a58791
--- /dev/null
+++ b/meta/lib/oeqa/files/maturin/guessing-game/LICENSE-MIT
@@ -0,0 +1,25 @@
1Copyright (c) 2018 konstin
2
3Permission is hereby granted, free of charge, to any
4person obtaining a copy of this software and associated
5documentation files (the "Software"), to deal in the
6Software without restriction, including without
7limitation the rights to use, copy, modify, merge,
8publish, distribute, sublicense, and/or sell copies of
9the Software, and to permit persons to whom the Software
10is furnished to do so, subject to the following
11conditions:
12
13The above copyright notice and this permission notice
14shall be included in all copies or substantial portions
15of the Software.
16
17THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
18ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
19TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
20PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
21SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
22CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
23OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
24IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
25DEALINGS IN THE SOFTWARE.
diff --git a/meta/lib/oeqa/files/maturin/guessing-game/pyproject.toml b/meta/lib/oeqa/files/maturin/guessing-game/pyproject.toml
new file mode 100644
index 0000000000..ff35abc472
--- /dev/null
+++ b/meta/lib/oeqa/files/maturin/guessing-game/pyproject.toml
@@ -0,0 +1,8 @@
1[build-system]
2requires = ["maturin>=1.0,<2.0"]
3build-backend = "maturin"
4
5[tool.maturin]
6# "extension-module" tells pyo3 we want to build an extension module (skips linking against libpython.so)
7features = ["pyo3/extension-module"]
8
diff --git a/meta/lib/oeqa/files/maturin/guessing-game/src/lib.rs b/meta/lib/oeqa/files/maturin/guessing-game/src/lib.rs
new file mode 100644
index 0000000000..6828466ed1
--- /dev/null
+++ b/meta/lib/oeqa/files/maturin/guessing-game/src/lib.rs
@@ -0,0 +1,48 @@
1use pyo3::prelude::*;
2use rand::Rng;
3use std::cmp::Ordering;
4use std::io;
5
6#[pyfunction]
7fn guess_the_number() {
8 println!("Guess the number!");
9
10 let secret_number = rand::thread_rng().gen_range(1..101);
11
12 loop {
13 println!("Please input your guess.");
14
15 let mut guess = String::new();
16
17 io::stdin()
18 .read_line(&mut guess)
19 .expect("Failed to read line");
20
21 let guess: u32 = match guess.trim().parse() {
22 Ok(num) => num,
23 Err(_) => continue,
24 };
25
26 println!("You guessed: {}", guess);
27
28 match guess.cmp(&secret_number) {
29 Ordering::Less => println!("Too small!"),
30 Ordering::Greater => println!("Too big!"),
31 Ordering::Equal => {
32 println!("You win!");
33 break;
34 }
35 }
36 }
37}
38
39/// A Python module implemented in Rust. The name of this function must match
40/// the `lib.name` setting in the `Cargo.toml`, else Python will not be able to
41/// import the module.
42#[pymodule]
43fn guessing_game(_py: Python, m: &PyModule) -> PyResult<()> {
44 m.add_function(wrap_pyfunction!(guess_the_number, m)?)?;
45
46 Ok(())
47}
48
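[Note on the lib.rs above] The module only becomes importable after maturin builds the wheel described by the Cargo.toml and pyproject.toml above and that wheel is installed (for example with pip on the target). A hypothetical use from Python would be:

    # Hypothetical usage; the module name matches [lib] name = "guessing_game"
    # in Cargo.toml and the #[pymodule] function above.
    import guessing_game

    guessing_game.guess_the_number()  # interactive loop reading guesses from stdin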
diff --git a/meta/lib/oeqa/files/test.rs b/meta/lib/oeqa/files/test.rs
new file mode 100644
index 0000000000..f79c691f08
--- /dev/null
+++ b/meta/lib/oeqa/files/test.rs
@@ -0,0 +1,2 @@
1fn main() {
2}
diff --git a/meta/lib/oeqa/files/testresults/testresults.json b/meta/lib/oeqa/files/testresults/testresults.json
index 1a62155618..86e5e412af 100644
--- a/meta/lib/oeqa/files/testresults/testresults.json
+++ b/meta/lib/oeqa/files/testresults/testresults.json
@@ -1,5 +1,5 @@
1{ 1{
2 "runtime_core-image-minimal_qemuarm_20181225195701": { 2 "runtime_core-image-minimal:qemuarm_20181225195701": {
3 "configuration": { 3 "configuration": {
4 "DISTRO": "poky", 4 "DISTRO": "poky",
5 "HOST_DISTRO": "ubuntu-16.04", 5 "HOST_DISTRO": "ubuntu-16.04",
diff --git a/meta/lib/oeqa/manual/bsp-hw.json b/meta/lib/oeqa/manual/bsp-hw.json
index 75b89758cb..308a0807f3 100644
--- a/meta/lib/oeqa/manual/bsp-hw.json
+++ b/meta/lib/oeqa/manual/bsp-hw.json
@@ -26,7 +26,7 @@
26 "expected_results": "" 26 "expected_results": ""
27 }, 27 },
28 "5": { 28 "5": {
29 "action": "Remove USB, and reboot into new installed system. \nNote: If installation was successfully completed and received this message \"\"(sdx): Volume was not properly unmounted...Please run fsck.\"\" ignore it because this was whitelisted according to bug 9652.", 29 "action": "Remove USB, and reboot into new installed system. \nNote: If installation was successfully completed and received this message \"\"(sdx): Volume was not properly unmounted...Please run fsck.\"\" ignore it because this was allowed according to bug 9652.",
30 "expected_results": "" 30 "expected_results": ""
31 } 31 }
32 }, 32 },
@@ -61,70 +61,6 @@
61 }, 61 },
62 { 62 {
63 "test": { 63 "test": {
64 "@alias": "bsps-hw.bsps-hw.boot_from_runlevel_3",
65 "author": [
66 {
67 "email": "alexandru.c.georgescu@intel.com",
68 "name": "alexandru.c.georgescu@intel.com"
69 }
70 ],
71 "execution": {
72 "1": {
73 "action": "Boot into system and edit /etc/inittab to make sure that system enter at the run level 3 by default, this is done by changing the line \n\n\nid:5:initdefault \n\nto \n\nid:3:initdefault \n\n",
74 "expected_results": ""
75 },
76 "2": {
77 "action": "Reboot system, and press \"Tab\" to enter \"grub\"",
78 "expected_results": ""
79 },
80 "3": {
81 "action": "Get into the \"kernel\" line with the edit option \"e\" and add \"psplash=false text\" at the end line.",
82 "expected_results": ""
83 },
84 "4": {
85 "action": "Press \"F10\" or \"ctrl+x\" to boot system",
86 "expected_results": ""
87 },
88 "5": {
89 "action": "If system ask you for a login type \"root\"",
90 "expected_results": "System should boot to run level 3, showing the command prompt."
91 }
92 },
93 "summary": "boot_from_runlevel_3"
94 }
95 },
96 {
97 "test": {
98 "@alias": "bsps-hw.bsps-hw.boot_from_runlevel_5",
99 "author": [
100 {
101 "email": "alexandru.c.georgescu@intel.com",
102 "name": "alexandru.c.georgescu@intel.com"
103 }
104 ],
105 "execution": {
106 "1": {
107 "action": "Boot into system and edit /etc/inittab to make sure that system enter at the run level 5 by default, this is done by changing the line \n\nid:3:initdefault \n\nto \n\nid:5:initdefault \n\n",
108 "expected_results": ""
109 },
110 "2": {
111 "action": "Reboot system, and press \"Tab\" to enter \"grub\"",
112 "expected_results": ""
113 },
114 "3": {
115 "action": "Get into the \"kernel\" line with the edit option \"e\" and add \"psplash=false text\" at the end line.",
116 "expected_results": ""
117 },
118 "4": {
119 "action": "Press \"F10\" or \"ctrl+x\" to boot system \nNote: The test is only for sato image.",
120 "expected_results": "System should boot to runlevel 5 ."
121 }
122 },
123 "summary": "boot_from_runlevel_5"
124 }
125 },
126 {
127 "test": {
128 "@alias": "bsps-hw.bsps-hw.switch_among_multi_applications_and_desktop", 64 "@alias": "bsps-hw.bsps-hw.switch_among_multi_applications_and_desktop",
129 "author": [ 65 "author": [
130 { 66 {
@@ -155,70 +91,6 @@
155 }, 91 },
156 { 92 {
157 "test": { 93 "test": {
158 "@alias": "bsps-hw.bsps-hw.ethernet_static_ip_set_in_connman",
159 "author": [
160 {
161 "email": "alexandru.c.georgescu@intel.com",
162 "name": "alexandru.c.georgescu@intel.com"
163 }
164 ],
165 "execution": {
166 "1": {
167 "action": "Boot the system and check internet connection is on . ",
168 "expected_results": ""
169 },
170 "2": {
171 "action": "Launch connmand-properties (up-right corner on desktop)",
172 "expected_results": ""
173 },
174 "3": {
175 "action": "Choose Ethernet device and set a valid static ip address for it. \nFor example, in our internal network, we can set as following: \nip address: 10.239.48.xxx \nMask: 255.255.255.0 \nGateway (Broadcast): 10.239.48.255",
176 "expected_results": ""
177 },
178 "4": {
179 "action": "Check the Network configuration with \"ifconfig\"",
180 "expected_results": "Static IP was set successfully \n"
181 },
182 "5": {
183 "action": "ping to another IP adress",
184 "expected_results": "Ping works correclty\n"
185 }
186 },
187 "summary": "ethernet_static_ip_set_in_connman"
188 }
189 },
190 {
191 "test": {
192 "@alias": "bsps-hw.bsps-hw.ethernet_get_IP_in_connman_via_DHCP",
193 "author": [
194 {
195 "email": "alexandru.c.georgescu@intel.com",
196 "name": "alexandru.c.georgescu@intel.com"
197 }
198 ],
199 "execution": {
200 "1": {
201 "action": "Launch connmand-properties (up-right corner on your desktop). ",
202 "expected_results": ""
203 },
204 "2": {
205 "action": "Check if Ethernet device can work properly with static IP, doing \"ping XXX.XXX.XXX.XXX\", once this is set.",
206 "expected_results": "Ping executed successfully . \n\n"
207 },
208 "3": {
209 "action": "Then choose DHCP method for Ethernet device in connmand-properties.",
210 "expected_results": ""
211 },
212 "4": {
213 "action": "Check with 'ifconfig\" and \"ping\" if Ethernet device get IP address via DHCP.",
214 "expected_results": "Ethernet device can get dynamic IP address via DHCP in connmand ."
215 }
216 },
217 "summary": "ethernet_get_IP_in_connman_via_DHCP"
218 }
219 },
220 {
221 "test": {
222 "@alias": "bsps-hw.bsps-hw.connman_offline_mode_in_connman-gnome", 94 "@alias": "bsps-hw.bsps-hw.connman_offline_mode_in_connman-gnome",
223 "author": [ 95 "author": [
224 { 96 {
@@ -241,40 +113,6 @@
241 }, 113 },
242 { 114 {
243 "test": { 115 "test": {
244 "@alias": "bsps-hw.bsps-hw.standby",
245 "author": [
246 {
247 "email": "alexandru.c.georgescu@intel.com",
248 "name": "alexandru.c.georgescu@intel.com"
249 }
250 ],
251 "execution": {
252 "1": {
253 "action": "boot system and launch terminal; check output of \"date\" and launch script \"continue.sh\"",
254 "expected_results": ""
255 },
256 "2": {
257 "action": "echo \"mem\" > /sys/power/state",
258 "expected_results": ""
259 },
260 "3": {
261 "action": "After system go into S3 mode, move mouse or press any key to make it resume (on NUC press power button)",
262 "expected_results": ""
263 },
264 "4": {
265 "action": "Check \"date\" and script \"continue.sh\"",
266 "expected_results": ""
267 },
268 "5": {
269 "action": "Check if application can work as normal \ncontinue.sh as below: \n \n#!/bin/sh \n \ni=1 \nwhile [ 0 ] \ndo \n echo $i \n sleep 1 \n i=$((i+1)) \ndone ",
270 "expected_results": "Screen should resume back and script can run continuously incrementing the i's value from where it was before going to standby state. Date should be the same with the corresponding time increment."
271 }
272 },
273 "summary": "standby"
274 }
275 },
276 {
277 "test": {
278 "@alias": "bsps-hw.bsps-hw.check_CPU_utilization_after_standby", 116 "@alias": "bsps-hw.bsps-hw.check_CPU_utilization_after_standby",
279 "author": [ 117 "author": [
280 { 118 {
@@ -305,88 +143,6 @@
305 }, 143 },
306 { 144 {
307 "test": { 145 "test": {
308 "@alias": "bsps-hw.bsps-hw.Test_if_LAN_device_works_well_after_resume_from_suspend_state",
309 "author": [
310 {
311 "email": "alexandru.c.georgescu@intel.com",
312 "name": "alexandru.c.georgescu@intel.com"
313 }
314 ],
315 "execution": {
316 "1": {
317 "action": "boot system and launch terminal",
318 "expected_results": ""
319 },
320 "2": {
321 "action": "echo \"mem\" > /sys/power/state",
322 "expected_results": ""
323 },
324 "3": {
325 "action": "After system go into S3 mode, move mouse or press any key to make it resume",
326 "expected_results": ""
327 },
328 "4": {
329 "action": "check ping status \n\nNote: This TC apply only for core-image-full-cmd.",
330 "expected_results": "ping should always work before/after standby"
331 }
332 },
333 "summary": "Test_if_LAN_device_works_well_after_resume_from_suspend_state"
334 }
335 },
336 {
337 "test": {
338 "@alias": "bsps-hw.bsps-hw.Test_if_usb_hid_device_works_well_after_resume_from_suspend_state",
339 "author": [
340 {
341 "email": "alexandru.c.georgescu@intel.com",
342 "name": "alexandru.c.georgescu@intel.com"
343 }
344 ],
345 "execution": {
346 "1": {
347 "action": "boot system and launch terminal",
348 "expected_results": ""
349 },
350 "2": {
351 "action": "echo \"mem\" > /sys/power/state",
352 "expected_results": ""
353 },
354 "3": {
355 "action": "After system go into S3 mode, resume the device by pressing the power button or using HID devices",
356 "expected_results": "Devices resumes "
357 },
358 "4": {
359 "action": "check usb mouse and keyboard",
360 "expected_results": "Usb mouse and keyboard should work"
361 }
362 },
363 "summary": "Test_if_usb_hid_device_works_well_after_resume_from_suspend_state"
364 }
365 },
366 {
367 "test": {
368 "@alias": "bsps-hw.bsps-hw.click_terminal_icon_on_X_desktop",
369 "author": [
370 {
371 "email": "alexandru.c.georgescu@intel.com",
372 "name": "alexandru.c.georgescu@intel.com"
373 }
374 ],
375 "execution": {
376 "1": {
377 "action": "After system launch and X start up, click terminal icon on desktop",
378 "expected_results": ""
379 },
380 "2": {
381 "action": "Check if only one terminal window launched and no other problem met",
382 "expected_results": "There should be no problem after launching terminal . "
383 }
384 },
385 "summary": "click_terminal_icon_on_X_desktop"
386 }
387 },
388 {
389 "test": {
390 "@alias": "bsps-hw.bsps-hw.Add_multiple_files_in_media_player", 146 "@alias": "bsps-hw.bsps-hw.Add_multiple_files_in_media_player",
391 "author": [ 147 "author": [
392 { 148 {
@@ -839,40 +595,6 @@
839 }, 595 },
840 { 596 {
841 "test": { 597 "test": {
842 "@alias": "bsps-hw.bsps-hw.Check_if_RTC_(Real_Time_Clock)_can_work_correctly",
843 "author": [
844 {
845 "email": "yi.zhao@windriver.com",
846 "name": "yi.zhao@windriver.com"
847 }
848 ],
849 "execution": {
850 "1": {
851 "action": "Read time from RTC registers. root@localhost:/root> hwclock -r Sun Mar 22 04:05:47 1970 -0.001948 seconds ",
852 "expected_results": "Can read and set the time from RTC.\n"
853 },
854 "2": {
855 "action": "Set system current time root@localhost:/root> date 062309452008 ",
856 "expected_results": ""
857 },
858 "3": {
859 "action": "Synchronize the system current time to RTC registers root@localhost:/root> hwclock -w ",
860 "expected_results": ""
861 },
862 "4": {
863 "action": "Read time from RTC registers root@localhost:/root> hwclock -r ",
864 "expected_results": ""
865 },
866 "5": {
867 "action": "Reboot target and read time from RTC again\n",
868 "expected_results": ""
869 }
870 },
871 "summary": "Check_if_RTC_(Real_Time_Clock)_can_work_correctly"
872 }
873 },
874 {
875 "test": {
876 "@alias": "bsps-hw.bsps-hw.System_can_boot_up_via_NFS", 598 "@alias": "bsps-hw.bsps-hw.System_can_boot_up_via_NFS",
877 "author": [ 599 "author": [
878 { 600 {
diff --git a/meta/lib/oeqa/manual/build-appliance.json b/meta/lib/oeqa/manual/build-appliance.json
index 70f8c72c9b..82a556e93e 100644
--- a/meta/lib/oeqa/manual/build-appliance.json
+++ b/meta/lib/oeqa/manual/build-appliance.json
@@ -48,7 +48,7 @@
48 "expected_results": "" 48 "expected_results": ""
49 }, 49 },
50 "3": { 50 "3": {
51 "action": "Install a new package to the image, for example, acpid. Set the following line in conf/local.conf: IMAGE_INSTALL_append = \" acpid\"", 51 "action": "Install a new package to the image, for example, acpid. Set the following line in conf/local.conf: IMAGE_INSTALL:append = \" acpid\"",
52 "expected_results": "" 52 "expected_results": ""
53 }, 53 },
54 "4": { 54 "4": {
diff --git a/meta/lib/oeqa/manual/eclipse-plugin.json b/meta/lib/oeqa/manual/eclipse-plugin.json
index d77d0e673b..6c110d0656 100644
--- a/meta/lib/oeqa/manual/eclipse-plugin.json
+++ b/meta/lib/oeqa/manual/eclipse-plugin.json
@@ -44,7 +44,7 @@
44 "expected_results": "" 44 "expected_results": ""
45 }, 45 },
46 "2": { 46 "2": {
47 "action": "wget autobuilder.yoctoproject.org/pub/releases//machines/qemu/qemux86/qemu (ex:core-image-sato-sdk-qemux86-date-rootfs-tar-bz2) \nsource /opt/poky/version/environment-setup-i585-poky-linux \n\nExtract qemu with runqemu-extract-sdk /home/user/file(ex.core-image-sato-sdk-qemux86.bz2) \n/home/user/qemux86-sato-sdk \n\n", 47 "action": "wget https://downloads.yoctoproject.org/releases/yocto/yocto-$VERSION/machines/qemu/qemux86/ (ex:core-image-sato-sdk-qemux86-date-rootfs-tar-bz2) \nsource /opt/poky/version/environment-setup-i585-poky-linux \n\nExtract qemu with runqemu-extract-sdk /home/user/file(ex.core-image-sato-sdk-qemux86.bz2) \n/home/user/qemux86-sato-sdk \n\n",
48 "expected_results": " Qemu can be lauched normally." 48 "expected_results": " Qemu can be lauched normally."
49 }, 49 },
50 "3": { 50 "3": {
@@ -60,7 +60,7 @@
60 "expected_results": "" 60 "expected_results": ""
61 }, 61 },
62 "6": { 62 "6": {
63 "action": "(d) QEMU: \nSelect this option if you will be using the QEMU emulator. Specify the Kernel matching the QEMU architecture you are using. \n wget autobuilder.yoctoproject.org/pub/releases//machines/qemu/qemux86/bzImage-qemux86.bin \n e.g: /home/$USER/yocto/adt-installer/download_image/bzImage-qemux86.bin \n\n", 63 "action": "(d) QEMU: \nSelect this option if you will be using the QEMU emulator. Specify the Kernel matching the QEMU architecture you are using. \n wget https://downloads.yoctoproject.org/releases/yocto/yocto-$VERSION/machines/qemu/qemux86/bzImage-qemux86.bin \n e.g: /home/$USER/yocto/adt-installer/download_image/bzImage-qemux86.bin \n\n",
64 "expected_results": "" 64 "expected_results": ""
65 }, 65 },
66 "7": { 66 "7": {
@@ -247,7 +247,7 @@
247 "execution": { 247 "execution": {
248 "1": { 248 "1": {
249 "action": "Clone eclipse-poky source. \n \n - git clone git://git.yoctoproject.org/eclipse-poky \n\n", 249 "action": "Clone eclipse-poky source. \n \n - git clone git://git.yoctoproject.org/eclipse-poky \n\n",
250 "expected_results": "Eclipse plugin is successfully installed \n\nDocumentation is there. For example if you have release yocto-2.0.1 you will found on http://autobuilder.yoctoproject.org/pub/releases/yocto-2.0.1/eclipse-plugin/mars/ archive with documentation like org.yocto.doc-development-$date.zip \n \n" 250 "expected_results": "Eclipse plugin is successfully installed \n\nDocumentation is there. For example if you have release yocto-2.0.1 you will found on https://downloads.yoctoproject.org/releases/yocto/yocto-2.0.1/eclipse-plugin/mars/ archive with documentation like org.yocto.doc-development-$date.zip \n \n"
251 }, 251 },
252 "2": { 252 "2": {
253 "action": "Checkout correct tag. \n\n - git checkout <eclipse-version>/<yocto-version> \n\n", 253 "action": "Checkout correct tag. \n\n - git checkout <eclipse-version>/<yocto-version> \n\n",
diff --git a/meta/lib/oeqa/manual/sdk.json b/meta/lib/oeqa/manual/sdk.json
index 434982f7f5..21d892d26d 100644
--- a/meta/lib/oeqa/manual/sdk.json
+++ b/meta/lib/oeqa/manual/sdk.json
@@ -26,7 +26,7 @@
26 "expected_results": "Expect both qemu to boot up successfully." 26 "expected_results": "Expect both qemu to boot up successfully."
27 } 27 }
28 }, 28 },
29 "summary": "test_install_cross_toolchain_can_run_multiple_qemu_for_x86" 29 "summary": "test_install_cross_toolchain_can_run_multiple_qemu_for:x86"
30 } 30 }
31 } 31 }
32] \ No newline at end of file 32] \ No newline at end of file
diff --git a/meta/lib/oeqa/manual/toaster-managed-mode.json b/meta/lib/oeqa/manual/toaster-managed-mode.json
index 12374c7c64..1a71985c3c 100644
--- a/meta/lib/oeqa/manual/toaster-managed-mode.json
+++ b/meta/lib/oeqa/manual/toaster-managed-mode.json
@@ -136,7 +136,7 @@
136 "expected_results": "" 136 "expected_results": ""
137 }, 137 },
138 "3": { 138 "3": {
139 "action": "Check that default values are as follows: \n\tDISTRO - poky \n\tIMAGE_FSTYPES - ext3 jffs2 tar.bz2 \n\tIMAGE_INSTALL_append - \"Not set\" \n\tPACKAGE_CLASES - package_rpm \n SSTATE_DIR - /homeDirectory/poky/sstate-cache \n\n", 139 "action": "Check that default values are as follows: \n\tDISTRO - poky \n\tIMAGE_FSTYPES - ext3 jffs2 tar.bz2 \n\tIMAGE_INSTALL:append - \"Not set\" \n\tPACKAGE_CLASSES - package_rpm \n SSTATE_DIR - /homeDirectory/poky/sstate-cache \n\n",
140 "expected_results": "" 140 "expected_results": ""
141 }, 141 },
142 "4": { 142 "4": {
@@ -186,7 +186,7 @@
186 "expected_results": "" 186 "expected_results": ""
187 }, 187 },
188 "7": { 188 "7": {
189 "action": "IMAGE_INSTALL_append: \n\t- check that the \"change\" icon is present (represented by a pen icon) \n\t- click on the \"change\" icon and check that the variable becomes a text field, populated with the current value of the variable. \n\n\t- check that the save button is disabled when the text field is empty \n\t- insert test in the text field (for example \"package1\") and hit save; be aware that there is no input validation for this variable \n\t- check that a new \"delete\" icon(a trashcan) has appeared next to the pen icon \n\t- check that clicking on the trashcan icon resets the value to \"Not set\" and makes the trashcan icon dissapear \n\n", 189 "action": "IMAGE_INSTALL:append: \n\t- check that the \"change\" icon is present (represented by a pen icon) \n\t- click on the \"change\" icon and check that the variable becomes a text field, populated with the current value of the variable. \n\n\t- check that the save button is disabled when the text field is empty \n\t- insert test in the text field (for example \"package1\") and hit save; be aware that there is no input validation for this variable \n\t- check that a new \"delete\" icon(a trashcan) has appeared next to the pen icon \n\t- check that clicking on the trashcan icon resets the value to \"Not set\" and makes the trashcan icon dissapear \n\n",
190 "expected_results": "" 190 "expected_results": ""
191 }, 191 },
192 "8": { 192 "8": {
@@ -1574,7 +1574,7 @@
1574 "expected_results": "Open bitbake variables page. \n\n\t" 1574 "expected_results": "Open bitbake variables page. \n\n\t"
1575 }, 1575 },
1576 "5": { 1576 "5": {
1577 "action": "Click on change button for IMAGE_INSTALL_append and add a variable (ex: acpid). \n\n", 1577 "action": "Click on change button for IMAGE_INSTALL:append and add a variable (ex: acpid). \n\n",
1578 "expected_results": "Variable added. \n\n\t" 1578 "expected_results": "Variable added. \n\n\t"
1579 }, 1579 },
1580 "6": { 1580 "6": {
@@ -1590,7 +1590,7 @@
1590 "expected_results": "You should get results for ssh packages." 1590 "expected_results": "You should get results for ssh packages."
1591 } 1591 }
1592 }, 1592 },
1593 "summary": "Test_IMAGE_INSTALL_append_variable" 1593 "summary": "Test_IMAGE_INSTALL:append_variable"
1594 } 1594 }
1595 }, 1595 },
1596 { 1596 {
@@ -2348,7 +2348,7 @@
2348 "expected_results": "" 2348 "expected_results": ""
2349 }, 2349 },
2350 "3": { 2350 "3": {
2351 "action": "Build 6 recipes example (core-image-sato, core-image-minimal, core-image-base, core-image-clutter) to name a few. ", 2351 "action": "Build 6 recipes example (core-image-sato, core-image-minimal, core-image-base) to name a few. ",
2352 "expected_results": " All recipes are built correctly \n\n" 2352 "expected_results": " All recipes are built correctly \n\n"
2353 }, 2353 },
2354 "4": { 2354 "4": {
@@ -2382,7 +2382,7 @@
2382 "expected_results": "" 2382 "expected_results": ""
2383 }, 2383 },
2384 "3": { 2384 "3": {
2385 "action": "Build 6 recipes example (core-image-sato, core-image-minimal, core-image-base, core-image-clutter) to name a few. \n\n", 2385 "action": "Build 6 recipes example (core-image-sato, core-image-minimal, core-image-base) to name a few. \n\n",
2386 "expected_results": "All recipes are built correctly \n\n" 2386 "expected_results": "All recipes are built correctly \n\n"
2387 }, 2387 },
2388 "4": { 2388 "4": {
@@ -2420,7 +2420,7 @@
2420 "expected_results": "" 2420 "expected_results": ""
2421 }, 2421 },
2422 "3": { 2422 "3": {
2423 "action": "Build 4 recipes example (core-image-sato, core-image-minimal, core-image-base, core-image-clutter) to name a few. \n\n", 2423 "action": "Build 4 recipes example (core-image-sato, core-image-minimal, core-image-base) to name a few. \n\n",
2424 "expected_results": " All recipes are built correctly \n\n" 2424 "expected_results": " All recipes are built correctly \n\n"
2425 }, 2425 },
2426 "4": { 2426 "4": {
@@ -2569,4 +2569,4 @@
2569 "summary": "Download_task_log" 2569 "summary": "Download_task_log"
2570 } 2570 }
2571 } 2571 }
2572] \ No newline at end of file 2572]
diff --git a/meta/lib/oeqa/oetest.py b/meta/lib/oeqa/oetest.py
index 9c84466dd0..bcb6a878c7 100644
--- a/meta/lib/oeqa/oetest.py
+++ b/meta/lib/oeqa/oetest.py
@@ -28,7 +28,7 @@ try:
28 import oeqa.sdkext 28 import oeqa.sdkext
29except ImportError: 29except ImportError:
30 pass 30 pass
31from oeqa.utils.decorators import LogResults, gettag, getResults 31from oeqa.utils.decorators import LogResults, gettag
32 32
33logger = logging.getLogger("BitBake") 33logger = logging.getLogger("BitBake")
34 34
@@ -57,7 +57,6 @@ def filterByTagExp(testsuite, tagexp):
57@LogResults 57@LogResults
58class oeTest(unittest.TestCase): 58class oeTest(unittest.TestCase):
59 59
60 pscmd = "ps"
61 longMessage = True 60 longMessage = True
62 61
63 @classmethod 62 @classmethod
@@ -110,20 +109,6 @@ class oeRuntimeTest(oeTest):
110 def tearDown(self): 109 def tearDown(self):
111 # Uninstall packages in the DUT 110 # Uninstall packages in the DUT
112 self.tc.install_uninstall_packages(self.id(), False) 111 self.tc.install_uninstall_packages(self.id(), False)
113
114 res = getResults()
115 # If a test fails or there is an exception dump
116 # for QemuTarget only
117 if (type(self.target).__name__ == "QemuTarget" and
118 (self.id() in res.getErrorList() or
119 self.id() in res.getFailList())):
120 self.tc.host_dumper.create_dir(self._testMethodName)
121 self.tc.host_dumper.dump_host()
122 self.target.target_dumper.dump_target(
123 self.tc.host_dumper.dump_dir)
124 print ("%s dump data stored in %s" % (self._testMethodName,
125 self.tc.host_dumper.dump_dir))
126
127 self.tearDownLocal() 112 self.tearDownLocal()
128 113
129 # Method to be run after tearDown and implemented by child classes 114 # Method to be run after tearDown and implemented by child classes
@@ -256,7 +241,7 @@ class TestContext(object):
256 241
257 modules = [] 242 modules = []
258 for test in self.testslist: 243 for test in self.testslist:
259 if re.search("\w+\.\w+\.test_\S+", test): 244 if re.search(r"\w+\.\w+\.test_\S+", test):
260 test = '.'.join(t.split('.')[:3]) 245 test = '.'.join(t.split('.')[:3])
261 module = pkgutil.get_loader(test) 246 module = pkgutil.get_loader(test)
262 modules.append(module) 247 modules.append(module)
@@ -398,11 +383,6 @@ class RuntimeTestContext(TestContext):
398 def _get_test_suites_required(self): 383 def _get_test_suites_required(self):
399 return [t for t in self.d.getVar("TEST_SUITES").split() if t != "auto"] 384 return [t for t in self.d.getVar("TEST_SUITES").split() if t != "auto"]
400 385
401 def loadTests(self):
402 super(RuntimeTestContext, self).loadTests()
403 if oeTest.hasPackage("procps"):
404 oeRuntimeTest.pscmd = "ps -ef"
405
406 def extract_packages(self): 386 def extract_packages(self):
407 """ 387 """
408 Find packages that will be needed during runtime. 388 Find packages that will be needed during runtime.
diff --git a/meta/lib/oeqa/runtime/cases/_qemutiny.py b/meta/lib/oeqa/runtime/cases/_qemutiny.py
index 6886e36502..816fd4a7cb 100644
--- a/meta/lib/oeqa/runtime/cases/_qemutiny.py
+++ b/meta/lib/oeqa/runtime/cases/_qemutiny.py
@@ -1,12 +1,19 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5from oeqa.runtime.case import OERuntimeTestCase 7from oeqa.runtime.case import OERuntimeTestCase
8from oeqa.core.target.qemu import OEQemuTarget
6 9
7class QemuTinyTest(OERuntimeTestCase): 10class QemuTinyTest(OERuntimeTestCase):
8 11
9 def test_boot_tiny(self): 12 def test_boot_tiny(self):
10 status, output = self.target.run_serial('uname -a') 13 # Until the target has explicit run_serial support, check that the
11 msg = "Cannot detect poky tiny boot!" 14 # target is the qemu runner
12 self.assertTrue("yocto-tiny" in output, msg) 15 if isinstance(self.target, OEQemuTarget):
16 status, output = self.target.runner.run_serial('uname -a')
17 self.assertIn("Linux", output)
18 else:
19 self.skipTest("Target %s is not OEQemuTarget" % self.target)
diff --git a/meta/lib/oeqa/runtime/cases/apt.py b/meta/lib/oeqa/runtime/cases/apt.py
index 53745df93f..8000645843 100644
--- a/meta/lib/oeqa/runtime/cases/apt.py
+++ b/meta/lib/oeqa/runtime/cases/apt.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -21,7 +23,7 @@ class AptRepoTest(AptTest):
21 23
22 @classmethod 24 @classmethod
23 def setUpClass(cls): 25 def setUpClass(cls):
24 service_repo = os.path.join(cls.tc.td['DEPLOY_DIR_DEB'], 'all') 26 service_repo = os.path.join(cls.tc.td['DEPLOY_DIR_DEB'], '')
25 cls.repo_server = HTTPService(service_repo, 27 cls.repo_server = HTTPService(service_repo,
26 '0.0.0.0', port=cls.tc.target.server_port, 28 '0.0.0.0', port=cls.tc.target.server_port,
27 logger=cls.tc.logger) 29 logger=cls.tc.logger)
@@ -34,20 +36,44 @@ class AptRepoTest(AptTest):
34 def setup_source_config_for_package_install(self): 36 def setup_source_config_for_package_install(self):
35 apt_get_source_server = 'http://%s:%s/' % (self.tc.target.server_ip, self.repo_server.port) 37 apt_get_source_server = 'http://%s:%s/' % (self.tc.target.server_ip, self.repo_server.port)
36 apt_get_sourceslist_dir = '/etc/apt/' 38 apt_get_sourceslist_dir = '/etc/apt/'
37 self.target.run('cd %s; echo deb [ allow-insecure=yes ] %s ./ > sources.list' % (apt_get_sourceslist_dir, apt_get_source_server)) 39 self.target.run('cd %s; echo deb [ allow-insecure=yes ] %s/all ./ > sources.list' % (apt_get_sourceslist_dir, apt_get_source_server))
40
41 def setup_source_config_for_package_install_signed(self):
42 apt_get_source_server = 'http://%s:%s' % (self.tc.target.server_ip, self.repo_server.port)
43 apt_get_sourceslist_dir = '/etc/apt/'
44 self.target.run("cd %s; cp sources.list sources.list.bak; sed -i 's|\[trusted=yes\] http://bogus_ip:bogus_port|%s|g' sources.list" % (apt_get_sourceslist_dir, apt_get_source_server))
38 45
39 def cleanup_source_config_for_package_install(self): 46 def cleanup_source_config_for_package_install(self):
40 apt_get_sourceslist_dir = '/etc/apt/' 47 apt_get_sourceslist_dir = '/etc/apt/'
41 self.target.run('cd %s; rm sources.list' % (apt_get_sourceslist_dir)) 48 self.target.run('cd %s; rm sources.list' % (apt_get_sourceslist_dir))
42 49
50 def cleanup_source_config_for_package_install_signed(self):
51 apt_get_sourceslist_dir = '/etc/apt/'
52 self.target.run('cd %s; mv sources.list.bak sources.list' % (apt_get_sourceslist_dir))
53
54 def setup_key(self):
55 # the key is found on the target /etc/pki/packagefeed-gpg/
56 # named PACKAGEFEED-GPG-KEY-poky-branch
57 self.target.run('cd %s; apt-key add P*' % ('/etc/pki/packagefeed-gpg'))
58
43 @skipIfNotFeature('package-management', 59 @skipIfNotFeature('package-management',
44 'Test requires package-management to be in IMAGE_FEATURES') 60 'Test requires package-management to be in IMAGE_FEATURES')
45 @skipIfNotDataVar('IMAGE_PKGTYPE', 'deb', 61 @skipIfNotDataVar('IMAGE_PKGTYPE', 'deb',
46 'DEB is not the primary package manager') 62 'DEB is not the primary package manager')
47 @OEHasPackage(['apt']) 63 @OEHasPackage(['apt'])
48 def test_apt_install_from_repo(self): 64 def test_apt_install_from_repo(self):
49 self.setup_source_config_for_package_install() 65 if not self.tc.td.get('PACKAGE_FEED_GPG_NAME'):
50 self.pkg('update') 66 self.setup_source_config_for_package_install()
51 self.pkg('remove --yes run-postinsts-dev') 67 self.pkg('update')
52 self.pkg('install --yes --allow-unauthenticated run-postinsts-dev') 68 self.pkg('remove --yes run-postinsts-dev')
53 self.cleanup_source_config_for_package_install() 69 self.pkg('install --yes --allow-unauthenticated run-postinsts-dev')
70 self.cleanup_source_config_for_package_install()
71 else:
72 # when we are here a key has been set to sign the package feed and
73 # public key and gnupg installed on the image by test_testimage_apt
74 self.setup_source_config_for_package_install_signed()
75 self.setup_key()
76 self.pkg('update')
77 self.pkg('install --yes run-postinsts-dev')
78 self.pkg('remove --yes run-postinsts-dev')
79 self.cleanup_source_config_for_package_install_signed()
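[Note on the apt.py hunk above] The new branch in test_apt_install_from_repo() keys off PACKAGE_FEED_GPG_NAME: with a signing key configured, the public key under /etc/pki/packagefeed-gpg is added via apt-key and no --allow-unauthenticated is needed; without one, the unsigned [ allow-insecure=yes ] path is used. A condensed sketch of that decision, where td stands for the BitBake test-data dictionary the case reads and the helper name is illustrative:

    def apt_install_cmd(td, pkg='run-postinsts-dev'):
        # Signed feed: key added from /etc/pki/packagefeed-gpg with apt-key.
        if td.get('PACKAGE_FEED_GPG_NAME'):
            return 'apt-get install --yes %s' % pkg
        # Unsigned feed: sources.list carries [ allow-insecure=yes ], so the
        # install has to be forced past authentication.
        return 'apt-get install --yes --allow-unauthenticated %s' % pkg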
diff --git a/meta/lib/oeqa/runtime/cases/boot.py b/meta/lib/oeqa/runtime/cases/boot.py
index 2142f400a0..dcee3311f7 100644
--- a/meta/lib/oeqa/runtime/cases/boot.py
+++ b/meta/lib/oeqa/runtime/cases/boot.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -13,7 +15,7 @@ from oeqa.core.decorator.data import skipIfQemu
13class BootTest(OERuntimeTestCase): 15class BootTest(OERuntimeTestCase):
14 16
15 @OETimeout(120) 17 @OETimeout(120)
16 @skipIfQemu('qemuall', 'Test only runs on real hardware') 18 @skipIfQemu()
17 @OETestDepends(['ssh.SSHTest.test_ssh']) 19 @OETestDepends(['ssh.SSHTest.test_ssh'])
18 def test_reboot(self): 20 def test_reboot(self):
19 output = '' 21 output = ''
diff --git a/meta/lib/oeqa/runtime/cases/buildcpio.py b/meta/lib/oeqa/runtime/cases/buildcpio.py
index e29bf16ccb..7be734cb4f 100644
--- a/meta/lib/oeqa/runtime/cases/buildcpio.py
+++ b/meta/lib/oeqa/runtime/cases/buildcpio.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -12,7 +14,7 @@ class BuildCpioTest(OERuntimeTestCase):
12 14
13 @classmethod 15 @classmethod
14 def setUpClass(cls): 16 def setUpClass(cls):
15 uri = 'https://downloads.yoctoproject.org/mirror/sources/cpio-2.13.tar.gz' 17 uri = 'https://downloads.yoctoproject.org/mirror/sources/cpio-2.15.tar.gz'
16 cls.project = TargetBuildProject(cls.tc.target, 18 cls.project = TargetBuildProject(cls.tc.target,
17 uri, 19 uri,
18 dl_dir = cls.tc.td['DL_DIR']) 20 dl_dir = cls.tc.td['DL_DIR'])
@@ -27,7 +29,6 @@ class BuildCpioTest(OERuntimeTestCase):
27 @OEHasPackage(['autoconf']) 29 @OEHasPackage(['autoconf'])
28 def test_cpio(self): 30 def test_cpio(self):
29 self.project.download_archive() 31 self.project.download_archive()
30 self.project.run_configure('--disable-maintainer-mode', 32 self.project.run_configure()
31 'sed -i -e "/char \*program_name/d" src/global.c;')
32 self.project.run_make() 33 self.project.run_make()
33 self.project.run_install() 34 self.project.run_install()
diff --git a/meta/lib/oeqa/runtime/cases/buildgalculator.py b/meta/lib/oeqa/runtime/cases/buildgalculator.py
index e5cc3e2888..2cfb3243dc 100644
--- a/meta/lib/oeqa/runtime/cases/buildgalculator.py
+++ b/meta/lib/oeqa/runtime/cases/buildgalculator.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/buildlzip.py b/meta/lib/oeqa/runtime/cases/buildlzip.py
index bc70b41461..44f4f1be71 100644
--- a/meta/lib/oeqa/runtime/cases/buildlzip.py
+++ b/meta/lib/oeqa/runtime/cases/buildlzip.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/connman.py b/meta/lib/oeqa/runtime/cases/connman.py
index f0d15fac9b..a488752e3f 100644
--- a/meta/lib/oeqa/runtime/cases/connman.py
+++ b/meta/lib/oeqa/runtime/cases/connman.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/date.py b/meta/lib/oeqa/runtime/cases/date.py
index fdd2a6ae58..a2523de67a 100644
--- a/meta/lib/oeqa/runtime/cases/date.py
+++ b/meta/lib/oeqa/runtime/cases/date.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -13,12 +15,12 @@ class DateTest(OERuntimeTestCase):
13 def setUp(self): 15 def setUp(self):
14 if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd': 16 if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd':
15 self.logger.debug('Stopping systemd-timesyncd daemon') 17 self.logger.debug('Stopping systemd-timesyncd daemon')
16 self.target.run('systemctl disable --now systemd-timesyncd') 18 self.target.run('systemctl disable --now --runtime systemd-timesyncd')
17 19
18 def tearDown(self): 20 def tearDown(self):
19 if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd': 21 if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd':
20 self.logger.debug('Starting systemd-timesyncd daemon') 22 self.logger.debug('Starting systemd-timesyncd daemon')
21 self.target.run('systemctl enable --now systemd-timesyncd') 23 self.target.run('systemctl enable --now --runtime systemd-timesyncd')
22 24
23 @OETestDepends(['ssh.SSHTest.test_ssh']) 25 @OETestDepends(['ssh.SSHTest.test_ssh'])
24 @OEHasPackage(['coreutils', 'busybox']) 26 @OEHasPackage(['coreutils', 'busybox'])
@@ -28,14 +30,13 @@ class DateTest(OERuntimeTestCase):
28 self.assertEqual(status, 0, msg=msg) 30 self.assertEqual(status, 0, msg=msg)
29 oldDate = output 31 oldDate = output
30 32
31 sampleDate = '"2016-08-09 10:00:00"' 33 sampleTimestamp = 1488800000
32 (status, output) = self.target.run("date -s %s" % sampleDate) 34 (status, output) = self.target.run("date -s @%d" % sampleTimestamp)
33 self.assertEqual(status, 0, msg='Date set failed, output: %s' % output) 35 self.assertEqual(status, 0, msg='Date set failed, output: %s' % output)
34 36
35 (status, output) = self.target.run("date -R") 37 (status, output) = self.target.run('date +"%s"')
36 p = re.match('Tue, 09 Aug 2016 10:00:.. \+0000', output)
37 msg = 'The date was not set correctly, output: %s' % output 38 msg = 'The date was not set correctly, output: %s' % output
38 self.assertTrue(p, msg=msg) 39 self.assertTrue(int(output) - sampleTimestamp < 300, msg=msg)
39 40
40 (status, output) = self.target.run('date -s "%s"' % oldDate) 41 (status, output) = self.target.run('date -s "%s"' % oldDate)
41 msg = 'Failed to reset date, output: %s' % output 42 msg = 'Failed to reset date, output: %s' % output
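[Note on the date.py hunk above] The test drops the locale- and timezone-sensitive regex on `date -R` output in favour of an epoch-seconds comparison. A minimal sketch of that check, where run_cmd stands in for the target.run() helper used in the test:

    SAMPLE_TIMESTAMP = 1488800000  # the value set with: date -s @1488800000

    def clock_set_correctly(run_cmd, tolerance=300):
        # 'date +"%s"' prints seconds since the epoch, so the comparison does
        # not depend on locale or timezone formatting.
        status, output = run_cmd('date +"%s"')
        return status == 0 and int(output) - SAMPLE_TIMESTAMP < tolerance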
diff --git a/meta/lib/oeqa/runtime/cases/df.py b/meta/lib/oeqa/runtime/cases/df.py
index bb155c9cf9..43e0ebf9ea 100644
--- a/meta/lib/oeqa/runtime/cases/df.py
+++ b/meta/lib/oeqa/runtime/cases/df.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/dnf.py b/meta/lib/oeqa/runtime/cases/dnf.py
index f40c63026e..3ccb18ce83 100644
--- a/meta/lib/oeqa/runtime/cases/dnf.py
+++ b/meta/lib/oeqa/runtime/cases/dnf.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -73,48 +75,43 @@ class DnfRepoTest(DnfTest):
73 def test_dnf_makecache(self): 75 def test_dnf_makecache(self):
74 self.dnf_with_repo('makecache') 76 self.dnf_with_repo('makecache')
75 77
76
77# Does not work when repo is specified on the command line
78# @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache'])
79# def test_dnf_repolist(self):
80# self.dnf_with_repo('repolist')
81
82 @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache']) 78 @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache'])
83 def test_dnf_repoinfo(self): 79 def test_dnf_repoinfo(self):
84 self.dnf_with_repo('repoinfo') 80 self.dnf_with_repo('repoinfo')
85 81
86 @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache']) 82 @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache'])
87 def test_dnf_install(self): 83 def test_dnf_install(self):
88 output = self.dnf_with_repo('list run-postinsts-dev') 84 self.dnf_with_repo('remove -y dnf-test-*')
89 if 'Installed Packages' in output: 85 self.dnf_with_repo('install -y dnf-test-dep')
90 self.dnf_with_repo('remove -y run-postinsts-dev')
91 self.dnf_with_repo('install -y run-postinsts-dev')
92 86
93 @OETestDepends(['dnf.DnfRepoTest.test_dnf_install']) 87 @OETestDepends(['dnf.DnfRepoTest.test_dnf_install'])
94 def test_dnf_install_dependency(self): 88 def test_dnf_install_dependency(self):
95 self.dnf_with_repo('remove -y run-postinsts') 89 self.dnf_with_repo('remove -y dnf-test-*')
96 self.dnf_with_repo('install -y run-postinsts-dev') 90 self.dnf_with_repo('install -y dnf-test-main')
91 output = self.dnf('list --installed dnf-test-*')
92 self.assertIn("dnf-test-main.", output)
93 self.assertIn("dnf-test-dep.", output)
97 94
98 @OETestDepends(['dnf.DnfRepoTest.test_dnf_install_dependency']) 95 @OETestDepends(['dnf.DnfRepoTest.test_dnf_install_dependency'])
99 def test_dnf_install_from_disk(self): 96 def test_dnf_install_from_disk(self):
100 self.dnf_with_repo('remove -y run-postinsts-dev') 97 self.dnf_with_repo('remove -y dnf-test-dep')
101 self.dnf_with_repo('install -y --downloadonly run-postinsts-dev') 98 self.dnf_with_repo('install -y --downloadonly dnf-test-dep')
102 status, output = self.target.run('find /var/cache/dnf -name run-postinsts-dev*rpm', 1500) 99 status, output = self.target.run('find /var/cache/dnf -name dnf-test-dep*rpm')
103 self.assertEqual(status, 0, output) 100 self.assertEqual(status, 0, output)
104 self.dnf_with_repo('install -y %s' % output) 101 self.dnf_with_repo('install -y %s' % output)
105 102
106 @OETestDepends(['dnf.DnfRepoTest.test_dnf_install_from_disk']) 103 @OETestDepends(['dnf.DnfRepoTest.test_dnf_install_from_disk'])
107 def test_dnf_install_from_http(self): 104 def test_dnf_install_from_http(self):
108 output = subprocess.check_output('%s %s -name run-postinsts-dev*' % (bb.utils.which(os.getenv('PATH'), "find"), 105 output = subprocess.check_output('%s %s -name dnf-test-dep*' % (bb.utils.which(os.getenv('PATH'), "find"),
109 os.path.join(self.tc.td['WORKDIR'], 'oe-testimage-repo')), shell=True).decode("utf-8") 106 os.path.join(self.tc.td['WORKDIR'], 'oe-testimage-repo')), shell=True).decode("utf-8")
110 rpm_path = output.split("/")[-2] + "/" + output.split("/")[-1] 107 rpm_path = output.split("/")[-2] + "/" + output.split("/")[-1]
111 url = 'http://%s:%s/%s' %(self.target.server_ip, self.repo_server.port, rpm_path) 108 url = 'http://%s:%s/%s' %(self.target.server_ip, self.repo_server.port, rpm_path)
112 self.dnf_with_repo('remove -y run-postinsts-dev') 109 self.dnf_with_repo('remove -y dnf-test-dep')
113 self.dnf_with_repo('install -y %s' % url) 110 self.dnf_with_repo('install -y %s' % url)
114 111
115 @OETestDepends(['dnf.DnfRepoTest.test_dnf_install']) 112 @OETestDepends(['dnf.DnfRepoTest.test_dnf_install'])
116 def test_dnf_reinstall(self): 113 def test_dnf_reinstall(self):
117 self.dnf_with_repo('reinstall -y run-postinsts-dev') 114 self.dnf_with_repo('reinstall -y dnf-test-main')
118 115
119 @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache']) 116 @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache'])
120 @skipIfInDataVar('DISTRO_FEATURES', 'usrmerge', 'Test run when not enable usrmerge') 117 @skipIfInDataVar('DISTRO_FEATURES', 'usrmerge', 'Test run when not enable usrmerge')
@@ -137,55 +134,40 @@ class DnfRepoTest(DnfTest):
137 self.target.run('cp -r /etc/dnf %s/etc' % rootpath, 1500) 134 self.target.run('cp -r /etc/dnf %s/etc' % rootpath, 1500)
138 self.target.run('cp /bin/sh %s/bin' % rootpath, 1500) 135 self.target.run('cp /bin/sh %s/bin' % rootpath, 1500)
139 self.target.run('mount -o bind /dev %s/dev/' % rootpath, 1500) 136 self.target.run('mount -o bind /dev %s/dev/' % rootpath, 1500)
140 self.dnf_with_repo('install --installroot=%s -v -y --rpmverbosity=debug busybox run-postinsts' % rootpath) 137 self.dnf_with_repo('install --installroot=%s -v -y --rpmverbosity=debug busybox' % rootpath)
141 status, output = self.target.run('test -e %s/var/cache/dnf' % rootpath, 1500) 138 status, output = self.target.run('test -e %s/var/cache/dnf' % rootpath, 1500)
142 self.assertEqual(0, status, output) 139 self.assertEqual(0, status, output)
143 status, output = self.target.run('test -e %s/bin/busybox' % rootpath, 1500) 140 status, output = self.target.run('test -e %s/bin/busybox' % rootpath, 1500)
144 self.assertEqual(0, status, output) 141 self.assertEqual(0, status, output)
145 142
146 @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache']) 143 @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache'])
147 @skipIfNotInDataVar('DISTRO_FEATURES', 'usrmerge', 'Test run when enable usrmege') 144 @skipIfNotInDataVar('DISTRO_FEATURES', 'usrmerge', 'Test run when enable usrmerge')
148 @OEHasPackage('busybox') 145 @OEHasPackage('busybox')
149 def test_dnf_installroot_usrmerge(self): 146 def test_dnf_installroot_usrmerge(self):
150 rootpath = '/home/root/chroot/test' 147 rootpath = '/home/root/chroot/test'
151 #Copy necessary files to avoid errors with not yet installed tools on 148 #Copy necessary files to avoid errors with not yet installed tools on
152 #installroot directory. 149 #installroot directory.
153 self.target.run('mkdir -p %s/etc' % rootpath, 1500) 150 self.target.run('mkdir -p %s/etc' % rootpath)
154 self.target.run('mkdir -p %s/usr/bin %s/usr/sbin' % (rootpath, rootpath), 1500) 151 self.target.run('mkdir -p %s/usr/bin %s/usr/sbin' % (rootpath, rootpath))
155 self.target.run('ln -sf -r %s/usr/bin %s/bin' % (rootpath, rootpath), 1500) 152 self.target.run('ln -sf usr/bin %s/bin' % (rootpath))
156 self.target.run('ln -sf -r %s/usr/sbin %s/sbin' % (rootpath, rootpath), 1500) 153 self.target.run('ln -sf usr/sbin %s/sbin' % (rootpath))
157 self.target.run('mkdir -p %s/dev' % rootpath, 1500) 154 self.target.run('mkdir -p %s/dev' % rootpath)
158 #Handle different architectures lib dirs 155 #Handle different architectures lib dirs
159 self.target.run('mkdir -p %s/usr/lib' % rootpath, 1500) 156 self.target.run("for l in /lib*; do mkdir -p %s/usr/$l; ln -s usr/$l %s/$l; done" % (rootpath, rootpath))
160 self.target.run('mkdir -p %s/usr/libx32' % rootpath, 1500) 157 self.target.run('cp -r /etc/rpm %s/etc' % rootpath)
161 self.target.run('mkdir -p %s/usr/lib64' % rootpath, 1500) 158 self.target.run('cp -r /etc/dnf %s/etc' % rootpath)
162 self.target.run('cp /lib/libtinfo.so.5 %s/usr/lib' % rootpath, 1500) 159 self.target.run('cp /bin/busybox %s/bin/sh' % rootpath)
163 self.target.run('cp /libx32/libtinfo.so.5 %s/usr/libx32' % rootpath, 1500) 160 self.target.run('mount -o bind /dev %s/dev/' % rootpath)
164 self.target.run('cp /lib64/libtinfo.so.5 %s/usr/lib64' % rootpath, 1500) 161 self.dnf_with_repo('install --installroot=%s -v -y --rpmverbosity=debug busybox' % rootpath)
165 self.target.run('ln -sf -r %s/lib %s/usr/lib' % (rootpath,rootpath), 1500) 162 status, output = self.target.run('test -e %s/var/cache/dnf' % rootpath)
166 self.target.run('ln -sf -r %s/libx32 %s/usr/libx32' % (rootpath,rootpath), 1500)
167 self.target.run('ln -sf -r %s/lib64 %s/usr/lib64' % (rootpath,rootpath), 1500)
168 self.target.run('cp -r /etc/rpm %s/etc' % rootpath, 1500)
169 self.target.run('cp -r /etc/dnf %s/etc' % rootpath, 1500)
170 self.target.run('cp /bin/sh %s/bin' % rootpath, 1500)
171 self.target.run('mount -o bind /dev %s/dev/' % rootpath, 1500)
172 self.dnf_with_repo('install --installroot=%s -v -y --rpmverbosity=debug busybox run-postinsts' % rootpath)
173 status, output = self.target.run('test -e %s/var/cache/dnf' % rootpath, 1500)
174 self.assertEqual(0, status, output) 163 self.assertEqual(0, status, output)
175 status, output = self.target.run('test -e %s/bin/busybox' % rootpath, 1500) 164 status, output = self.target.run('test -e %s/bin/busybox' % rootpath)
176 self.assertEqual(0, status, output) 165 self.assertEqual(0, status, output)
177 166
178 @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache']) 167 @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache'])
179 def test_dnf_exclude(self): 168 def test_dnf_exclude(self):
180 excludepkg = 'curl-dev' 169 self.dnf_with_repo('remove -y dnf-test-*')
181 self.dnf_with_repo('install -y curl*') 170 self.dnf_with_repo('install -y --exclude=dnf-test-dep dnf-test-*')
182 self.dnf('list %s' % excludepkg, 0) 171 output = self.dnf('list --installed dnf-test-*')
183 #Avoid remove dependencies to skip some errors on different archs and images 172 self.assertIn("dnf-test-main.", output)
184 self.dnf_with_repo('remove --setopt=clean_requirements_on_remove=0 -y curl*') 173 self.assertNotIn("dnf-test-dev.", output)
185 #check curl-dev is not installed adter removing all curl occurrences
186 status, output = self.target.run('dnf list --installed | grep %s'% excludepkg, 1500)
187 self.assertEqual(1, status, "%s was not removed, is listed as installed"%excludepkg)
188 self.dnf_with_repo('install -y --exclude=%s --exclude=curl-staticdev curl*' % excludepkg)
189 #check curl-dev is not installed after being excluded
190 status, output = self.target.run('dnf list --installed | grep %s'% excludepkg , 1500)
191 self.assertEqual(1, status, "%s was not excluded, is listed as installed"%excludepkg)
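A rough illustration of the path handling in test_dnf_install_from_http above: only the last two components of the find result are kept (the package-arch directory plus the RPM filename), because that is the path the testimage repo server exposes over HTTP. The path, server address and port below are made-up examples:

    # Hypothetical find result under the deployed oe-testimage-repo
    output = "/build/oe-testimage-repo/core2_64/dnf-test-dep-1.0-r0.core2_64.rpm"
    rpm_path = output.split("/")[-2] + "/" + output.split("/")[-1]
    print(rpm_path)                      # core2_64/dnf-test-dep-1.0-r0.core2_64.rpm
    url = 'http://%s:%s/%s' % ("192.168.7.1", 46505, rpm_path)   # example ip/port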
diff --git a/meta/lib/oeqa/runtime/cases/ethernet_ip_connman.py b/meta/lib/oeqa/runtime/cases/ethernet_ip_connman.py
index e010612838..eac8f2d082 100644
--- a/meta/lib/oeqa/runtime/cases/ethernet_ip_connman.py
+++ b/meta/lib/oeqa/runtime/cases/ethernet_ip_connman.py
@@ -1,3 +1,8 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
1from oeqa.runtime.case import OERuntimeTestCase 6from oeqa.runtime.case import OERuntimeTestCase
2from oeqa.core.decorator.depends import OETestDepends 7from oeqa.core.decorator.depends import OETestDepends
3from oeqa.core.decorator.data import skipIfQemu 8from oeqa.core.decorator.data import skipIfQemu
@@ -11,7 +16,7 @@ class Ethernet_Test(OERuntimeTestCase):
11 x = '.'.join(x) 16 x = '.'.join(x)
12 return x 17 return x
13 18
14 @skipIfQemu('qemuall', 'Test only runs on real hardware') 19 @skipIfQemu()
15 @OETestDepends(['ssh.SSHTest.test_ssh']) 20 @OETestDepends(['ssh.SSHTest.test_ssh'])
16 def test_set_virtual_ip(self): 21 def test_set_virtual_ip(self):
17 (status, output) = self.target.run("ifconfig eth0 | grep 'inet ' | awk '{print $2}'") 22 (status, output) = self.target.run("ifconfig eth0 | grep 'inet ' | awk '{print $2}'")
@@ -22,6 +27,7 @@ class Ethernet_Test(OERuntimeTestCase):
22 (status, output) = self.target.run("ifconfig eth0:1 %s netmask 255.255.255.0 && sleep 2 && ping -c 5 %s && ifconfig eth0:1 down" % (virtual_ip,virtual_ip)) 27 (status, output) = self.target.run("ifconfig eth0:1 %s netmask 255.255.255.0 && sleep 2 && ping -c 5 %s && ifconfig eth0:1 down" % (virtual_ip,virtual_ip))
23 self.assertEqual(status, 0, msg='Failed to create virtual ip address, output: %s' % output) 28 self.assertEqual(status, 0, msg='Failed to create virtual ip address, output: %s' % output)
24 29
30 @skipIfQemu()
25 @OETestDepends(['ethernet_ip_connman.Ethernet_Test.test_set_virtual_ip']) 31 @OETestDepends(['ethernet_ip_connman.Ethernet_Test.test_set_virtual_ip'])
26 def test_get_ip_from_dhcp(self): 32 def test_get_ip_from_dhcp(self):
27 (status, output) = self.target.run("connmanctl services | grep -E '*AO Wired|*AR Wired' | awk '{print $3}'") 33 (status, output) = self.target.run("connmanctl services | grep -E '*AO Wired|*AR Wired' | awk '{print $3}'")
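The hunks above (and the matching change in ltp_stress.py below) drop the arguments from @skipIfQemu, so the decorator itself now decides that the test is hardware-only. A plausible sketch of such a no-argument decorator — not necessarily the exact code in oeqa/core/decorator/data.py — assuming every qemu MACHINE name starts with "qemu":

    from oeqa.core.decorator import OETestDecorator, registerDecorator

    @registerDecorator
    class skipIfQemu(OETestDecorator):
        """Skip the test case when the target MACHINE is a qemu machine."""
        def setUpDecorator(self):
            # Assumption: qemu machines are named qemux86-64, qemuarm, qemuppc, ...
            if self.case.td.get('MACHINE', '').startswith('qemu'):
                self.case.skipTest('Test only runs on real hardware')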
diff --git a/meta/lib/oeqa/runtime/cases/gcc.py b/meta/lib/oeqa/runtime/cases/gcc.py
index 1b6e431bf4..17b1483e8d 100644
--- a/meta/lib/oeqa/runtime/cases/gcc.py
+++ b/meta/lib/oeqa/runtime/cases/gcc.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/gi.py b/meta/lib/oeqa/runtime/cases/gi.py
index 42bd100a31..78c7ddda2c 100644
--- a/meta/lib/oeqa/runtime/cases/gi.py
+++ b/meta/lib/oeqa/runtime/cases/gi.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/go.py b/meta/lib/oeqa/runtime/cases/go.py
new file mode 100644
index 0000000000..39a80f4dca
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/go.py
@@ -0,0 +1,21 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7from oeqa.runtime.case import OERuntimeTestCase
8from oeqa.core.decorator.depends import OETestDepends
9from oeqa.runtime.decorator.package import OEHasPackage
10
11class GoHelloworldTest(OERuntimeTestCase):
12 @OETestDepends(['ssh.SSHTest.test_ssh'])
13 @OEHasPackage(['go-helloworld'])
14 def test_gohelloworld(self):
15 cmd = "go-helloworld"
16 status, output = self.target.run(cmd)
17 msg = 'Exit status was not 0. Output: %s' % output
18 self.assertEqual(status, 0, msg=msg)
19
20 msg = 'Incorrect output: %s' % output
21 self.assertEqual(output, "Hello, world!", msg=msg)
diff --git a/meta/lib/oeqa/runtime/cases/gstreamer.py b/meta/lib/oeqa/runtime/cases/gstreamer.py
index f735f82e3b..2295769cfd 100644
--- a/meta/lib/oeqa/runtime/cases/gstreamer.py
+++ b/meta/lib/oeqa/runtime/cases/gstreamer.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/kernelmodule.py b/meta/lib/oeqa/runtime/cases/kernelmodule.py
index 47fd2f850c..9c42fcc586 100644
--- a/meta/lib/oeqa/runtime/cases/kernelmodule.py
+++ b/meta/lib/oeqa/runtime/cases/kernelmodule.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/ksample.py b/meta/lib/oeqa/runtime/cases/ksample.py
index a9a1620ebd..b6848762e3 100644
--- a/meta/lib/oeqa/runtime/cases/ksample.py
+++ b/meta/lib/oeqa/runtime/cases/ksample.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -10,7 +12,7 @@ from oeqa.core.decorator.depends import OETestDepends
10from oeqa.core.decorator.data import skipIfNotFeature 12from oeqa.core.decorator.data import skipIfNotFeature
11 13
12# need some kernel fragments 14# need some kernel fragments
13# echo "KERNEL_FEATURES_append += \" features\/kernel\-sample\/kernel\-sample.scc\"" >> local.conf 15# echo "KERNEL_FEATURES:append = \" features\/kernel\-sample\/kernel\-sample.scc\"" >> local.conf
14class KSample(OERuntimeTestCase): 16class KSample(OERuntimeTestCase):
15 def cmd_and_check(self, cmd='', match_string=''): 17 def cmd_and_check(self, cmd='', match_string=''):
16 status, output = self.target.run(cmd) 18 status, output = self.target.run(cmd)
diff --git a/meta/lib/oeqa/runtime/cases/ldd.py b/meta/lib/oeqa/runtime/cases/ldd.py
index 9c2caa8f65..f6841c6675 100644
--- a/meta/lib/oeqa/runtime/cases/ldd.py
+++ b/meta/lib/oeqa/runtime/cases/ldd.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/login.py b/meta/lib/oeqa/runtime/cases/login.py
new file mode 100644
index 0000000000..e1bc60d49b
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/login.py
@@ -0,0 +1,116 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import shutil
8import subprocess
9import tempfile
10import time
11import os
12from datetime import datetime
13from oeqa.runtime.case import OERuntimeTestCase
14from oeqa.runtime.decorator.package import OEHasPackage
15
16### Status of qemu images.
17# - runqemu qemuppc64 comes up blank. (skip)
18 # - qemuarmv5 comes up with multiple heads, but sending "head" to screendump
19 #   seems to create a png with a bad header? (skip for now, but come back to fix)
20 # - qemuriscv32 and qemuloongarch64 don't work with testimage, apparently? (skip)
21 # - qemumips64 is missing the mouse icon.
22 # - qemumips takes forever to render and is missing the mouse icon.
23 # - qemuarm and qemuppc are odd as they don't resize, so we need to just set the width.
24 # - All images have home and screen flipper icons that aren't always rendered fully at first;
25 #   the sleep seems to help this out some, depending on machine load.
26###
27
28class LoginTest(OERuntimeTestCase):
29 @OEHasPackage(['matchbox-desktop', 'dbus-wait'])
30 def test_screenshot(self):
31 if self.td.get('MACHINE') in ("qemuppc64", "qemuarmv5", "qemuriscv32", "qemuriscv64", "qemuloongarch64"):
32 self.skipTest("{0} is not currently supported.".format(self.td.get('MACHINE')))
33
34 pn = self.td.get('PN')
35
36 ourenv = os.environ.copy()
37 origpath = self.td.get("ORIGPATH")
38 if origpath:
39 ourenv['PATH'] = ourenv['PATH'] + ":" + origpath
40
41 for cmd in ["identify.im7", "convert.im7", "compare.im7"]:
42 try:
43 subprocess.check_output(["which", cmd], env=ourenv)
44 except subprocess.CalledProcessError:
45 self.skipTest("%s (from imagemagick) not available" % cmd)
46
47
48 # Store images so we can debug them if needed
49 saved_screenshots_dir = self.td.get('T') + "/saved-screenshots/"
50
51 ###
52 # This is a really horrible way of doing this but I've not found the
53 # right event to determine "The system is loaded and screen is rendered"
54 #
55 # Using dbus-wait for matchbox is the wrong answer because while it
56 # ensures the system is up, it doesn't mean the screen is rendered.
57 #
58 # Checking the qmp socket doesn't work afaik either.
59 #
60 # One way to do this is to compare against known-good screendumps until
61 # we either match the expected image (or come close to it) or we time out.
62 # Part of the issue with that is that there is only a very fine difference
63 # in the diff between a screendump where the icons haven't loaded yet and
64 # one where they never will. I'll look at that next, but, for now, this.
65 #
66 # Which is ugly and I hate it but it 'works' for various definitions of
67 # 'works'.
68 ###
69 # RP: if the signal is sent before we run this, it will never be seen and we'd timeout
70 #status, output = self.target.run('dbus-wait org.matchbox_project.desktop Loaded')
71 #if status != 0 or "Timeout" in output:
72 # self.fail('dbus-wait failed (%s, %s). This could mean that the image never loaded the matchbox desktop.' % (status, output))
73
74 # Start taking screenshots every 2 seconds until diff=0 or timeout is 60 seconds
75 timeout = time.time() + 60
76 diff = True
77 with tempfile.NamedTemporaryFile(prefix="oeqa-screenshot-login", suffix=".png") as t:
78 while diff != 0 and time.time() < timeout:
79 time.sleep(2)
80 ret = self.target.runner.run_monitor("screendump", args={"filename": t.name, "format":"png"})
81
82 # Find out size of image so we can determine where to blank out clock.
83 # qemuarm and qemuppc are odd as they don't resize the window and return
84 # incorrect widths
85 if self.td.get('MACHINE') == "qemuarm" or self.td.get('MACHINE') == "qemuppc":
86 width = "640"
87 else:
88 cmd = "identify.im7 -ping -format '%w' {0}".format(t.name)
89 width = subprocess.check_output(cmd, shell=True, env=ourenv).decode()
90
91 rblank = int(float(width))
92 lblank = rblank-80
93
94 # Use the meta-oe version of convert, along with its suffix. This blanks out the clock.
95 cmd = "convert.im7 {0} -fill white -draw 'rectangle {1},4 {2},28' {3}".format(t.name, str(rblank), str(lblank), t.name)
96 convert_out=subprocess.check_output(cmd, shell=True, env=ourenv).decode()
97
98 bb.utils.mkdirhier(saved_screenshots_dir)
99 savedfile = "{0}/saved-{1}-{2}-{3}.png".format(saved_screenshots_dir, \
100 datetime.timestamp(datetime.now()), \
101 pn, \
102 self.td.get('MACHINE'))
103 shutil.copy2(t.name, savedfile)
104
105 refimage = self.td.get('COREBASE') + "/meta/files/screenshot-tests/" + pn + "-" + self.td.get('MACHINE') +".png"
106 if not os.path.exists(refimage):
107 self.skipTest("No reference image for comparison (%s)" % refimage)
108
109 cmd = "compare.im7 -metric MSE {0} {1} /dev/null".format(t.name, refimage)
110 compare_out = subprocess.run(cmd, shell=True, capture_output=True, text=True, env=ourenv)
111 diff=float(compare_out.stderr.replace("(", "").replace(")","").split()[1])
112 if diff > 0:
113 # Keep a copy of the failed screenshot so we can see what happened.
114 self.fail("Screenshot diff is {0}. Failed image stored in {1}".format(str(diff), savedfile))
115 else:
116 self.assertEqual(0, diff, "Screenshot diff is {0}.".format(str(diff)))
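The diff value above comes from compare's stderr: with -metric MSE, ImageMagick prints the absolute error followed by the normalized error in parentheses (something like "1710.4 (0.0261)"), so stripping the parentheses and taking the second field yields the normalized figure that is checked against zero. A stand-alone illustration of that parsing, with made-up numbers:

    # Example stderr from: compare.im7 -metric MSE screenshot.png reference.png /dev/null
    stderr_text = "1710.4 (0.0261)"
    diff = float(stderr_text.replace("(", "").replace(")", "").split()[1])
    print(diff)   # 0.0261 -> non-zero, so the screenshots differ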
diff --git a/meta/lib/oeqa/runtime/cases/logrotate.py b/meta/lib/oeqa/runtime/cases/logrotate.py
index a4efcd07c0..6ad980cb6a 100644
--- a/meta/lib/oeqa/runtime/cases/logrotate.py
+++ b/meta/lib/oeqa/runtime/cases/logrotate.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -17,7 +19,7 @@ class LogrotateTest(OERuntimeTestCase):
17 19
18 @classmethod 20 @classmethod
19 def tearDownClass(cls): 21 def tearDownClass(cls):
20 cls.tc.target.run('mv -f $HOME/wtmp.oeqabak /etc/logrotate.d/wtmp && rm -rf $HOME/logrotate_dir') 22 cls.tc.target.run('mv -f $HOME/wtmp.oeqabak /etc/logrotate.d/wtmp && rm -rf /var/log//logrotate_dir')
21 cls.tc.target.run('rm -rf /var/log/logrotate_testfile && rm -rf /etc/logrotate.d/logrotate_testfile') 23 cls.tc.target.run('rm -rf /var/log/logrotate_testfile && rm -rf /etc/logrotate.d/logrotate_testfile')
22 24
23 @OETestDepends(['ssh.SSHTest.test_ssh']) 25 @OETestDepends(['ssh.SSHTest.test_ssh'])
@@ -29,17 +31,17 @@ class LogrotateTest(OERuntimeTestCase):
29 msg = ('Could not create/update /var/log/wtmp with touch') 31 msg = ('Could not create/update /var/log/wtmp with touch')
30 self.assertEqual(status, 0, msg = msg) 32 self.assertEqual(status, 0, msg = msg)
31 33
32 status, output = self.target.run('mkdir $HOME/logrotate_dir') 34 status, output = self.target.run('mkdir /var/log//logrotate_dir')
33 msg = ('Could not create logrotate_dir. Output: %s' % output) 35 msg = ('Could not create logrotate_dir. Output: %s' % output)
34 self.assertEqual(status, 0, msg = msg) 36 self.assertEqual(status, 0, msg = msg)
35 37
36 status, output = self.target.run('echo "create \n olddir $HOME/logrotate_dir \n include /etc/logrotate.d/wtmp" > /tmp/logrotate-test.conf') 38 status, output = self.target.run('echo "create \n olddir /var/log//logrotate_dir \n include /etc/logrotate.d/wtmp" > /tmp/logrotate-test.conf')
37 msg = ('Could not write to /tmp/logrotate-test.conf') 39 msg = ('Could not write to /tmp/logrotate-test.conf')
38 self.assertEqual(status, 0, msg = msg) 40 self.assertEqual(status, 0, msg = msg)
39 41
40 # If logrotate fails to rotate the log, view the verbose output of logrotate to see what prevented it 42 # If logrotate fails to rotate the log, view the verbose output of logrotate to see what prevented it
41 _, logrotate_output = self.target.run('logrotate -vf /tmp/logrotate-test.conf') 43 _, logrotate_output = self.target.run('logrotate -vf /tmp/logrotate-test.conf')
42 status, _ = self.target.run('find $HOME/logrotate_dir -type f | grep wtmp.1') 44 status, _ = self.target.run('find /var/log//logrotate_dir -type f | grep wtmp.1')
43 msg = ("logrotate did not successfully rotate the wtmp log. Output from logrotate -vf: \n%s" % (logrotate_output)) 45 msg = ("logrotate did not successfully rotate the wtmp log. Output from logrotate -vf: \n%s" % (logrotate_output))
44 self.assertEqual(status, 0, msg = msg) 46 self.assertEqual(status, 0, msg = msg)
45 47
@@ -54,17 +56,17 @@ class LogrotateTest(OERuntimeTestCase):
54 msg = ('Could not write to /etc/logrotate.d/logrotate_testfile') 56 msg = ('Could not write to /etc/logrotate.d/logrotate_testfile')
55 self.assertEqual(status, 0, msg = msg) 57 self.assertEqual(status, 0, msg = msg)
56 58
57 status, output = self.target.run('echo "create \n olddir $HOME/logrotate_dir \n include /etc/logrotate.d/logrotate_testfile" > /tmp/logrotate-test2.conf') 59 status, output = self.target.run('echo "create \n olddir /var/log//logrotate_dir \n include /etc/logrotate.d/logrotate_testfile" > /tmp/logrotate-test2.conf')
58 msg = ('Could not write to /tmp/logrotate_test2.conf') 60 msg = ('Could not write to /tmp/logrotate_test2.conf')
59 self.assertEqual(status, 0, msg = msg) 61 self.assertEqual(status, 0, msg = msg)
60 62
61 status, output = self.target.run('find $HOME/logrotate_dir -type f | grep logrotate_testfile.1') 63 status, output = self.target.run('find /var/log//logrotate_dir -type f | grep logrotate_testfile.1')
62 msg = ('A rotated log for logrotate_testfile is already present in logrotate_dir') 64 msg = ('A rotated log for logrotate_testfile is already present in logrotate_dir')
63 self.assertEqual(status, 1, msg = msg) 65 self.assertEqual(status, 1, msg = msg)
64 66
65 # If logrotate fails to rotate the log, view the verbose output of logrotate instead of just listing the files in olddir 67 # If logrotate fails to rotate the log, view the verbose output of logrotate instead of just listing the files in olddir
66 _, logrotate_output = self.target.run('logrotate -vf /tmp/logrotate-test2.conf') 68 _, logrotate_output = self.target.run('logrotate -vf /tmp/logrotate-test2.conf')
67 status, _ = self.target.run('find $HOME/logrotate_dir -type f | grep logrotate_testfile.1') 69 status, _ = self.target.run('find /var/log//logrotate_dir -type f | grep logrotate_testfile.1')
68 msg = ('logrotate did not successfully rotate the logrotate_test log. Output from logrotate -vf: \n%s' % (logrotate_output)) 70 msg = ('logrotate did not successfully rotate the logrotate_test log. Output from logrotate -vf: \n%s' % (logrotate_output))
69 self.assertEqual(status, 0, msg = msg) 71 self.assertEqual(status, 0, msg = msg)
70 72
diff --git a/meta/lib/oeqa/runtime/cases/ltp.py b/meta/lib/oeqa/runtime/cases/ltp.py
index a66d5d13d7..f588a93200 100644
--- a/meta/lib/oeqa/runtime/cases/ltp.py
+++ b/meta/lib/oeqa/runtime/cases/ltp.py
@@ -57,37 +57,47 @@ class LtpTestBase(OERuntimeTestCase):
57 57
58class LtpTest(LtpTestBase): 58class LtpTest(LtpTestBase):
59 59
60 ltp_groups = ["math", "syscalls", "dio", "io", "mm", "ipc", "sched", "nptl", "pty", "containers", "controllers", "filecaps", "cap_bounds", "fcntl-locktests", "connectors", "commands", "net.ipv6_lib", "input","fs_perms_simple"] 60 ltp_groups = ["math", "syscalls", "dio", "io", "mm", "ipc", "sched", "nptl", "pty", "containers", "controllers", "filecaps", "cap_bounds", "fcntl-locktests", "commands", "net.ipv6_lib", "input","fs_perms_simple", "cve", "crypto", "ima", "net.nfs", "net_stress.ipsec_icmp", "net.ipv6", "numa", "uevent", "ltp-aiodio.part1", "ltp-aiodio.part2", "ltp-aiodio.part3", "ltp-aiodio.part4"]
61 61
62 ltp_fs = ["fs", "fsx", "fs_bind"] 62 ltp_fs = ["fs", "fs_bind"]
63 # skip kernel cpuhotplug 63 # skip kernel cpuhotplug
64 ltp_kernel = ["power_management_tests", "hyperthreading ", "kernel_misc", "hugetlb"] 64 ltp_kernel = ["power_management_tests", "hyperthreading ", "kernel_misc", "hugetlb"]
65 ltp_groups += ltp_fs 65 ltp_groups += ltp_fs
66 66
67 def runltp(self, ltp_group): 67 def runltp(self, ltp_group):
68 cmd = '/opt/ltp/runltp -f %s -p -q -r /opt/ltp -l /opt/ltp/results/%s -I 1 -d /opt/ltp' % (ltp_group, ltp_group) 68 # LTP appends to log files, so ensure we start with a clean log
69 self.target.deleteFiles("/opt/ltp/results/", ltp_group)
70
71 cmd = '/opt/ltp/runltp -f %s -q -r /opt/ltp -l /opt/ltp/results/%s -I 1 -d /opt/ltp' % (ltp_group, ltp_group)
72
69 starttime = time.time() 73 starttime = time.time()
70 (status, output) = self.target.run(cmd) 74 (status, output) = self.target.run(cmd, timeout=1200)
71 endtime = time.time() 75 endtime = time.time()
72 76
77 # status of 1 is 'just' tests failing. 255 likely was a command output timeout
78 if status and status != 1:
79 msg = 'Command %s returned exit code %s' % (cmd, status)
80 self.target.logger.warning(msg)
81
82 # Write the console log to disk for convenience
73 with open(os.path.join(self.ltptest_log_dir, "%s-raw.log" % ltp_group), 'w') as f: 83 with open(os.path.join(self.ltptest_log_dir, "%s-raw.log" % ltp_group), 'w') as f:
74 f.write(output) 84 f.write(output)
75 85
86 # Also put the console log into the test result JSON
76 self.extras['ltpresult.rawlogs']['log'] = self.extras['ltpresult.rawlogs']['log'] + output 87 self.extras['ltpresult.rawlogs']['log'] = self.extras['ltpresult.rawlogs']['log'] + output
77 88
78 # copy nice log from DUT 89 # Copy the machine-readable test results locally so we can parse it
79 dst = os.path.join(self.ltptest_log_dir, "%s" % ltp_group ) 90 dst = os.path.join(self.ltptest_log_dir, ltp_group)
80 remote_src = "/opt/ltp/results/%s" % ltp_group 91 remote_src = "/opt/ltp/results/%s" % ltp_group
81 (status, output) = self.target.copyFrom(remote_src, dst, True) 92 (status, output) = self.target.copyFrom(remote_src, dst, True)
82 msg = 'File could not be copied. Output: %s' % output
83 if status: 93 if status:
94 msg = 'File could not be copied. Output: %s' % output
84 self.target.logger.warning(msg) 95 self.target.logger.warning(msg)
85 96
86 parser = LtpParser() 97 parser = LtpParser()
87 results, sections = parser.parse(dst) 98 results, sections = parser.parse(dst)
88 99
89 runtime = int(endtime-starttime) 100 sections['duration'] = int(endtime-starttime)
90 sections['duration'] = runtime
91 self.sections[ltp_group] = sections 101 self.sections[ltp_group] = sections
92 102
93 failed_tests = {} 103 failed_tests = {}
diff --git a/meta/lib/oeqa/runtime/cases/ltp_stress.py b/meta/lib/oeqa/runtime/cases/ltp_stress.py
index 2445ffbc93..ce6f4bf59d 100644
--- a/meta/lib/oeqa/runtime/cases/ltp_stress.py
+++ b/meta/lib/oeqa/runtime/cases/ltp_stress.py
@@ -89,8 +89,7 @@ class LtpStressTest(LtpStressBase):
89 89
90 # LTP stress runtime tests 90 # LTP stress runtime tests
91 # 91 #
92 @skipIfQemu('qemuall', 'Test only runs on real hardware') 92 @skipIfQemu()
93
94 @OETestDepends(['ssh.SSHTest.test_ssh']) 93 @OETestDepends(['ssh.SSHTest.test_ssh'])
95 @OEHasPackage(["ltp"]) 94 @OEHasPackage(["ltp"])
96 def test_ltp_stress(self): 95 def test_ltp_stress(self):
diff --git a/meta/lib/oeqa/runtime/cases/maturin.py b/meta/lib/oeqa/runtime/cases/maturin.py
new file mode 100644
index 0000000000..4e6384fe5e
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/maturin.py
@@ -0,0 +1,58 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os
8
9from oeqa.runtime.case import OERuntimeTestCase
10from oeqa.core.decorator.depends import OETestDepends
11from oeqa.runtime.decorator.package import OEHasPackage
12
13
14class MaturinTest(OERuntimeTestCase):
15 @OETestDepends(['ssh.SSHTest.test_ssh', 'python.PythonTest.test_python3'])
16 @OEHasPackage(['python3-maturin'])
17 def test_maturin_list_python(self):
18 status, output = self.target.run("maturin list-python")
19 self.assertEqual(status, 0)
20 _, py_major = self.target.run("python3 -c 'import sys; print(sys.version_info.major)'")
21 _, py_minor = self.target.run("python3 -c 'import sys; print(sys.version_info.minor)'")
22 python_version = "%s.%s" % (py_major, py_minor)
23 self.assertEqual(output, "🐍 1 python interpreter found:\n"
24 " - CPython %s at /usr/bin/python%s" % (python_version, python_version))
25
26
27class MaturinDevelopTest(OERuntimeTestCase):
28 @classmethod
29 def setUp(cls):
30 dst = '/tmp'
31 src = os.path.join(cls.tc.files_dir, "maturin/guessing-game")
32 cls.tc.target.copyTo(src, dst)
33
34 @classmethod
35 def tearDown(cls):
36 cls.tc.target.run('rm -rf %s' % '/tmp/guessing-game/target')
37
38 @OETestDepends(['ssh.SSHTest.test_ssh', 'python.PythonTest.test_python3'])
39 @OEHasPackage(['python3-maturin'])
40 def test_maturin_develop(self):
41 """
42 This test case requires:
43 (1) that a .venv can be created.
44 (2) DNS nameserver to resolve crate URIs for fetching
45 (3) a functional 'rustc' and 'cargo'
46 """
47 targetdir = os.path.join("/tmp", "guessing-game")
48 self.target.run("cd %s; python3 -m venv .venv" % targetdir)
49 self.target.run("echo 'nameserver 8.8.8.8' > /etc/resolv.conf")
50 cmd = "cd %s; maturin develop" % targetdir
51 status, output = self.target.run(cmd)
52 self.assertRegex(output, r"🔗 Found pyo3 bindings with abi3 support for Python ≥ 3.8")
53 self.assertRegex(output, r"🐍 Not using a specific python interpreter")
54 self.assertRegex(output, r"📡 Using build options features from pyproject.toml")
55 self.assertRegex(output, r"Compiling guessing-game v0.1.0")
56 self.assertRegex(output, r"📦 Built wheel for abi3 Python ≥ 3.8")
57 self.assertRegex(output, r"🛠 Installed guessing-game-0.1.0")
58 self.assertEqual(status, 0)
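The list-python assertion builds its expected output from the target interpreter's own major.minor version, which also appears in the interpreter path. For example, on an image shipping Python 3.12 (an assumed version; the test reads the real one from the target) the expected string works out as:

    python_version = "3.12"   # example; the test queries sys.version_info on the target
    expected = ("🐍 1 python interpreter found:\n"
                " - CPython %s at /usr/bin/python%s" % (python_version, python_version))
    print(expected)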
diff --git a/meta/lib/oeqa/runtime/cases/multilib.py b/meta/lib/oeqa/runtime/cases/multilib.py
index 0d1b9ae2c9..68556e45c5 100644
--- a/meta/lib/oeqa/runtime/cases/multilib.py
+++ b/meta/lib/oeqa/runtime/cases/multilib.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/oe_syslog.py b/meta/lib/oeqa/runtime/cases/oe_syslog.py
index f3c2bedbaf..adb876160d 100644
--- a/meta/lib/oeqa/runtime/cases/oe_syslog.py
+++ b/meta/lib/oeqa/runtime/cases/oe_syslog.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -114,18 +116,23 @@ class SyslogTestConfig(OERuntimeTestCase):
114 @OETestDepends(['oe_syslog.SyslogTestConfig.test_syslog_logger']) 116 @OETestDepends(['oe_syslog.SyslogTestConfig.test_syslog_logger'])
115 @OEHasPackage(["busybox-syslog"]) 117 @OEHasPackage(["busybox-syslog"])
116 @skipIfDataVar('VIRTUAL-RUNTIME_init_manager', 'systemd', 118 @skipIfDataVar('VIRTUAL-RUNTIME_init_manager', 'systemd',
117 'Not appropiate for systemd image') 119 'Not appropriate for systemd image')
118 def test_syslog_startup_config(self): 120 def test_syslog_startup_config(self):
119 cmd = 'echo "LOGFILE=/var/log/test" >> /etc/syslog-startup.conf' 121 cmd = 'echo "LOGFILE=/var/log/test" >> /etc/syslog-startup.conf'
120 self.target.run(cmd) 122 self.target.run(cmd)
121 123
122 self.test_syslog_restart() 124 self.test_syslog_restart()
123 125
124 cmd = 'logger foobar && grep foobar /var/log/test' 126 cmd = 'logger foobar'
125 status,output = self.target.run(cmd) 127 status, output = self.target.run(cmd)
126 msg = 'Test log string not found. Output: %s ' % output 128 msg = 'Logger command failed, %s. Output: %s ' % (status, output)
127 self.assertEqual(status, 0, msg=msg) 129 self.assertEqual(status, 0, msg=msg)
128 130
131 cmd = 'cat /var/log/test'
132 status, output = self.target.run(cmd)
133 if "foobar" not in output or status:
134 self.fail("'foobar' not found in logfile, status %s, contents %s" % (status, output))
135
129 cmd = "sed -i 's#LOGFILE=/var/log/test##' /etc/syslog-startup.conf" 136 cmd = "sed -i 's#LOGFILE=/var/log/test##' /etc/syslog-startup.conf"
130 self.target.run(cmd) 137 self.target.run(cmd)
131 self.test_syslog_restart() 138 self.test_syslog_restart()
diff --git a/meta/lib/oeqa/runtime/cases/opkg.py b/meta/lib/oeqa/runtime/cases/opkg.py
index 9cfee1cd88..a29c93e59a 100644
--- a/meta/lib/oeqa/runtime/cases/opkg.py
+++ b/meta/lib/oeqa/runtime/cases/opkg.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/pam.py b/meta/lib/oeqa/runtime/cases/pam.py
index a482ded945..b3e8b56c3c 100644
--- a/meta/lib/oeqa/runtime/cases/pam.py
+++ b/meta/lib/oeqa/runtime/cases/pam.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-common.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-common.txt
new file mode 100644
index 0000000000..f91abbc941
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-common.txt
@@ -0,0 +1,62 @@
1# Xserver explains what the short codes mean
2(WW) warning, (EE) error, (NI) not implemented, (??) unknown.
3
4# Xserver warns if compiled with ACPI but no acpid running
5Open ACPI failed (/var/run/acpid.socket) (No such file or directory)
6
7# Some machines (eg qemux86) don't enable PAE (they probably should though)
8NX (Execute Disable) protection cannot be enabled: non-PAE kernel!
9
10# Connman's pacrunner warns if external connectivity isn't available
11Failed to find URL:http://ipv4.connman.net/online/status.html
12Failed to find URL:http://ipv6.connman.net/online/status.html
13
14# x86 on 6.6+ outputs this message, it is informational, not an error
15ACPI: _OSC evaluation for CPUs failed, trying _PDC
16
17# These should be reviewed to see if they are still needed
18dma timeout
19can\'t add hid device:
20usbhid: probe of
21_OSC failed (AE_ERROR)
22_OSC failed (AE_SUPPORT)
23AE_ALREADY_EXISTS
24ACPI _OSC request failed (AE_SUPPORT)
25can\'t disable ASPM
26Failed to load module "vesa"
27Failed to load module "modesetting"
28Failed to load module "glx"
29Failed to load module "fbdev"
30Failed to load module "ati"
31[drm] Cannot find any crtc or sizes
32_OSC failed (AE_NOT_FOUND); disabling ASPM
33hd.: possibly failed opcode
34NETLINK INITIALIZATION FAILED
35kernel: Cannot find map file
36omap_hwmod: debugss: _wait_target_disable failed
37VGA arbiter: cannot open kernel arbiter, no multi-card support
38Online check failed for
39netlink init failed
40Fast TSC calibration
41controller can't do DEVSLP, turning off
42stmmac_dvr_probe: warning: cannot get CSR clock
43error: couldn\'t mount because of unsupported optional features
44GPT: Use GNU Parted to correct GPT errors
45Cannot set xattr user.Librepo.DownloadInProgress
46Failed to read /var/lib/nfs/statd/state: Success
47error retry time-out =
48logind: cannot setup systemd-logind helper (-61), using legacy fallback
49Failed to rename network interface
50Failed to process device, ignoring: Device or resource busy
51Cannot find a map file
52[rdrand]: Initialization Failed
53[rndr ]: Initialization Failed
54[pulseaudio] authkey.c: Failed to open cookie file
55[pulseaudio] authkey.c: Failed to load authentication key
56was skipped because of a failed condition check
57was skipped because all trigger condition checks failed
58xf86OpenConsole: Switching VT failed
59Failed to read LoaderConfigTimeoutOneShot variable, ignoring: Operation not supported
60Failed to read LoaderEntryOneShot variable, ignoring: Operation not supported
61Direct firmware load for regulatory.db
62failed to load regulatory.db
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-mipsarch.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-mipsarch.txt
new file mode 100644
index 0000000000..2c0bd9a247
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-mipsarch.txt
@@ -0,0 +1,2 @@
1# These should be reviewed to see if they are still needed
2cacheinfo: Failed to find cpu0 device node
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuall.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuall.txt
new file mode 100644
index 0000000000..b0c0fc9ddf
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuall.txt
@@ -0,0 +1,27 @@
1# psplash
2FBIOPUT_VSCREENINFO failed, double buffering disabled
3
4# PCI host bridge to bus 0000:00
5# pci_bus 0000:00: root bus resource [mem 0x10000000-0x17ffffff]
6# pci_bus 0000:00: root bus resource [io 0x1000-0x1fffff]
7# pci_bus 0000:00: No busn resource found for root bus, will use [bus 00-ff]
8# pci 0000:00:00.0: [2046:ab11] type 00 class 0x100000
9# pci 0000:00:00.0: [Firmware Bug]: reg 0x10: invalid BAR (can't size)
10# pci 0000:00:00.0: [Firmware Bug]: reg 0x14: invalid BAR (can't size)
11# pci 0000:00:00.0: [Firmware Bug]: reg 0x18: invalid BAR (can't size)
12# pci 0000:00:00.0: [Firmware Bug]: reg 0x1c: invalid BAR (can't size)
13# pci 0000:00:00.0: [Firmware Bug]: reg 0x20: invalid BAR (can't size)
14# pci 0000:00:00.0: [Firmware Bug]: reg 0x24: invalid BAR (can't size)
15invalid BAR (can't size)
16
17# These should be reviewed to see if they are still needed
18wrong ELF class
19fail to add MMCONFIG information, can't access extended PCI configuration space under this bridge
20can't claim BAR
21amd_nb: Cannot enumerate AMD northbridges
22tsc: HPET/PMTIMER calibration failed
23modeset(0): Failed to initialize the DRI2 extension
24glamor initialization failed
25blk_update_request: I/O error, dev fd0, sector 0 op 0x0:(READ)
26floppy: error
27failed to IDENTIFY (I/O error, err_mask=0x4)
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuarm64.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuarm64.txt
new file mode 100644
index 0000000000..260cdde620
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuarm64.txt
@@ -0,0 +1,6 @@
1# These should be reviewed to see if they are still needed
2Fatal server error:
3(EE) Server terminated with error (1). Closing log file.
4dmi: Firmware registration failed.
5irq: type mismatch, failed to map hwirq-27 for /intc
6logind: failed to get session seat \ No newline at end of file
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuarmv5.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuarmv5.txt
new file mode 100644
index 0000000000..ed91107b7d
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuarmv5.txt
@@ -0,0 +1,19 @@
1# Error code 2 is JENT_ECOARSETIME: Timer too coarse for RNG.
2jitterentropy: Initialization failed with host not compliant with requirements: 2
3
4# These should be reviewed to see if they are still needed
5mmci-pl18x: probe of fpga:05 failed with error -22
6mmci-pl18x: probe of fpga:0b failed with error -22
7
8OF: amba_device_add() failed (-19) for /amba/smc@10100000
9OF: amba_device_add() failed (-19) for /amba/mpmc@10110000
10OF: amba_device_add() failed (-19) for /amba/sctl@101e0000
11OF: amba_device_add() failed (-19) for /amba/watchdog@101e1000
12OF: amba_device_add() failed (-19) for /amba/sci@101f0000
13OF: amba_device_add() failed (-19) for /amba/spi@101f4000
14OF: amba_device_add() failed (-19) for /amba/ssp@101f4000
15OF: amba_device_add() failed (-19) for /amba/fpga/sci@a000
16Failed to initialize '/amba/timer@101e3000': -22
17
18clcd-pl11x: probe of 10120000.display failed with error -2
19arm-charlcd 10008000.lcd: error -ENXIO: IRQ index 0 not found
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuppc.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuppc.txt
new file mode 100644
index 0000000000..d9b58b58f1
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuppc.txt
@@ -0,0 +1,6 @@
1# These should be reviewed to see if they are still needed
2PCI 0000:00 Cannot reserve Legacy IO [io 0x0000-0x0fff]
3host side 80-wire cable detection failed, limiting max speed
4mode "640x480" test failed
5can't handle BAR above 4GB
6Cannot reserve Legacy IO \ No newline at end of file
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuppc64.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuppc64.txt
new file mode 100644
index 0000000000..b736a2aeb7
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemuppc64.txt
@@ -0,0 +1,4 @@
1# These should be reviewed to see if they are still needed
2vio vio: uevent: failed to send synthetic uevent
3synth uevent: /devices/vio: failed to send uevent
4PCI 0000:00 Cannot reserve Legacy IO [io 0x10000-0x10fff] \ No newline at end of file
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemux86.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemux86.txt
new file mode 100644
index 0000000000..ebb76f1221
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-qemux86.txt
@@ -0,0 +1,2 @@
1# These should be reviewed to see if they are still needed
2Failed to access perfctr msr (MSR
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-x86.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-x86.txt
new file mode 100644
index 0000000000..5985247daf
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-x86.txt
@@ -0,0 +1,10 @@
1# These should be reviewed to see if they are still needed
2[drm:psb_do_init] *ERROR* Debug is
3wrong ELF class
4Could not enable PowerButton event
5probe of LNXPWRBN:00 failed with error -22
6pmd_set_huge: Cannot satisfy
7failed to setup card detect gpio
8amd_nb: Cannot enumerate AMD northbridges
9failed to retrieve link info, disabling eDP
10Direct firmware load for iwlwifi
diff --git a/meta/lib/oeqa/runtime/cases/parselogs-ignores-x86_64.txt b/meta/lib/oeqa/runtime/cases/parselogs-ignores-x86_64.txt
new file mode 120000
index 0000000000..404e384c32
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/parselogs-ignores-x86_64.txt
@@ -0,0 +1 @@
parselogs-ignores-x86.txt \ No newline at end of file
diff --git a/meta/lib/oeqa/runtime/cases/parselogs.py b/meta/lib/oeqa/runtime/cases/parselogs.py
index a1791b5cca..6966923c94 100644
--- a/meta/lib/oeqa/runtime/cases/parselogs.py
+++ b/meta/lib/oeqa/runtime/cases/parselogs.py
@@ -1,204 +1,49 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
7import collections
5import os 8import os
9import sys
6 10
7from subprocess import check_output
8from shutil import rmtree 11from shutil import rmtree
9from oeqa.runtime.case import OERuntimeTestCase 12from oeqa.runtime.case import OERuntimeTestCase
10from oeqa.core.decorator.depends import OETestDepends 13from oeqa.core.decorator.depends import OETestDepends
11from oeqa.core.decorator.data import skipIfDataVar
12from oeqa.runtime.decorator.package import OEHasPackage
13
14#in the future these lists could be moved outside of module
15errors = ["error", "cannot", "can\'t", "failed"]
16
17common_errors = [
18 "(WW) warning, (EE) error, (NI) not implemented, (??) unknown.",
19 "dma timeout",
20 "can\'t add hid device:",
21 "usbhid: probe of ",
22 "_OSC failed (AE_ERROR)",
23 "_OSC failed (AE_SUPPORT)",
24 "AE_ALREADY_EXISTS",
25 "ACPI _OSC request failed (AE_SUPPORT)",
26 "can\'t disable ASPM",
27 "Failed to load module \"vesa\"",
28 "Failed to load module vesa",
29 "Failed to load module \"modesetting\"",
30 "Failed to load module modesetting",
31 "Failed to load module \"glx\"",
32 "Failed to load module \"fbdev\"",
33 "Failed to load module fbdev",
34 "Failed to load module glx",
35 "[drm] Cannot find any crtc or sizes - going 1024x768",
36 "_OSC failed (AE_NOT_FOUND); disabling ASPM",
37 "Open ACPI failed (/var/run/acpid.socket) (No such file or directory)",
38 "NX (Execute Disable) protection cannot be enabled: non-PAE kernel!",
39 "hd.: possibly failed opcode",
40 'NETLINK INITIALIZATION FAILED',
41 'kernel: Cannot find map file',
42 'omap_hwmod: debugss: _wait_target_disable failed',
43 'VGA arbiter: cannot open kernel arbiter, no multi-card support',
44 'Failed to find URL:http://ipv4.connman.net/online/status.html',
45 'Online check failed for',
46 'netlink init failed',
47 'Fast TSC calibration',
48 "BAR 0-9",
49 "Failed to load module \"ati\"",
50 "controller can't do DEVSLP, turning off",
51 "stmmac_dvr_probe: warning: cannot get CSR clock",
52 "error: couldn\'t mount because of unsupported optional features",
53 "GPT: Use GNU Parted to correct GPT errors",
54 "Cannot set xattr user.Librepo.DownloadInProgress",
55 "Failed to read /var/lib/nfs/statd/state: Success",
56 "error retry time-out =",
57 "logind: cannot setup systemd-logind helper (-61), using legacy fallback",
58 "Failed to rename network interface",
59 "Failed to process device, ignoring: Device or resource busy",
60 "Cannot find a map file",
61 "[rdrand]: Initialization Failed",
62 "[pulseaudio] authkey.c: Failed to open cookie file",
63 "[pulseaudio] authkey.c: Failed to load authentication key",
64 ]
65 14
66video_related = [ 15# importlib.resources.open_text in Python <3.10 doesn't search all directories
67] 16# when a package is split across multiple directories. Until we can rely on
17# 3.10+, reimplement the searching logic.
18if sys.version_info < (3, 10):
19 def _open_text(package, resource):
20 import importlib, pathlib
21 module = importlib.import_module(package)
22 for path in module.__path__:
23 candidate = pathlib.Path(path) / resource
24 if candidate.exists():
25 return candidate.open(encoding='utf-8')
26 raise FileNotFoundError
27else:
28 from importlib.resources import open_text as _open_text
68 29
69x86_common = [
70 '[drm:psb_do_init] *ERROR* Debug is',
71 'wrong ELF class',
72 'Could not enable PowerButton event',
73 'probe of LNXPWRBN:00 failed with error -22',
74 'pmd_set_huge: Cannot satisfy',
75 'failed to setup card detect gpio',
76 'amd_nb: Cannot enumerate AMD northbridges',
77 'failed to retrieve link info, disabling eDP',
78 'Direct firmware load for iwlwifi',
79 'Direct firmware load for regulatory.db',
80 'failed to load regulatory.db',
81] + common_errors
82 30
83qemux86_common = [ 31class ParseLogsTest(OERuntimeTestCase):
84 'wrong ELF class',
85 "fail to add MMCONFIG information, can't access extended PCI configuration space under this bridge.",
86 "can't claim BAR ",
87 'amd_nb: Cannot enumerate AMD northbridges',
88 'tsc: HPET/PMTIMER calibration failed',
89 "modeset(0): Failed to initialize the DRI2 extension",
90 "glamor initialization failed",
91] + common_errors
92 32
93ignore_errors = { 33 # Which log files should be collected
94 'default' : common_errors, 34 log_locations = ["/var/log/", "/var/log/dmesg", "/tmp/dmesg_output.log"]
95 'qemux86' : [
96 'Failed to access perfctr msr (MSR',
97 'pci 0000:00:00.0: [Firmware Bug]: reg 0x..: invalid BAR (can\'t size)',
98 ] + qemux86_common,
99 'qemux86-64' : qemux86_common,
100 'qemumips' : [
101 'Failed to load module "glx"',
102 'pci 0000:00:00.0: [Firmware Bug]: reg 0x..: invalid BAR (can\'t size)',
103 'cacheinfo: Failed to find cpu0 device node',
104 ] + common_errors,
105 'qemumips64' : [
106 'pci 0000:00:00.0: [Firmware Bug]: reg 0x..: invalid BAR (can\'t size)',
107 'cacheinfo: Failed to find cpu0 device node',
108 ] + common_errors,
109 'qemuppc' : [
110 'PCI 0000:00 Cannot reserve Legacy IO [io 0x0000-0x0fff]',
111 'host side 80-wire cable detection failed, limiting max speed',
112 'mode "640x480" test failed',
113 'Failed to load module "glx"',
114 'can\'t handle BAR above 4GB',
115 'Cannot reserve Legacy IO',
116 ] + common_errors,
117 'qemuarm' : [
118 'mmci-pl18x: probe of fpga:05 failed with error -22',
119 'mmci-pl18x: probe of fpga:0b failed with error -22',
120 'Failed to load module "glx"',
121 'OF: amba_device_add() failed (-19) for /amba/smc@10100000',
122 'OF: amba_device_add() failed (-19) for /amba/mpmc@10110000',
123 'OF: amba_device_add() failed (-19) for /amba/sctl@101e0000',
124 'OF: amba_device_add() failed (-19) for /amba/watchdog@101e1000',
125 'OF: amba_device_add() failed (-19) for /amba/sci@101f0000',
126 'OF: amba_device_add() failed (-19) for /amba/spi@101f4000',
127 'OF: amba_device_add() failed (-19) for /amba/ssp@101f4000',
128 'OF: amba_device_add() failed (-19) for /amba/fpga/sci@a000',
129 'Failed to initialize \'/amba/timer@101e3000\': -22',
130 'jitterentropy: Initialization failed with host not compliant with requirements: 2',
131 ] + common_errors,
132 'qemuarm64' : [
133 'Fatal server error:',
134 '(EE) Server terminated with error (1). Closing log file.',
135 'dmi: Firmware registration failed.',
136 'irq: type mismatch, failed to map hwirq-27 for /intc',
137 'logind: failed to get session seat',
138 ] + common_errors,
139 'intel-core2-32' : [
140 'ACPI: No _BQC method, cannot determine initial brightness',
141 '[Firmware Bug]: ACPI: No _BQC method, cannot determine initial brightness',
142 '(EE) Failed to load module "psb"',
143 '(EE) Failed to load module psb',
144 '(EE) Failed to load module "psbdrv"',
145 '(EE) Failed to load module psbdrv',
146 '(EE) open /dev/fb0: No such file or directory',
147 '(EE) AIGLX: reverting to software rendering',
148 'dmi: Firmware registration failed.',
149 'ioremap error for 0x78',
150 ] + x86_common,
151 'intel-corei7-64' : [
152 'can\'t set Max Payload Size to 256',
153 'intel_punit_ipc: can\'t request region for resource',
154 '[drm] parse error at position 4 in video mode \'efifb\'',
155 'ACPI Error: Could not enable RealTimeClock event',
156 'ACPI Warning: Could not enable fixed event - RealTimeClock',
157 'hci_intel INT33E1:00: Unable to retrieve gpio',
158 'hci_intel: probe of INT33E1:00 failed',
159 'can\'t derive routing for PCI INT A',
160 'failed to read out thermal zone',
161 'Bluetooth: hci0: Setting Intel event mask failed',
162 'ttyS2 - failed to request DMA',
163 'Bluetooth: hci0: Failed to send firmware data (-38)',
164 'atkbd serio0: Failed to enable keyboard on isa0060/serio0',
165 ] + x86_common,
166 'genericx86' : x86_common,
167 'genericx86-64' : [
168 'Direct firmware load for i915',
169 'Failed to load firmware i915',
170 'Failed to fetch GuC',
171 'Failed to initialize GuC',
172 'Failed to load DMC firmware',
173 'The driver is built-in, so to load the firmware you need to',
174 ] + x86_common,
175 'edgerouter' : [
176 'not creating \'/sys/firmware/fdt\'',
177 'Failed to find cpu0 device node',
178 'Fatal server error:',
179 'Server terminated with error',
180 ] + common_errors,
181 'beaglebone-yocto' : [
182 'Direct firmware load for regulatory.db',
183 'failed to load regulatory.db',
184 'l4_wkup_cm',
185 'Failed to load module "glx"',
186 'Failed to make EGL context current',
187 'glamor initialization failed',
188 ] + common_errors,
189}
190 35
191log_locations = ["/var/log/","/var/log/dmesg", "/tmp/dmesg_output.log"] 36 # The keywords that identify error messages in the log files
37 errors = ["error", "cannot", "can't", "failed"]
192 38
193class ParseLogsTest(OERuntimeTestCase): 39 # A list of error messages that should be ignored
40 ignore_errors = []
194 41
195 @classmethod 42 @classmethod
196 def setUpClass(cls): 43 def setUpClass(cls):
197 cls.errors = errors
198
199 # When systemd is enabled we need to notice errors on 44 # When systemd is enabled we need to notice errors on
200 # circular dependencies in units. 45 # circular dependencies in units.
201 if 'systemd' in cls.td.get('DISTRO_FEATURES', ''): 46 if 'systemd' in cls.td.get('DISTRO_FEATURES'):
202 cls.errors.extend([ 47 cls.errors.extend([
203 'Found ordering cycle on', 48 'Found ordering cycle on',
204 'Breaking ordering cycle by deleting job', 49 'Breaking ordering cycle by deleting job',
@@ -206,48 +51,22 @@ class ParseLogsTest(OERuntimeTestCase):
206 'Ordering cycle found, skipping', 51 'Ordering cycle found, skipping',
207 ]) 52 ])
208 53
209 cls.ignore_errors = ignore_errors 54 cls.errors = [s.casefold() for s in cls.errors]
210 cls.log_locations = log_locations
211 cls.msg = ''
212 is_lsb, _ = cls.tc.target.run("which LSB_Test.sh")
213 if is_lsb == 0:
214 for machine in cls.ignore_errors:
215 cls.ignore_errors[machine] = cls.ignore_errors[machine] \
216 + video_related
217
218 def getMachine(self):
219 return self.td.get('MACHINE', '')
220
221 def getWorkdir(self):
222 return self.td.get('WORKDIR', '')
223
224 # Get some information on the CPU of the machine to display at the
225 # beginning of the output. This info might be useful in some cases.
226 def getHardwareInfo(self):
227 hwi = ""
228 cmd = ('cat /proc/cpuinfo | grep "model name" | head -n1 | '
229 " awk 'BEGIN{FS=\":\"}{print $2}'")
230 _, cpu_name = self.target.run(cmd)
231
232 cmd = ('cat /proc/cpuinfo | grep "cpu cores" | head -n1 | '
233 "awk {'print $4'}")
234 _, cpu_physical_cores = self.target.run(cmd)
235
236 cmd = 'cat /proc/cpuinfo | grep "processor" | wc -l'
237 _, cpu_logical_cores = self.target.run(cmd)
238
239 _, cpu_arch = self.target.run('uname -m')
240 55
241 hwi += 'Machine information: \n' 56 cls.load_machine_ignores()
242 hwi += '*******************************\n'
243 hwi += 'Machine name: ' + self.getMachine() + '\n'
244 hwi += 'CPU: ' + str(cpu_name) + '\n'
245 hwi += 'Arch: ' + str(cpu_arch)+ '\n'
246 hwi += 'Physical cores: ' + str(cpu_physical_cores) + '\n'
247 hwi += 'Logical cores: ' + str(cpu_logical_cores) + '\n'
248 hwi += '*******************************\n'
249 57
250 return hwi 58 @classmethod
59 def load_machine_ignores(cls):
60 # Add TARGET_ARCH explicitly as not every machine has that in MACHINEOVERRDES (eg qemux86-64)
61 for candidate in ["common", cls.td.get("TARGET_ARCH")] + cls.td.get("MACHINEOVERRIDES").split(":"):
62 try:
63 name = f"parselogs-ignores-{candidate}.txt"
64 for line in _open_text("oeqa.runtime.cases", name):
65 line = line.strip()
66 if line and not line.startswith("#"):
67 cls.ignore_errors.append(line.casefold())
68 except FileNotFoundError:
69 pass
251 70
252 # Go through the log locations provided and if it's a folder 71 # Go through the log locations provided and if it's a folder
253 # create a list with all the .log files in it, if it's a file 72 # create a list with all the .log files in it, if it's a file
@@ -255,23 +74,23 @@ class ParseLogsTest(OERuntimeTestCase):
255 def getLogList(self, log_locations): 74 def getLogList(self, log_locations):
256 logs = [] 75 logs = []
257 for location in log_locations: 76 for location in log_locations:
258 status, _ = self.target.run('test -f ' + str(location)) 77 status, _ = self.target.run('test -f %s' % location)
259 if status == 0: 78 if status == 0:
260 logs.append(str(location)) 79 logs.append(location)
261 else: 80 else:
262 status, _ = self.target.run('test -d ' + str(location)) 81 status, _ = self.target.run('test -d %s' % location)
263 if status == 0: 82 if status == 0:
264 cmd = 'find ' + str(location) + '/*.log -maxdepth 1 -type f' 83 cmd = 'find %s -name \\*.log -maxdepth 1 -type f' % location
265 status, output = self.target.run(cmd) 84 status, output = self.target.run(cmd)
266 if status == 0: 85 if status == 0:
267 output = output.splitlines() 86 output = output.splitlines()
268 for logfile in output: 87 for logfile in output:
269 logs.append(os.path.join(location, str(logfile))) 88 logs.append(os.path.join(location, logfile))
270 return logs 89 return logs
271 90
272 # Copy the log files to be parsed locally 91 # Copy the log files to be parsed locally
273 def transfer_logs(self, log_list): 92 def transfer_logs(self, log_list):
274 workdir = self.getWorkdir() 93 workdir = self.td.get('WORKDIR')
275 self.target_logs = workdir + '/' + 'target_logs' 94 self.target_logs = workdir + '/' + 'target_logs'
276 target_logs = self.target_logs 95 target_logs = self.target_logs
277 if os.path.exists(target_logs): 96 if os.path.exists(target_logs):
@@ -288,65 +107,55 @@ class ParseLogsTest(OERuntimeTestCase):
288 logs = [f for f in dir_files if os.path.isfile(f)] 107 logs = [f for f in dir_files if os.path.isfile(f)]
289 return logs 108 return logs
290 109
291 # Build the grep command to be used with filters and exclusions 110 def get_context(self, lines, index, before=6, after=3):
292 def build_grepcmd(self, errors, ignore_errors, log): 111 """
293 grepcmd = 'grep ' 112 Given a set of lines and the index of the line that is important, return
294 grepcmd += '-Ei "' 113 a number of lines surrounding that line.
295 for error in errors: 114 """
296 grepcmd += '\<' + error + '\>' + '|' 115 last = len(lines)
297 grepcmd = grepcmd[:-1] 116
298 grepcmd += '" ' + str(log) + " | grep -Eiv \'" 117 start = index - before
299 118 end = index + after + 1
300 try: 119
301 errorlist = ignore_errors[self.getMachine()] 120 if start < 0:
302 except KeyError: 121 end -= start
303 self.msg += 'No ignore list found for this machine, using default\n' 122 start = 0
304 errorlist = ignore_errors['default'] 123 if end > last:
305 124 start -= end - last
306 for ignore_error in errorlist: 125 end = last
307 ignore_error = ignore_error.replace('(', '\(') 126
308 ignore_error = ignore_error.replace(')', '\)') 127 return lines[start:end]
309 ignore_error = ignore_error.replace("'", '.') 128
310 ignore_error = ignore_error.replace('?', '\?') 129 def test_get_context(self):
311 ignore_error = ignore_error.replace('[', '\[') 130 """
312 ignore_error = ignore_error.replace(']', '\]') 131 A test case for the test case.
313 ignore_error = ignore_error.replace('*', '\*') 132 """
314 ignore_error = ignore_error.replace('0-9', '[0-9]') 133 lines = list(range(0,10))
315 grepcmd += ignore_error + '|' 134 self.assertEqual(self.get_context(lines, 0, 2, 1), [0, 1, 2, 3])
316 grepcmd = grepcmd[:-1] 135 self.assertEqual(self.get_context(lines, 5, 2, 1), [3, 4, 5, 6])
317 grepcmd += "\'" 136 self.assertEqual(self.get_context(lines, 9, 2, 1), [6, 7, 8, 9])
318 137
319 return grepcmd 138 def parse_logs(self, logs, lines_before=10, lines_after=10):
320 139 """
321 # Grep only the errors so that their context could be collected. 140 Search the log files @logs looking for error lines (marked by
322 # Default context is 10 lines before and after the error itself 141 @self.errors), ignoring anything listed in @self.ignore_errors.
323 def parse_logs(self, errors, ignore_errors, logs, 142
324 lines_before = 10, lines_after = 10): 143 Returns a dictionary of log filenames to a dictionary of error lines to
325 results = {} 144 the error context (controlled by @lines_before and @lines_after).
326 rez = [] 145 """
327 grep_output = '' 146 results = collections.defaultdict(dict)
328 147
329 for log in logs: 148 for log in logs:
330 result = None 149 with open(log) as f:
331 thegrep = self.build_grepcmd(errors, ignore_errors, log) 150 lines = f.readlines()
332 151
333 try: 152 for i, line in enumerate(lines):
334 result = check_output(thegrep, shell=True).decode('utf-8') 153 line = line.strip()
335 except: 154 line_lower = line.casefold()
336 pass
337 155
338 if result is not None: 156 if any(keyword in line_lower for keyword in self.errors):
339 results[log] = {} 157 if not any(ignore in line_lower for ignore in self.ignore_errors):
340 rez = result.splitlines() 158 results[log][line] = "".join(self.get_context(lines, i, lines_before, lines_after))
341
342 for xrez in rez:
343 try:
344 cmd = ['grep', '-F', xrez, '-B', str(lines_before)]
345 cmd += ['-A', str(lines_after), log]
346 grep_output = check_output(cmd).decode('utf-8')
347 except:
348 pass
349 results[log][xrez]=grep_output
350 159
351 return results 160 return results
352 161
@@ -359,17 +168,18 @@ class ParseLogsTest(OERuntimeTestCase):
359 def test_parselogs(self): 168 def test_parselogs(self):
360 self.write_dmesg() 169 self.write_dmesg()
361 log_list = self.get_local_log_list(self.log_locations) 170 log_list = self.get_local_log_list(self.log_locations)
362 result = self.parse_logs(self.errors, self.ignore_errors, log_list) 171 result = self.parse_logs(log_list)
363 print(self.getHardwareInfo()) 172
364 errcount = 0 173 errcount = 0
174 self.msg = ""
365 for log in result: 175 for log in result:
366 self.msg += 'Log: ' + log + '\n' 176 self.msg += 'Log: ' + log + '\n'
367 self.msg += '-----------------------\n' 177 self.msg += '-----------------------\n'
368 for error in result[log]: 178 for error in result[log]:
369 errcount += 1 179 errcount += 1
370 self.msg += 'Central error: ' + str(error) + '\n' 180 self.msg += 'Central error: ' + error + '\n'
371 self.msg += '***********************\n' 181 self.msg += '***********************\n'
372 self.msg += result[str(log)][str(error)] + '\n' 182 self.msg += result[log][error] + '\n'
373 self.msg += '***********************\n' 183 self.msg += '***********************\n'
374 self.msg += '%s errors found in logs.' % errcount 184 self.msg += '%s errors found in logs.' % errcount
375 self.assertEqual(errcount, 0, msg=self.msg) 185 self.assertEqual(errcount, 0, msg=self.msg)
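The rewritten parse_logs() above replaces the generated grep pipelines with plain Python string matching: each log line is casefolded, checked against the error keywords, filtered through the ignore list, and stored together with a context window built by get_context(). A minimal standalone sketch of that keyword-scan approach follows; scan_log() and its arguments are illustrative only, not part of the patch, and it simply clamps the window at the file edges instead of shifting it the way get_context() does.

import collections

def scan_log(path, errors, ignore_errors, before=10, after=10):
    """Return {error line: surrounding context} for one log file.
    `errors` and `ignore_errors` are expected to be lowercase keywords."""
    results = collections.OrderedDict()
    with open(path, errors="replace") as f:
        lines = f.readlines()
    for i, line in enumerate(lines):
        text = line.strip().casefold()
        if any(kw in text for kw in errors) and not any(ign in text for ign in ignore_errors):
            # clamp the context window to the file boundaries
            start = max(0, i - before)
            end = min(len(lines), i + after + 1)
            results[line.strip()] = "".join(lines[start:end])
    return results

# example: hits = scan_log('/tmp/dmesg_output.log', ['error', 'oops'], ['uncorrectable error'])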
diff --git a/meta/lib/oeqa/runtime/cases/perl.py b/meta/lib/oeqa/runtime/cases/perl.py
index 2c6b3b7846..f11b300836 100644
--- a/meta/lib/oeqa/runtime/cases/perl.py
+++ b/meta/lib/oeqa/runtime/cases/perl.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/ping.py b/meta/lib/oeqa/runtime/cases/ping.py
index f6603f75ec..f72460e7f3 100644
--- a/meta/lib/oeqa/runtime/cases/ping.py
+++ b/meta/lib/oeqa/runtime/cases/ping.py
@@ -1,11 +1,15 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5from subprocess import Popen, PIPE 7from subprocess import Popen, PIPE
8from time import sleep
6 9
7from oeqa.runtime.case import OERuntimeTestCase 10from oeqa.runtime.case import OERuntimeTestCase
8from oeqa.core.decorator.oetimeout import OETimeout 11from oeqa.core.decorator.oetimeout import OETimeout
12from oeqa.core.exception import OEQATimeoutError
9 13
10class PingTest(OERuntimeTestCase): 14class PingTest(OERuntimeTestCase):
11 15
@@ -13,14 +17,19 @@ class PingTest(OERuntimeTestCase):
13 def test_ping(self): 17 def test_ping(self):
14 output = '' 18 output = ''
15 count = 0 19 count = 0
16 while count < 5: 20 self.assertNotEqual(len(self.target.ip), 0, msg="No target IP address set")
17 cmd = 'ping -c 1 %s' % self.target.ip 21 try:
18 proc = Popen(cmd, shell=True, stdout=PIPE) 22 while count < 5:
19 output += proc.communicate()[0].decode('utf-8') 23 cmd = 'ping -c 1 %s' % self.target.ip
20 if proc.poll() == 0: 24 proc = Popen(cmd, shell=True, stdout=PIPE)
21 count += 1 25 output += proc.communicate()[0].decode('utf-8')
22 else: 26 if proc.poll() == 0:
23 count = 0 27 count += 1
28 else:
29 count = 0
30 sleep(1)
31 except OEQATimeoutError:
32 self.fail("Ping timeout error for address %s, count %s, output: %s" % (self.target.ip, count, output))
24 msg = ('Expected 5 consecutive successful pings, got %d.\n' 33 msg = ('Expected 5 consecutive successful pings, got %d.\n'
25 'ping output is:\n%s' % (count,output)) 34 'ping output is:\n%s' % (count,output))
26 self.assertEqual(count, 5, msg = msg) 35 self.assertEqual(count, 5, msg = msg)
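The reworked PingTest now insists on five consecutive successful pings, resets the counter whenever one fails, and converts the decorator's OEQATimeoutError into a plain self.fail() carrying the collected output. The consecutive-success loop on its own looks roughly like the host-side sketch below; wait_for_consecutive_pings() is a hypothetical helper without the OEQA plumbing.

import subprocess
import time

def wait_for_consecutive_pings(host, want=5, max_attempts=60):
    """Return True once `want` pings in a row succeed, False if we give up."""
    count = 0
    for _ in range(max_attempts):
        ok = subprocess.run(['ping', '-c', '1', host],
                            stdout=subprocess.DEVNULL).returncode == 0
        count = count + 1 if ok else 0   # any failure resets the streak
        if count >= want:
            return True
        time.sleep(1)
    return False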
diff --git a/meta/lib/oeqa/runtime/cases/ptest.py b/meta/lib/oeqa/runtime/cases/ptest.py
index 0800f3c27f..fbaeb84d00 100644
--- a/meta/lib/oeqa/runtime/cases/ptest.py
+++ b/meta/lib/oeqa/runtime/cases/ptest.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -57,7 +59,7 @@ class PtestRunnerTest(OERuntimeTestCase):
57 ptest_dirs = [ '/usr/lib' ] 59 ptest_dirs = [ '/usr/lib' ]
58 if not libdir in ptest_dirs: 60 if not libdir in ptest_dirs:
59 ptest_dirs.append(libdir) 61 ptest_dirs.append(libdir)
60 status, output = self.target.run('ptest-runner -d \"{}\"'.format(' '.join(ptest_dirs)), 0) 62 status, output = self.target.run('ptest-runner -t 450 -d \"{}\"'.format(' '.join(ptest_dirs)), 0)
61 os.makedirs(ptest_log_dir) 63 os.makedirs(ptest_log_dir)
62 with open(ptest_runner_log, 'w') as f: 64 with open(ptest_runner_log, 'w') as f:
63 f.write(output) 65 f.write(output)
@@ -81,17 +83,20 @@ class PtestRunnerTest(OERuntimeTestCase):
81 83
82 extras['ptestresult.sections'] = sections 84 extras['ptestresult.sections'] = sections
83 85
86 zerolength = []
84 trans = str.maketrans("()", "__") 87 trans = str.maketrans("()", "__")
85 for section in results: 88 for section in results:
86 for test in results[section]: 89 for test in results[section]:
87 result = results[section][test] 90 result = results[section][test]
88 testname = "ptestresult." + (section or "No-section") + "." + "_".join(test.translate(trans).split()) 91 testname = "ptestresult." + (section or "No-section") + "." + "_".join(test.translate(trans).split())
89 extras[testname] = {'status': result} 92 extras[testname] = {'status': result}
93 if not results[section]:
94 zerolength.append(section)
90 95
91 failed_tests = {} 96 failed_tests = {}
92 97
93 for section in sections: 98 for section in sections:
94 if 'exitcode' in sections[section].keys(): 99 if 'exitcode' in sections[section].keys() or 'timeout' in sections[section].keys():
95 failed_tests[section] = sections[section]["log"] 100 failed_tests[section] = sections[section]["log"]
96 101
97 for section in results: 102 for section in results:
@@ -105,7 +110,10 @@ class PtestRunnerTest(OERuntimeTestCase):
105 failmsg = "ERROR: Processes were killed by the OOM Killer:\n%s\n" % output 110 failmsg = "ERROR: Processes were killed by the OOM Killer:\n%s\n" % output
106 111
107 if failed_tests: 112 if failed_tests:
108 failmsg = failmsg + "Failed ptests:\n%s" % pprint.pformat(failed_tests) 113 failmsg = failmsg + "\nFailed ptests:\n%s\n" % pprint.pformat(failed_tests)
114
115 if zerolength:
116 failmsg = failmsg + "\nptests which had no test results:\n%s" % pprint.pformat(zerolength)
109 117
110 if failmsg: 118 if failmsg:
111 self.logger.warning("There were failing ptests.") 119 self.logger.warning("There were failing ptests.")
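Beyond the longer per-test timeout (-t 450), the hunk records two extra failure classes: sections whose parsed log carries an exitcode or timeout marker, and sections that produced no test results at all (zerolength). A small sketch of that classification step, assuming results and sections have the shapes used above (section name to per-test results, and section name to metadata including a 'log' field):

def classify_sections(results, sections):
    """Split ptest sections into failing ones and ones with no results."""
    failed = {name: data['log']
              for name, data in sections.items()
              if 'exitcode' in data or 'timeout' in data}
    zerolength = [name for name, tests in results.items() if not tests]
    return failed, zerolength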
diff --git a/meta/lib/oeqa/runtime/cases/python.py b/meta/lib/oeqa/runtime/cases/python.py
index ec54f1e1db..5d6d133480 100644
--- a/meta/lib/oeqa/runtime/cases/python.py
+++ b/meta/lib/oeqa/runtime/cases/python.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/rpm.py b/meta/lib/oeqa/runtime/cases/rpm.py
index 8e18b426f8..ea5619ffea 100644
--- a/meta/lib/oeqa/runtime/cases/rpm.py
+++ b/meta/lib/oeqa/runtime/cases/rpm.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -49,21 +51,20 @@ class RpmBasicTest(OERuntimeTestCase):
49 msg = 'status: %s. Cannot run rpm -qa: %s' % (status, output) 51 msg = 'status: %s. Cannot run rpm -qa: %s' % (status, output)
50 self.assertEqual(status, 0, msg=msg) 52 self.assertEqual(status, 0, msg=msg)
51 53
52 def check_no_process_for_user(u): 54 def wait_for_no_process_for_user(u, timeout = 120):
53 _, output = self.target.run(self.tc.target_cmds['ps']) 55 timeout_at = time.time() + timeout
54 if u + ' ' in output: 56 while time.time() < timeout_at:
55 return False 57 _, output = self.target.run(self.tc.target_cmds['ps'])
56 else: 58 if u + ' ' not in output:
57 return True 59 return
60 time.sleep(1)
61 user_pss = [ps for ps in output.split("\n") if u + ' ' in ps]
62 msg = "User %s has processes still running: %s" % (u, "\n".join(user_pss))
63 self.fail(msg=msg)
58 64
59 def unset_up_test_user(u): 65 def unset_up_test_user(u):
60 # ensure no test1 process is running 66 # ensure no test1 process is running
61 timeout = time.time() + 30 67 wait_for_no_process_for_user(u)
62 while time.time() < timeout:
63 if check_no_process_for_user(u):
64 break
65 else:
66 time.sleep(1)
67 status, output = self.target.run('userdel -r %s' % u) 68 status, output = self.target.run('userdel -r %s' % u)
68 msg = 'Failed to erase user: %s' % output 69 msg = 'Failed to erase user: %s' % output
69 self.assertTrue(status == 0, msg=msg) 70 self.assertTrue(status == 0, msg=msg)
@@ -79,21 +80,24 @@ class RpmBasicTest(OERuntimeTestCase):
79 80
80class RpmInstallRemoveTest(OERuntimeTestCase): 81class RpmInstallRemoveTest(OERuntimeTestCase):
81 82
82 @classmethod 83 def _find_test_file(self):
83 def setUpClass(cls): 84 pkgarch = self.td['TUNE_PKGARCH'].replace('-', '_')
84 pkgarch = cls.td['TUNE_PKGARCH'].replace('-', '_') 85 rpmdir = os.path.join(self.tc.td['DEPLOY_DIR'], 'rpm', pkgarch)
85 rpmdir = os.path.join(cls.tc.td['DEPLOY_DIR'], 'rpm', pkgarch)
86 # Pick base-passwd-doc as a test file to get installed, because it's small 86 # Pick base-passwd-doc as a test file to get installed, because it's small
87 # and it will always be built for standard targets 87 # and it will always be built for standard targets
88 rpm_doc = 'base-passwd-doc-*.%s.rpm' % pkgarch 88 rpm_doc = 'base-passwd-doc-*.%s.rpm' % pkgarch
89 if not os.path.exists(rpmdir): 89 if not os.path.exists(rpmdir):
90 return 90 self.fail("Rpm directory {} does not exist".format(rpmdir))
91 for f in fnmatch.filter(os.listdir(rpmdir), rpm_doc): 91 for f in fnmatch.filter(os.listdir(rpmdir), rpm_doc):
92 cls.test_file = os.path.join(rpmdir, f) 92 self.test_file = os.path.join(rpmdir, f)
93 cls.dst = '/tmp/base-passwd-doc.rpm' 93 break
94 else:
95 self.fail("Couldn't find the test rpm file {} in {}".format(rpm_doc, rpmdir))
96 self.dst = '/tmp/base-passwd-doc.rpm'
94 97
95 @OETestDepends(['rpm.RpmBasicTest.test_rpm_query']) 98 @OETestDepends(['rpm.RpmBasicTest.test_rpm_query'])
96 def test_rpm_install(self): 99 def test_rpm_install(self):
100 self._find_test_file()
97 self.tc.target.copyTo(self.test_file, self.dst) 101 self.tc.target.copyTo(self.test_file, self.dst)
98 status, output = self.target.run('rpm -ivh /tmp/base-passwd-doc.rpm') 102 status, output = self.target.run('rpm -ivh /tmp/base-passwd-doc.rpm')
99 msg = 'Failed to install base-passwd-doc package: %s' % output 103 msg = 'Failed to install base-passwd-doc package: %s' % output
@@ -116,12 +120,13 @@ class RpmInstallRemoveTest(OERuntimeTestCase):
116 Author: Alexander Kanavin <alex.kanavin@gmail.com> 120 Author: Alexander Kanavin <alex.kanavin@gmail.com>
117 AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com> 121 AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
118 """ 122 """
119 db_files_cmd = 'ls /var/lib/rpm/__db.*' 123 self._find_test_file()
124 db_files_cmd = 'ls /var/lib/rpm/rpmdb.sqlite*'
120 check_log_cmd = "grep RPM /var/log/messages | wc -l" 125 check_log_cmd = "grep RPM /var/log/messages | wc -l"
121 126
122 # Make sure that some database files are under /var/lib/rpm as '__db.xxx' 127 # Make sure that some database files are under /var/lib/rpm as 'rpmdb.sqlite'
123 status, output = self.target.run(db_files_cmd) 128 status, output = self.target.run(db_files_cmd)
124 msg = 'Failed to find database files under /var/lib/rpm/ as __db.xxx' 129 msg = 'Failed to find database files under /var/lib/rpm/ as rpmdb.sqlite'
125 self.assertEqual(0, status, msg=msg) 130 self.assertEqual(0, status, msg=msg)
126 131
127 self.tc.target.copyTo(self.test_file, self.dst) 132 self.tc.target.copyTo(self.test_file, self.dst)
@@ -141,13 +146,4 @@ class RpmInstallRemoveTest(OERuntimeTestCase):
141 146
142 self.tc.target.run('rm -f %s' % self.dst) 147 self.tc.target.run('rm -f %s' % self.dst)
143 148
144 # if using systemd this should ensure all entries are flushed to /var
145 status, output = self.target.run("journalctl --sync")
146 # Get the amount of entries in the log file
147 status, output = self.target.run(check_log_cmd)
148 msg = 'Failed to get the final size of the log file.'
149 self.assertEqual(0, status, msg=msg)
150 149
151 # Check that there's enough of them
152 self.assertGreaterEqual(int(output), 80,
153 'Cound not find sufficient amount of rpm entries in /var/log/messages, found {} entries'.format(output))
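The old fixed 30-second loop becomes wait_for_no_process_for_user(), which keeps polling the target's ps output until the user's processes disappear or a deadline passes, then fails with the list of surviving processes. The same poll-until-deadline pattern, reduced to a generic sketch (wait_until() is a made-up helper name):

import time

def wait_until(predicate, timeout=120, interval=1):
    """Poll predicate() until it returns a truthy value or the timeout expires."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        value = predicate()
        if value:
            return value
        time.sleep(interval)
    raise TimeoutError('condition not met within %s seconds' % timeout)

# e.g. wait_until(lambda: 'test1 ' not in run_ps_on_target()), where run_ps_on_target()
# stands in for self.target.run(self.tc.target_cmds['ps'])[1]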
diff --git a/meta/lib/oeqa/runtime/cases/rt.py b/meta/lib/oeqa/runtime/cases/rt.py
new file mode 100644
index 0000000000..15ab4dbbbb
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/rt.py
@@ -0,0 +1,19 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7from oeqa.runtime.case import OERuntimeTestCase
8from oeqa.core.decorator.depends import OETestDepends
9
10class RtTest(OERuntimeTestCase):
11 @OETestDepends(['ssh.SSHTest.test_ssh'])
12 def test_is_rt(self):
13 """
14 Check that the kernel has CONFIG_PREEMPT_RT enabled.
15 """
16 status, output = self.target.run("uname -a")
17 self.assertEqual(status, 0, msg=output)
18 # Split so we don't get a substring false-positive
19 self.assertIn("PREEMPT_RT", output.split())
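The new rt.py case splits the uname banner into tokens before the membership test, so "PREEMPT_RT" only matches as a whole word. A short illustration of why the split matters (the banner string here is made up):

banner = 'Linux qemuarm64 6.6.23-rt28 #1 SMP PREEMPT_RT Thu Mar 28 12:00:00 UTC 2024 aarch64 GNU/Linux'
print('PREEMPT' in banner)             # True: a plain substring check would false-positive
print('PREEMPT' in banner.split())     # False: whole-token check, as test_is_rt does
print('PREEMPT_RT' in banner.split())  # True only when the token is really present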
diff --git a/meta/lib/oeqa/runtime/cases/rtc.py b/meta/lib/oeqa/runtime/cases/rtc.py
index a34c101a9d..6e45c5db4f 100644
--- a/meta/lib/oeqa/runtime/cases/rtc.py
+++ b/meta/lib/oeqa/runtime/cases/rtc.py
@@ -1,5 +1,11 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
1from oeqa.runtime.case import OERuntimeTestCase 6from oeqa.runtime.case import OERuntimeTestCase
2from oeqa.core.decorator.depends import OETestDepends 7from oeqa.core.decorator.depends import OETestDepends
8from oeqa.core.decorator.data import skipIfFeature
3from oeqa.runtime.decorator.package import OEHasPackage 9from oeqa.runtime.decorator.package import OEHasPackage
4 10
5import re 11import re
@@ -9,19 +15,21 @@ class RTCTest(OERuntimeTestCase):
9 def setUp(self): 15 def setUp(self):
10 if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd': 16 if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd':
11 self.logger.debug('Stopping systemd-timesyncd daemon') 17 self.logger.debug('Stopping systemd-timesyncd daemon')
12 self.target.run('systemctl disable --now systemd-timesyncd') 18 self.target.run('systemctl disable --now --runtime systemd-timesyncd')
13 19
14 def tearDown(self): 20 def tearDown(self):
15 if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd': 21 if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd':
16 self.logger.debug('Starting systemd-timesyncd daemon') 22 self.logger.debug('Starting systemd-timesyncd daemon')
17 self.target.run('systemctl enable --now systemd-timesyncd') 23 self.target.run('systemctl enable --now --runtime systemd-timesyncd')
18 24
25 @skipIfFeature('read-only-rootfs',
26 'Test does not work with read-only-rootfs in IMAGE_FEATURES')
19 @OETestDepends(['ssh.SSHTest.test_ssh']) 27 @OETestDepends(['ssh.SSHTest.test_ssh'])
20 @OEHasPackage(['coreutils', 'busybox']) 28 @OEHasPackage(['coreutils', 'busybox'])
21 def test_rtc(self): 29 def test_rtc(self):
22 (status, output) = self.target.run('hwclock -r') 30 (status, output) = self.target.run('hwclock -r')
23 self.assertEqual(status, 0, msg='Failed to get RTC time, output: %s' % output) 31 self.assertEqual(status, 0, msg='Failed to get RTC time, output: %s' % output)
24 32
25 (status, current_datetime) = self.target.run('date +"%m%d%H%M%Y"') 33 (status, current_datetime) = self.target.run('date +"%m%d%H%M%Y"')
26 self.assertEqual(status, 0, msg='Failed to get system current date & time, output: %s' % current_datetime) 34 self.assertEqual(status, 0, msg='Failed to get system current date & time, output: %s' % current_datetime)
27 35
@@ -32,7 +40,6 @@ class RTCTest(OERuntimeTestCase):
32 40
33 (status, output) = self.target.run('date %s' % current_datetime) 41 (status, output) = self.target.run('date %s' % current_datetime)
34 self.assertEqual(status, 0, msg='Failed to reset system date & time, output: %s' % output) 42 self.assertEqual(status, 0, msg='Failed to reset system date & time, output: %s' % output)
35 43
36 (status, output) = self.target.run('hwclock -w') 44 (status, output) = self.target.run('hwclock -w')
37 self.assertEqual(status, 0, msg='Failed to reset RTC time, output: %s' % output) 45 self.assertEqual(status, 0, msg='Failed to reset RTC time, output: %s' % output)
38
diff --git a/meta/lib/oeqa/runtime/cases/runlevel.py b/meta/lib/oeqa/runtime/cases/runlevel.py
index 3a4df8ace1..6734b0f5ed 100644
--- a/meta/lib/oeqa/runtime/cases/runlevel.py
+++ b/meta/lib/oeqa/runtime/cases/runlevel.py
@@ -1,3 +1,8 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
1from oeqa.runtime.case import OERuntimeTestCase 6from oeqa.runtime.case import OERuntimeTestCase
2from oeqa.core.decorator.depends import OETestDepends 7from oeqa.core.decorator.depends import OETestDepends
3 8
diff --git a/meta/lib/oeqa/runtime/cases/rust.py b/meta/lib/oeqa/runtime/cases/rust.py
new file mode 100644
index 0000000000..123c942012
--- /dev/null
+++ b/meta/lib/oeqa/runtime/cases/rust.py
@@ -0,0 +1,64 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7from oeqa.runtime.case import OERuntimeTestCase
8from oeqa.core.decorator.depends import OETestDepends
9from oeqa.runtime.decorator.package import OEHasPackage
10
11class RustCompileTest(OERuntimeTestCase):
12
13 @classmethod
14 def setUp(cls):
15 dst = '/tmp/'
16 src = os.path.join(cls.tc.files_dir, 'test.rs')
17 cls.tc.target.copyTo(src, dst)
18
19 @classmethod
20 def tearDown(cls):
21 files = '/tmp/test.rs /tmp/test'
22 cls.tc.target.run('rm %s' % files)
23 dirs = '/tmp/hello'
24 cls.tc.target.run('rm -r %s' % dirs)
25
26 @OETestDepends(['ssh.SSHTest.test_ssh'])
27 @OEHasPackage('rust')
28 @OEHasPackage('openssh-scp')
29 def test_rust_compile(self):
30 status, output = self.target.run('rustc /tmp/test.rs -o /tmp/test')
31 msg = 'rust compile failed, output: %s' % output
32 self.assertEqual(status, 0, msg=msg)
33
34 status, output = self.target.run('/tmp/test')
35 msg = 'running compiled file failed, output: %s' % output
36 self.assertEqual(status, 0, msg=msg)
37
38 @OETestDepends(['ssh.SSHTest.test_ssh'])
39 @OEHasPackage('cargo')
40 @OEHasPackage('openssh-scp')
41 def test_cargo_compile(self):
42 status, output = self.target.run('cargo new /tmp/hello')
43 msg = 'cargo new failed, output: %s' % output
44 self.assertEqual(status, 0, msg=msg)
45
46 status, output = self.target.run('cargo build --manifest-path=/tmp/hello/Cargo.toml')
47 msg = 'cargo build failed, output: %s' % output
48 self.assertEqual(status, 0, msg=msg)
49
50 status, output = self.target.run('cargo run --manifest-path=/tmp/hello/Cargo.toml')
51 msg = 'running compiled file failed, output: %s' % output
52 self.assertEqual(status, 0, msg=msg)
53
54class RustCLibExampleTest(OERuntimeTestCase):
55 @OETestDepends(['ssh.SSHTest.test_ssh'])
56 @OEHasPackage('rust-c-lib-example-bin')
57 def test_rust_c_lib_example(self):
58 cmd = "rust-c-lib-example-bin test"
59 status, output = self.target.run(cmd)
60 msg = 'Exit status was not 0. Output: %s' % output
61 self.assertEqual(status, 0, msg=msg)
62
63 msg = 'Incorrect output: %s' % output
64 self.assertEqual(output, "Hello world in rust from C!", msg=msg)
diff --git a/meta/lib/oeqa/runtime/cases/scons.py b/meta/lib/oeqa/runtime/cases/scons.py
index 3c7c7f7270..4a8d4d40ba 100644
--- a/meta/lib/oeqa/runtime/cases/scons.py
+++ b/meta/lib/oeqa/runtime/cases/scons.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/scp.py b/meta/lib/oeqa/runtime/cases/scp.py
index 3a5f292152..ee97b8ef66 100644
--- a/meta/lib/oeqa/runtime/cases/scp.py
+++ b/meta/lib/oeqa/runtime/cases/scp.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -23,7 +25,7 @@ class ScpTest(OERuntimeTestCase):
23 os.remove(cls.tmp_path) 25 os.remove(cls.tmp_path)
24 26
25 @OETestDepends(['ssh.SSHTest.test_ssh']) 27 @OETestDepends(['ssh.SSHTest.test_ssh'])
26 @OEHasPackage(['openssh-scp', 'dropbear']) 28 @OEHasPackage(['openssh-scp'])
27 def test_scp_file(self): 29 def test_scp_file(self):
28 dst = '/tmp/test_scp_file' 30 dst = '/tmp/test_scp_file'
29 31
diff --git a/meta/lib/oeqa/runtime/cases/skeletoninit.py b/meta/lib/oeqa/runtime/cases/skeletoninit.py
index 4779cd6bb4..d0fdcbded9 100644
--- a/meta/lib/oeqa/runtime/cases/skeletoninit.py
+++ b/meta/lib/oeqa/runtime/cases/skeletoninit.py
@@ -1,10 +1,12 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5# This test should cover https://bugzilla.yoctoproject.org/tr_show_case.cgi?case_id=284 7# This test should cover https://bugzilla.yoctoproject.org/tr_show_case.cgi?case_id=284
6# testcase. Image under test must have meta-skeleton layer in bblayers and 8# testcase. Image under test must have meta-skeleton layer in bblayers and
7# IMAGE_INSTALL_append = " service" in local.conf 9# IMAGE_INSTALL:append = " service" in local.conf
8from oeqa.runtime.case import OERuntimeTestCase 10from oeqa.runtime.case import OERuntimeTestCase
9from oeqa.core.decorator.depends import OETestDepends 11from oeqa.core.decorator.depends import OETestDepends
10from oeqa.core.decorator.data import skipIfDataVar 12from oeqa.core.decorator.data import skipIfDataVar
@@ -15,7 +17,7 @@ class SkeletonBasicTest(OERuntimeTestCase):
15 @OETestDepends(['ssh.SSHTest.test_ssh']) 17 @OETestDepends(['ssh.SSHTest.test_ssh'])
16 @OEHasPackage(['service']) 18 @OEHasPackage(['service'])
17 @skipIfDataVar('VIRTUAL-RUNTIME_init_manager', 'systemd', 19 @skipIfDataVar('VIRTUAL-RUNTIME_init_manager', 'systemd',
18 'Not appropiate for systemd image') 20 'Not appropriate for systemd image')
19 def test_skeleton_availability(self): 21 def test_skeleton_availability(self):
20 status, output = self.target.run('ls /etc/init.d/skeleton') 22 status, output = self.target.run('ls /etc/init.d/skeleton')
21 msg = 'skeleton init script not found. Output:\n%s' % output 23 msg = 'skeleton init script not found. Output:\n%s' % output
diff --git a/meta/lib/oeqa/runtime/cases/ssh.py b/meta/lib/oeqa/runtime/cases/ssh.py
index 60a5fbbfbf..cdbef59500 100644
--- a/meta/lib/oeqa/runtime/cases/ssh.py
+++ b/meta/lib/oeqa/runtime/cases/ssh.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -11,9 +13,12 @@ class SSHTest(OERuntimeTestCase):
11 @OETestDepends(['ping.PingTest.test_ping']) 13 @OETestDepends(['ping.PingTest.test_ping'])
12 @OEHasPackage(['dropbear', 'openssh-sshd']) 14 @OEHasPackage(['dropbear', 'openssh-sshd'])
13 def test_ssh(self): 15 def test_ssh(self):
16 (status, output) = self.target.run('sleep 20', timeout=2)
17 msg='run() timed out but return code was zero.'
18 self.assertNotEqual(status, 0, msg=msg)
14 (status, output) = self.target.run('uname -a') 19 (status, output) = self.target.run('uname -a')
15 self.assertEqual(status, 0, msg='SSH Test failed: %s' % output) 20 self.assertEqual(status, 0, msg='SSH Test failed: %s' % output)
16 (status, output) = self.target.run('cat /etc/masterimage') 21 (status, output) = self.target.run('cat /etc/controllerimage')
17 msg = "This isn't the right image - /etc/masterimage " \ 22 msg = "This isn't the right image - /etc/controllerimage " \
18 "shouldn't be here %s" % output 23 "shouldn't be here %s" % output
19 self.assertEqual(status, 1, msg=msg) 24 self.assertEqual(status, 1, msg=msg)
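The extra check at the top of test_ssh runs a 20-second sleep with a 2-second timeout and expects a non-zero status, confirming that the target's run() really enforces its timeout before later tests depend on it. The equivalent host-side pattern with subprocess, as a sketch rather than the OESSHTarget implementation:

import subprocess

def run_with_timeout(cmd, timeout):
    """Run a shell command; report status 255 on timeout, mirroring an ssh-style failure."""
    try:
        proc = subprocess.run(cmd, shell=True, timeout=timeout,
                              stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        return proc.returncode, proc.stdout.decode()
    except subprocess.TimeoutExpired:
        return 255, ''

status, _ = run_with_timeout('sleep 20', timeout=2)
assert status != 0, 'timeout was not enforced'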
diff --git a/meta/lib/oeqa/runtime/cases/stap.py b/meta/lib/oeqa/runtime/cases/stap.py
index 5342f6ac34..3be4162108 100644
--- a/meta/lib/oeqa/runtime/cases/stap.py
+++ b/meta/lib/oeqa/runtime/cases/stap.py
@@ -1,37 +1,34 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import os 7import os
6 8
7from oeqa.runtime.case import OERuntimeTestCase 9from oeqa.runtime.case import OERuntimeTestCase
8from oeqa.core.decorator.depends import OETestDepends
9from oeqa.core.decorator.data import skipIfNotFeature 10from oeqa.core.decorator.data import skipIfNotFeature
10from oeqa.runtime.decorator.package import OEHasPackage 11from oeqa.runtime.decorator.package import OEHasPackage
11 12
12class StapTest(OERuntimeTestCase): 13class StapTest(OERuntimeTestCase):
13 14 @skipIfNotFeature('tools-profile', 'Test requires tools-profile to be in IMAGE_FEATURES')
14 @classmethod
15 def setUp(cls):
16 src = os.path.join(cls.tc.runtime_files_dir, 'hello.stp')
17 dst = '/tmp/hello.stp'
18 cls.tc.target.copyTo(src, dst)
19
20 @classmethod
21 def tearDown(cls):
22 files = '/tmp/hello.stp'
23 cls.tc.target.run('rm %s' % files)
24
25 @skipIfNotFeature('tools-profile',
26 'Test requires tools-profile to be in IMAGE_FEATURES')
27 @OETestDepends(['kernelmodule.KernelModuleTest.test_kernel_module'])
28 @OEHasPackage(['systemtap']) 15 @OEHasPackage(['systemtap'])
16 @OEHasPackage(['gcc-symlinks'])
17 @OEHasPackage(['kernel-devsrc'])
29 def test_stap(self): 18 def test_stap(self):
30 cmds = [ 19 try:
31 'cd /usr/src/kernel && make scripts prepare', 20 cmd = 'make -j -C /usr/src/kernel scripts prepare'
32 'cd /lib/modules/`uname -r` && (if [ ! -e build ]; then ln -s /usr/src/kernel build; fi)',
33 'stap --disable-cache -DSTP_NO_VERREL_CHECK /tmp/hello.stp'
34 ]
35 for cmd in cmds:
36 status, output = self.target.run(cmd, 900) 21 status, output = self.target.run(cmd, 900)
37 self.assertEqual(status, 0, msg='\n'.join([cmd, output])) 22 self.assertEqual(status, 0, msg='\n'.join([cmd, output]))
23
24 cmd = 'stap -v -p4 -m stap-hello --disable-cache -DSTP_NO_VERREL_CHECK -e \'probe oneshot { print("Hello, "); println("SystemTap!") }\''
25 status, output = self.target.run(cmd, 900)
26 self.assertEqual(status, 0, msg='\n'.join([cmd, output]))
27
28 cmd = 'staprun -v -R -b1 stap-hello.ko'
29 self.assertEqual(status, 0, msg='\n'.join([cmd, output]))
30 self.assertIn('Hello, SystemTap!', output, msg='\n'.join([cmd, output]))
31 except:
32 status, dmesg = self.target.run('dmesg')
33 if status == 0:
34 print(dmesg)
diff --git a/meta/lib/oeqa/runtime/cases/storage.py b/meta/lib/oeqa/runtime/cases/storage.py
index 166d26b252..b05622fea8 100644
--- a/meta/lib/oeqa/runtime/cases/storage.py
+++ b/meta/lib/oeqa/runtime/cases/storage.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -91,24 +93,24 @@ class UsbTest(StorageBase):
91 self.test_file = "usb.tst" 93 self.test_file = "usb.tst"
92 self.test_dir = os.path.join(self.mount_point, "oeqa") 94 self.test_dir = os.path.join(self.mount_point, "oeqa")
93 95
94 @skipIfQemu('qemuall', 'Test only runs on real hardware') 96 @skipIfQemu()
95 @OETestDepends(['ssh.SSHTest.test_ssh']) 97 @OETestDepends(['ssh.SSHTest.test_ssh'])
96 def test_usb_mount(self): 98 def test_usb_mount(self):
97 self.storage_umount(2) 99 self.storage_umount(2)
98 self.storage_mount(5) 100 self.storage_mount(5)
99 101
100 @skipIfQemu('qemuall', 'Test only runs on real hardware') 102 @skipIfQemu()
101 @OETestDepends(['storage.UsbTest.test_usb_mount']) 103 @OETestDepends(['storage.UsbTest.test_usb_mount'])
102 def test_usb_basic_operations(self): 104 def test_usb_basic_operations(self):
103 self.storage_basic() 105 self.storage_basic()
104 106
105 @skipIfQemu('qemuall', 'Test only runs on real hardware') 107 @skipIfQemu()
106 @OETestDepends(['storage.UsbTest.test_usb_basic_operations']) 108 @OETestDepends(['storage.UsbTest.test_usb_basic_operations'])
107 def test_usb_basic_rw(self): 109 def test_usb_basic_rw(self):
108 self.storage_write() 110 self.storage_write()
109 self.storage_read() 111 self.storage_read()
110 112
111 @skipIfQemu('qemuall', 'Test only runs on real hardware') 113 @skipIfQemu()
112 @OETestDepends(['storage.UsbTest.test_usb_mount']) 114 @OETestDepends(['storage.UsbTest.test_usb_mount'])
113 def test_usb_umount(self): 115 def test_usb_umount(self):
114 self.storage_umount(2) 116 self.storage_umount(2)
@@ -126,24 +128,24 @@ class MMCTest(StorageBase):
126 self.test_file = "mmc.tst" 128 self.test_file = "mmc.tst"
127 self.test_dir = os.path.join(self.mount_point, "oeqa") 129 self.test_dir = os.path.join(self.mount_point, "oeqa")
128 130
129 @skipIfQemu('qemuall', 'Test only runs on real hardware') 131 @skipIfQemu()
130 @OETestDepends(['ssh.SSHTest.test_ssh']) 132 @OETestDepends(['ssh.SSHTest.test_ssh'])
131 def test_mmc_mount(self): 133 def test_mmc_mount(self):
132 self.storage_umount(2) 134 self.storage_umount(2)
133 self.storage_mount() 135 self.storage_mount()
134 136
135 @skipIfQemu('qemuall', 'Test only runs on real hardware') 137 @skipIfQemu()
136 @OETestDepends(['storage.MMCTest.test_mmc_mount']) 138 @OETestDepends(['storage.MMCTest.test_mmc_mount'])
137 def test_mmc_basic_operations(self): 139 def test_mmc_basic_operations(self):
138 self.storage_basic() 140 self.storage_basic()
139 141
140 @skipIfQemu('qemuall', 'Test only runs on real hardware') 142 @skipIfQemu()
141 @OETestDepends(['storage.MMCTest.test_mmc_basic_operations']) 143 @OETestDepends(['storage.MMCTest.test_mmc_basic_operations'])
142 def test_mmc_basic_rw(self): 144 def test_mmc_basic_rw(self):
143 self.storage_write() 145 self.storage_write()
144 self.storage_read() 146 self.storage_read()
145 147
146 @skipIfQemu('qemuall', 'Test only runs on real hardware') 148 @skipIfQemu()
147 @OETestDepends(['storage.MMCTest.test_mmc_mount']) 149 @OETestDepends(['storage.MMCTest.test_mmc_mount'])
148 def test_mmc_umount(self): 150 def test_mmc_umount(self):
149 self.storage_umount(2) 151 self.storage_umount(2)
diff --git a/meta/lib/oeqa/runtime/cases/suspend.py b/meta/lib/oeqa/runtime/cases/suspend.py
index 67b6f7e56f..a625cc5901 100644
--- a/meta/lib/oeqa/runtime/cases/suspend.py
+++ b/meta/lib/oeqa/runtime/cases/suspend.py
@@ -1,3 +1,8 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
1from oeqa.runtime.case import OERuntimeTestCase 6from oeqa.runtime.case import OERuntimeTestCase
2from oeqa.core.decorator.depends import OETestDepends 7from oeqa.core.decorator.depends import OETestDepends
3from oeqa.core.decorator.data import skipIfQemu 8from oeqa.core.decorator.data import skipIfQemu
@@ -23,7 +28,7 @@ class Suspend_Test(OERuntimeTestCase):
23 (status, output) = self.target.run('sudo rtcwake -m mem -s 10') 28 (status, output) = self.target.run('sudo rtcwake -m mem -s 10')
24 self.assertEqual(status, 0, msg = 'Failed to suspend your system to RAM, output : %s' % output) 29 self.assertEqual(status, 0, msg = 'Failed to suspend your system to RAM, output : %s' % output)
25 30
26 @skipIfQemu('qemuall', 'Test only runs on real hardware') 31 @skipIfQemu()
27 @OETestDepends(['ssh.SSHTest.test_ssh']) 32 @OETestDepends(['ssh.SSHTest.test_ssh'])
28 def test_suspend(self): 33 def test_suspend(self):
29 self.test_date() 34 self.test_date()
diff --git a/meta/lib/oeqa/runtime/cases/systemd.py b/meta/lib/oeqa/runtime/cases/systemd.py
index 7c44abe8ed..5481e1d840 100644
--- a/meta/lib/oeqa/runtime/cases/systemd.py
+++ b/meta/lib/oeqa/runtime/cases/systemd.py
@@ -1,8 +1,11 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import re 7import re
8import threading
6import time 9import time
7 10
8from oeqa.runtime.case import OERuntimeTestCase 11from oeqa.runtime.case import OERuntimeTestCase
@@ -66,8 +69,8 @@ class SystemdBasicTests(SystemdTest):
66 """ 69 """
67 endtime = time.time() + (60 * 2) 70 endtime = time.time() + (60 * 2)
68 while True: 71 while True:
69 status, output = self.target.run('SYSTEMD_BUS_TIMEOUT=240s systemctl --state=activating') 72 status, output = self.target.run('SYSTEMD_BUS_TIMEOUT=240s systemctl is-system-running')
70 if "0 loaded units listed" in output: 73 if "running" in output or "degraded" in output:
71 return (True, '') 74 return (True, '')
72 if time.time() >= endtime: 75 if time.time() >= endtime:
73 return (False, output) 76 return (False, output)
@@ -134,6 +137,27 @@ class SystemdServiceTests(SystemdTest):
134 status = self.target.run('mount -oro,remount /')[0] 137 status = self.target.run('mount -oro,remount /')[0]
135 self.assertTrue(status == 0, msg='Remounting / as r/o failed') 138 self.assertTrue(status == 0, msg='Remounting / as r/o failed')
136 139
140 @OETestDepends(['systemd.SystemdBasicTests.test_systemd_basic'])
141 @skipIfNotFeature('minidebuginfo', 'Test requires minidebuginfo to be in DISTRO_FEATURES')
142 @OEHasPackage(['busybox'])
143 def test_systemd_coredump_minidebuginfo(self):
144 """
145 Verify that the call-stacks generated by systemd-coredump are symbolicated using the
146 minidebuginfo metadata (.gnu_debugdata elf section).
147 """
148 t_thread = threading.Thread(target=self.target.run, args=("ulimit -c unlimited && sleep 1000",))
149 t_thread.start()
150 time.sleep(1)
151
152 status, output = self.target.run('pidof sleep')
153 # cause segfault on purpose
154 self.target.run('kill -SEGV %s' % output)
155 self.assertEqual(status, 0, msg = 'Not able to find process that runs sleep, output : %s' % output)
156
157 (status, output) = self.target.run('coredumpctl info')
158 self.assertEqual(status, 0, msg='MiniDebugInfo Test failed: %s' % output)
159 self.assertEqual('sleep_for_duration (busybox.nosuid' in output, True, msg='Call stack is missing minidebuginfo symbols (functions shown as "n/a"): %s' % output)
160
137class SystemdJournalTests(SystemdTest): 161class SystemdJournalTests(SystemdTest):
138 162
139 @OETestDepends(['systemd.SystemdBasicTests.test_systemd_basic']) 163 @OETestDepends(['systemd.SystemdBasicTests.test_systemd_basic'])
@@ -152,7 +176,7 @@ class SystemdJournalTests(SystemdTest):
152 """ 176 """
153 177
154 # The expression chain that uniquely identifies the time boot message. 178 # The expression chain that uniquely identifies the time boot message.
155 expr_items=['Startup finished', 'kernel', 'userspace','\.$'] 179 expr_items=['Startup finished', 'kernel', 'userspace', r'\.$']
156 try: 180 try:
157 output = self.journalctl(args='-o cat --reverse') 181 output = self.journalctl(args='-o cat --reverse')
158 except AssertionError: 182 except AssertionError:
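The boot check now polls systemctl is-system-running instead of counting units in the activating state, and accepts either "running" or "degraded" before the two-minute deadline. Condensed into a sketch, assuming a run() callable that returns (status, output) like self.target.run():

import time

def wait_for_systemd_settled(run, timeout=120, interval=2):
    """Poll `systemctl is-system-running` until the system settles or time runs out."""
    deadline = time.time() + timeout
    output = ''
    while time.time() < deadline:
        _, output = run('SYSTEMD_BUS_TIMEOUT=240s systemctl is-system-running')
        if 'running' in output or 'degraded' in output:
            return True, output
        time.sleep(interval)
    return False, output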
diff --git a/meta/lib/oeqa/runtime/cases/terminal.py b/meta/lib/oeqa/runtime/cases/terminal.py
index 8fcca99f47..96ba3c3195 100644
--- a/meta/lib/oeqa/runtime/cases/terminal.py
+++ b/meta/lib/oeqa/runtime/cases/terminal.py
@@ -1,3 +1,8 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
1from oeqa.runtime.case import OERuntimeTestCase 6from oeqa.runtime.case import OERuntimeTestCase
2from oeqa.core.decorator.depends import OETestDepends 7from oeqa.core.decorator.depends import OETestDepends
3from oeqa.runtime.decorator.package import OEHasPackage 8from oeqa.runtime.decorator.package import OEHasPackage
diff --git a/meta/lib/oeqa/runtime/cases/usb_hid.py b/meta/lib/oeqa/runtime/cases/usb_hid.py
index 3c292cf661..6f23d2ff51 100644
--- a/meta/lib/oeqa/runtime/cases/usb_hid.py
+++ b/meta/lib/oeqa/runtime/cases/usb_hid.py
@@ -1,3 +1,8 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
1from oeqa.runtime.case import OERuntimeTestCase 6from oeqa.runtime.case import OERuntimeTestCase
2from oeqa.core.decorator.depends import OETestDepends 7from oeqa.core.decorator.depends import OETestDepends
3from oeqa.core.decorator.data import skipIfQemu 8from oeqa.core.decorator.data import skipIfQemu
@@ -14,7 +19,7 @@ class USB_HID_Test(OERuntimeTestCase):
14 return self.assertEqual(status, 0, msg = 'Failed to suspend your system to RAM, output : %s' % output) 19 return self.assertEqual(status, 0, msg = 'Failed to suspend your system to RAM, output : %s' % output)
15 20
16 @OEHasPackage(['xdotool']) 21 @OEHasPackage(['xdotool'])
17 @skipIfQemu('qemuall', 'Test only runs on real hardware') 22 @skipIfQemu()
18 @OETestDepends(['ssh.SSHTest.test_ssh']) 23 @OETestDepends(['ssh.SSHTest.test_ssh'])
19 def test_USB_Hid_input(self): 24 def test_USB_Hid_input(self):
20 self.keyboard_mouse_simulation() 25 self.keyboard_mouse_simulation()
diff --git a/meta/lib/oeqa/runtime/cases/weston.py b/meta/lib/oeqa/runtime/cases/weston.py
index a1c7183213..ee4d336482 100644
--- a/meta/lib/oeqa/runtime/cases/weston.py
+++ b/meta/lib/oeqa/runtime/cases/weston.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -10,7 +12,7 @@ import threading
10import time 12import time
11 13
12class WestonTest(OERuntimeTestCase): 14class WestonTest(OERuntimeTestCase):
13 weston_log_file = '/tmp/weston.log' 15 weston_log_file = '/tmp/weston-2.log'
14 16
15 @classmethod 17 @classmethod
16 def tearDownClass(cls): 18 def tearDownClass(cls):
@@ -31,13 +33,13 @@ class WestonTest(OERuntimeTestCase):
31 return output.split(" ") 33 return output.split(" ")
32 34
33 def get_weston_command(self, cmd): 35 def get_weston_command(self, cmd):
34 return 'export XDG_RUNTIME_DIR=/run/user/0; export WAYLAND_DISPLAY=wayland-0; %s' % cmd 36 return 'export XDG_RUNTIME_DIR=/run/user/`id -u weston`; export WAYLAND_DISPLAY=wayland-1; %s' % cmd
35 37
36 def run_weston_init(self): 38 def run_weston_init(self):
37 if 'systemd' in self.tc.td['VIRTUAL-RUNTIME_init_manager']: 39 if 'systemd' in self.tc.td['VIRTUAL-RUNTIME_init_manager']:
38 self.target.run('systemd-run --collect --unit=weston-ptest.service --uid=0 -p PAMName=login -p TTYPath=/dev/tty6 -E XDG_RUNTIME_DIR=/tmp -E WAYLAND_DISPLAY=wayland-0 /usr/bin/weston --socket=wayland-1 --log=%s' % self.weston_log_file) 40 self.target.run('systemd-run --collect --unit=weston-ptest.service --uid=0 -p PAMName=login -p TTYPath=/dev/tty6 -E XDG_RUNTIME_DIR=/tmp -E WAYLAND_DISPLAY=wayland-0 /usr/bin/weston --socket=wayland-1 --log=%s' % self.weston_log_file)
39 else: 41 else:
40 self.target.run(self.get_weston_command('openvt -- weston --socket=wayland-1 --log=%s' % self.weston_log_file)) 42 self.target.run(self.get_weston_command('openvt -- weston --socket=wayland-2 --log=%s' % self.weston_log_file))
41 43
42 def get_new_wayland_processes(self, existing_wl_processes): 44 def get_new_wayland_processes(self, existing_wl_processes):
43 try_cnt = 0 45 try_cnt = 0
@@ -53,7 +55,11 @@ class WestonTest(OERuntimeTestCase):
53 55
54 @OEHasPackage(['wayland-utils']) 56 @OEHasPackage(['wayland-utils'])
55 def test_wayland_info(self): 57 def test_wayland_info(self):
56 status, output = self.target.run(self.get_weston_command('wayland-info')) 58 if 'systemd' in self.tc.td['VIRTUAL-RUNTIME_init_manager']:
59 command = 'XDG_RUNTIME_DIR=/run wayland-info'
60 else:
61 command = self.get_weston_command('wayland-info')
62 status, output = self.target.run(command)
57 self.assertEqual(status, 0, msg='wayland-info error: %s' % output) 63 self.assertEqual(status, 0, msg='wayland-info error: %s' % output)
58 64
59 @OEHasPackage(['weston']) 65 @OEHasPackage(['weston'])
@@ -73,3 +79,11 @@ class WestonTest(OERuntimeTestCase):
73 self.target.run('kill -9 %s' % w) 79 self.target.run('kill -9 %s' % w)
74 __, weston_log = self.target.run('cat %s' % self.weston_log_file) 80 __, weston_log = self.target.run('cat %s' % self.weston_log_file)
75 self.assertTrue(new_wl_processes, msg='Could not get new weston-desktop-shell processes (%s, try_cnt:%s) weston log: %s' % (new_wl_processes, try_cnt, weston_log)) 81 self.assertTrue(new_wl_processes, msg='Could not get new weston-desktop-shell processes (%s, try_cnt:%s) weston log: %s' % (new_wl_processes, try_cnt, weston_log))
82
83 @skipIfNotFeature('x11', 'Test requires x11 to be in DISTRO_FEATURES')
84 @OEHasPackage(['weston'])
85 def test_weston_supports_xwayland(self):
86 cmd ='cat %s | grep "xserver listening on display"' % self.weston_log_file
87 status, output = self.target.run(cmd)
88 msg = ('xwayland does not appear to be running')
89 self.assertEqual(status, 0, msg=msg)
diff --git a/meta/lib/oeqa/runtime/cases/x32lib.py b/meta/lib/oeqa/runtime/cases/x32lib.py
index f419c8f181..014da4b386 100644
--- a/meta/lib/oeqa/runtime/cases/x32lib.py
+++ b/meta/lib/oeqa/runtime/cases/x32lib.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/cases/xorg.py b/meta/lib/oeqa/runtime/cases/xorg.py
index d6845587c2..09afb1e3d1 100644
--- a/meta/lib/oeqa/runtime/cases/xorg.py
+++ b/meta/lib/oeqa/runtime/cases/xorg.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/runtime/context.py b/meta/lib/oeqa/runtime/context.py
index 3826f27642..cb7227a8df 100644
--- a/meta/lib/oeqa/runtime/context.py
+++ b/meta/lib/oeqa/runtime/context.py
@@ -5,11 +5,11 @@
5# 5#
6 6
7import os 7import os
8import sys
8 9
9from oeqa.core.context import OETestContext, OETestContextExecutor 10from oeqa.core.context import OETestContext, OETestContextExecutor
10from oeqa.core.target.ssh import OESSHTarget 11from oeqa.core.target.ssh import OESSHTarget
11from oeqa.core.target.qemu import OEQemuTarget 12from oeqa.core.target.qemu import OEQemuTarget
12from oeqa.utils.dump import HostDumper
13 13
14from oeqa.runtime.loader import OERuntimeTestLoader 14from oeqa.runtime.loader import OERuntimeTestLoader
15 15
@@ -19,12 +19,11 @@ class OERuntimeTestContext(OETestContext):
19 os.path.dirname(os.path.abspath(__file__)), "files") 19 os.path.dirname(os.path.abspath(__file__)), "files")
20 20
21 def __init__(self, td, logger, target, 21 def __init__(self, td, logger, target,
22 host_dumper, image_packages, extract_dir): 22 image_packages, extract_dir):
23 super(OERuntimeTestContext, self).__init__(td, logger) 23 super(OERuntimeTestContext, self).__init__(td, logger)
24 24
25 self.target = target 25 self.target = target
26 self.image_packages = image_packages 26 self.image_packages = image_packages
27 self.host_dumper = host_dumper
28 self.extract_dir = extract_dir 27 self.extract_dir = extract_dir
29 self._set_target_cmds() 28 self._set_target_cmds()
30 29
@@ -66,11 +65,11 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
66 % self.default_target_type) 65 % self.default_target_type)
67 runtime_group.add_argument('--target-ip', action='store', 66 runtime_group.add_argument('--target-ip', action='store',
68 default=self.default_target_ip, 67 default=self.default_target_ip,
69 help="IP address of device under test, default: %s" \ 68 help="IP address and optionally ssh port (default 22) of device under test, for example '192.168.0.7:22'. Default: %s" \
70 % self.default_target_ip) 69 % self.default_target_ip)
71 runtime_group.add_argument('--server-ip', action='store', 70 runtime_group.add_argument('--server-ip', action='store',
72 default=self.default_target_ip, 71 default=self.default_target_ip,
73 help="IP address of device under test, default: %s" \ 72 help="IP address of the test host from test target machine, default: %s" \
74 % self.default_server_ip) 73 % self.default_server_ip)
75 74
76 runtime_group.add_argument('--host-dumper-dir', action='store', 75 runtime_group.add_argument('--host-dumper-dir', action='store',
@@ -119,8 +118,7 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
119 # XXX: Don't base your targets on this code it will be refactored 118 # XXX: Don't base your targets on this code it will be refactored
120 # in the near future. 119 # in the near future.
121 # Custom target module loading 120 # Custom target module loading
122 target_modules_path = kwargs.get('target_modules_path', '') 121 controller = OERuntimeTestContextExecutor.getControllerModule(target_type)
123 controller = OERuntimeTestContextExecutor.getControllerModule(target_type, target_modules_path)
124 target = controller(logger, target_ip, server_ip, **kwargs) 122 target = controller(logger, target_ip, server_ip, **kwargs)
125 123
126 return target 124 return target
@@ -130,15 +128,15 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
130 # AttributeError raised if not found. 128 # AttributeError raised if not found.
131 # ImportError raised if a provided module can not be imported. 129 # ImportError raised if a provided module can not be imported.
132 @staticmethod 130 @staticmethod
133 def getControllerModule(target, target_modules_path): 131 def getControllerModule(target):
134 controllerslist = OERuntimeTestContextExecutor._getControllerModulenames(target_modules_path) 132 controllerslist = OERuntimeTestContextExecutor._getControllerModulenames()
135 controller = OERuntimeTestContextExecutor._loadControllerFromName(target, controllerslist) 133 controller = OERuntimeTestContextExecutor._loadControllerFromName(target, controllerslist)
136 return controller 134 return controller
137 135
138 # Return a list of all python modules in lib/oeqa/controllers for each 136 # Return a list of all python modules in lib/oeqa/controllers for each
139 # layer in bbpath 137 # layer in bbpath
140 @staticmethod 138 @staticmethod
141 def _getControllerModulenames(target_modules_path): 139 def _getControllerModulenames():
142 140
143 controllerslist = [] 141 controllerslist = []
144 142
@@ -153,9 +151,12 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
153 else: 151 else:
154 raise RuntimeError("Duplicate controller module found for %s. Layers should create unique controller module names" % module) 152 raise RuntimeError("Duplicate controller module found for %s. Layers should create unique controller module names" % module)
155 153
156 extpath = target_modules_path.split(':') 154 # sys.path can contain duplicate paths, but because of the logic in
157 for p in extpath: 155 # add_controller_list this doesn't work and causes testimage to abort.
158 controllerpath = os.path.join(p, 'lib', 'oeqa', 'controllers') 156 # Remove duplicates using an intermediate dictionary to ensure this
157 # doesn't happen.
158 for p in list(dict.fromkeys(sys.path)):
159 controllerpath = os.path.join(p, 'oeqa', 'controllers')
159 if os.path.exists(controllerpath): 160 if os.path.exists(controllerpath):
160 add_controller_list(controllerpath) 161 add_controller_list(controllerpath)
161 return controllerslist 162 return controllerslist
@@ -175,16 +176,12 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
175 # Search for and return a controller or None from given module name 176 # Search for and return a controller or None from given module name
176 @staticmethod 177 @staticmethod
177 def _loadControllerFromModule(target, modulename): 178 def _loadControllerFromModule(target, modulename):
178 obj = None
179 # import module, allowing it to raise import exception
180 module = __import__(modulename, globals(), locals(), [target])
181 # look for target class in the module, catching any exceptions as it
182 # is valid that a module may not have the target class.
183 try: 179 try:
184 obj = getattr(module, target) 180 import importlib
185 except: 181 module = importlib.import_module(modulename)
186 obj = None 182 return getattr(module, target)
187 return obj 183 except AttributeError:
184 return None
188 185
189 @staticmethod 186 @staticmethod
190 def readPackagesManifest(manifest): 187 def readPackagesManifest(manifest):
@@ -200,10 +197,6 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
200 197
201 return image_packages 198 return image_packages
202 199
203 @staticmethod
204 def getHostDumper(cmds, directory):
205 return HostDumper(cmds, directory)
206
207 def _process_args(self, logger, args): 200 def _process_args(self, logger, args):
208 if not args.packages_manifest: 201 if not args.packages_manifest:
209 raise TypeError('Manifest file not provided') 202 raise TypeError('Manifest file not provided')
@@ -216,9 +209,6 @@ class OERuntimeTestContextExecutor(OETestContextExecutor):
216 self.tc_kwargs['init']['target'] = \ 209 self.tc_kwargs['init']['target'] = \
217 OERuntimeTestContextExecutor.getTarget(args.target_type, 210 OERuntimeTestContextExecutor.getTarget(args.target_type,
218 None, args.target_ip, args.server_ip, **target_kwargs) 211 None, args.target_ip, args.server_ip, **target_kwargs)
219 self.tc_kwargs['init']['host_dumper'] = \
220 OERuntimeTestContextExecutor.getHostDumper(None,
221 args.host_dumper_dir)
222 self.tc_kwargs['init']['image_packages'] = \ 212 self.tc_kwargs['init']['image_packages'] = \
223 OERuntimeTestContextExecutor.readPackagesManifest( 213 OERuntimeTestContextExecutor.readPackagesManifest(
224 args.packages_manifest) 214 args.packages_manifest)
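Controller discovery no longer takes a target_modules_path argument: it walks every oeqa/controllers directory found on sys.path (deduplicated while preserving order via dict.fromkeys) and resolves the class with importlib, returning None only when the module exists but lacks the requested attribute. A compact sketch of those two pieces (function names are illustrative):

import importlib
import os
import sys

def controller_dirs():
    """Yield existing oeqa/controllers directories, skipping duplicate sys.path entries."""
    for p in dict.fromkeys(sys.path):            # keeps first occurrence, preserves order
        path = os.path.join(p, 'oeqa', 'controllers')
        if os.path.isdir(path):
            yield path

def load_controller(class_name, module_name):
    """Import module_name and return class_name from it, or None if it is absent."""
    module = importlib.import_module(module_name)    # an ImportError still propagates
    return getattr(module, class_name, None)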
diff --git a/meta/lib/oeqa/runtime/decorator/package.py b/meta/lib/oeqa/runtime/decorator/package.py
index 57178655cc..b78ac9fc38 100644
--- a/meta/lib/oeqa/runtime/decorator/package.py
+++ b/meta/lib/oeqa/runtime/decorator/package.py
@@ -5,7 +5,6 @@
5# 5#
6 6
7from oeqa.core.decorator import OETestDecorator, registerDecorator 7from oeqa.core.decorator import OETestDecorator, registerDecorator
8from oeqa.core.utils.misc import strToSet
9 8
10@registerDecorator 9@registerDecorator
11class OEHasPackage(OETestDecorator): 10class OEHasPackage(OETestDecorator):
@@ -34,25 +33,30 @@ class OEHasPackage(OETestDecorator):
34 def setUpDecorator(self): 33 def setUpDecorator(self):
35 need_pkgs = set() 34 need_pkgs = set()
36 unneed_pkgs = set() 35 unneed_pkgs = set()
37 pkgs = strToSet(self.need_pkgs) 36
38 for pkg in pkgs: 37 # Turn literal strings into a list so we can just iterate over it
38 if isinstance(self.need_pkgs, str):
39 self.need_pkgs = [self.need_pkgs,]
40
41 mlprefix = self.case.td.get("MLPREFIX")
42 for pkg in self.need_pkgs:
39 if pkg.startswith('!'): 43 if pkg.startswith('!'):
40 unneed_pkgs.add(pkg[1:]) 44 unneed_pkgs.add(mlprefix + pkg[1:])
41 else: 45 else:
42 need_pkgs.add(pkg) 46 need_pkgs.add(mlprefix + pkg)
43 47
44 if unneed_pkgs: 48 if unneed_pkgs:
45 msg = 'Checking if %s is not installed' % ', '.join(unneed_pkgs) 49 msg = 'Checking if %s is not installed' % ', '.join(unneed_pkgs)
46 self.logger.debug(msg) 50 self.logger.debug(msg)
47 if not self.case.tc.image_packages.isdisjoint(unneed_pkgs): 51 if not self.case.tc.image_packages.isdisjoint(unneed_pkgs):
48 msg = "Test can't run with %s installed" % ', or'.join(unneed_pkgs) 52 msg = "Test can't run with %s installed" % ', or '.join(unneed_pkgs)
49 self._decorator_fail(msg) 53 self._decorator_fail(msg)
50 54
51 if need_pkgs: 55 if need_pkgs:
52 msg = 'Checking if at least one of %s is installed' % ', '.join(need_pkgs) 56 msg = 'Checking if at least one of %s is installed' % ', '.join(need_pkgs)
53 self.logger.debug(msg) 57 self.logger.debug(msg)
54 if self.case.tc.image_packages.isdisjoint(need_pkgs): 58 if self.case.tc.image_packages.isdisjoint(need_pkgs):
55 msg = "Test requires %s to be installed" % ', or'.join(need_pkgs) 59 msg = "Test requires %s to be installed" % ', or '.join(need_pkgs)
56 self._decorator_fail(msg) 60 self._decorator_fail(msg)
57 61
58 def _decorator_fail(self, msg): 62 def _decorator_fail(self, msg):
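OEHasPackage now accepts either a single package name or a list and prepends MLPREFIX so that multilib images are matched correctly; a leading '!' still marks a package that must not be installed. The normalisation step on its own, as a sketch outside the decorator machinery (split_requirements() is a made-up name):

def split_requirements(need_pkgs, mlprefix=''):
    """Return (required, forbidden) package-name sets with MLPREFIX applied."""
    if isinstance(need_pkgs, str):       # allow @OEHasPackage('rust') as well as lists
        need_pkgs = [need_pkgs]
    required, forbidden = set(), set()
    for pkg in need_pkgs:
        if pkg.startswith('!'):
            forbidden.add(mlprefix + pkg[1:])
        else:
            required.add(mlprefix + pkg)
    return required, forbidden

# e.g. split_requirements(['coreutils', '!busybox'], mlprefix='lib32-')
#      -> ({'lib32-coreutils'}, {'lib32-busybox'})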
diff --git a/meta/lib/oeqa/runtime/files/hello.stp b/meta/lib/oeqa/runtime/files/hello.stp
deleted file mode 100644
index 3677147162..0000000000
--- a/meta/lib/oeqa/runtime/files/hello.stp
+++ /dev/null
@@ -1 +0,0 @@
1probe oneshot { println("hello world") }
diff --git a/meta/lib/oeqa/sdk/buildtools-cases/README b/meta/lib/oeqa/sdk/buildtools-cases/README
new file mode 100644
index 0000000000..d4f20faa9f
--- /dev/null
+++ b/meta/lib/oeqa/sdk/buildtools-cases/README
@@ -0,0 +1,2 @@
1These test cases are used by buildtools-tarball, and are not used by the testsdk
2class.
diff --git a/meta/lib/oeqa/sdk/buildtools-cases/build.py b/meta/lib/oeqa/sdk/buildtools-cases/build.py
new file mode 100644
index 0000000000..c85c32496b
--- /dev/null
+++ b/meta/lib/oeqa/sdk/buildtools-cases/build.py
@@ -0,0 +1,32 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os, tempfile
8import time
9from oeqa.sdk.case import OESDKTestCase
10from oeqa.utils.subprocesstweak import errors_have_output
11errors_have_output()
12
13class BuildTests(OESDKTestCase):
14 """
15 Verify that bitbake can build virtual/libc inside the buildtools.
16 """
17 def test_libc(self):
18 with tempfile.TemporaryDirectory(prefix='bitbake-build-', dir=self.tc.sdk_dir) as testdir:
19 corebase = self.td['COREBASE']
20
21 self._run('. %s/oe-init-build-env %s' % (corebase, testdir))
22 with open(os.path.join(testdir, 'conf', 'local.conf'), 'ta') as conf:
23 conf.write('\n')
24 conf.write('DL_DIR = "%s"\n' % self.td['DL_DIR'])
25
26 try:
27 self._run('. %s/oe-init-build-env %s && bitbake virtual/libc' % (corebase, testdir))
28 finally:
29 delay = 10
30 while delay and (os.path.exists(testdir + "/bitbake.lock") or os.path.exists(testdir + "/cache/hashserv.db-wal")):
31 time.sleep(1)
32 delay = delay - 1
diff --git a/meta/lib/oeqa/sdk/buildtools-cases/gcc.py b/meta/lib/oeqa/sdk/buildtools-cases/gcc.py
new file mode 100644
index 0000000000..a62c4d0bc4
--- /dev/null
+++ b/meta/lib/oeqa/sdk/buildtools-cases/gcc.py
@@ -0,0 +1,31 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os.path
8from oeqa.sdk.case import OESDKTestCase
9
10class GccTests(OESDKTestCase):
11 def test_verify_specs(self):
12 """
13 Verify that the compiler has been relocated successfully and isn't
14 looking in the hard-coded prefix.
15 """
16 # Canonicalise the SDK root
17 sdk_base = os.path.realpath(self.tc.sdk_dir)
18 # Canonicalise the location of GCC
19 gcc_path = os.path.realpath(self._run("command -v gcc").strip())
20 # Skip the test if the GCC didn't come from the buildtools, as it only
21 # comes with buildtools-extended-tarball.
22 if os.path.commonprefix((sdk_base, gcc_path)) != sdk_base:
23 self.skipTest("Buildtools does not provide GCC")
24
25 # This is the prefix that GCC is built with, and should be replaced at
26 # installation time.
27 sdkpath = self.td.get("SDKPATH")
28 self.assertTrue(sdkpath)
29
30 for line in self._run('gcc -dumpspecs').splitlines():
31 self.assertNotIn(sdkpath, line)
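test_verify_specs decides whether gcc actually came from the buildtools by canonicalising both the SDK root and the resolved gcc path and comparing their common prefix, and only then checks that no -dumpspecs line still mentions the build-time SDKPATH. The containment check on its own, as a sketch:

import os.path

def is_inside(root, candidate):
    """True if `candidate` resolves to a path underneath `root`."""
    root = os.path.realpath(root)
    candidate = os.path.realpath(candidate)
    # Character-based, mirroring the commonprefix() check above;
    # os.path.commonpath() would be stricter about path components.
    return os.path.commonprefix((root, candidate)) == root

# e.g. is_inside('/opt/buildtools', '/opt/buildtools/sysroots/x86_64/usr/bin/gcc')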
diff --git a/meta/lib/oeqa/sdk/buildtools-cases/https.py b/meta/lib/oeqa/sdk/buildtools-cases/https.py
new file mode 100644
index 0000000000..4525e3d758
--- /dev/null
+++ b/meta/lib/oeqa/sdk/buildtools-cases/https.py
@@ -0,0 +1,22 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7from oeqa.sdk.case import OESDKTestCase
8from oeqa.utils.subprocesstweak import errors_have_output
9errors_have_output()
10
11class HTTPTests(OESDKTestCase):
12 """
13 Verify that HTTPS certificates are working correctly, as this depends on
14 environment variables being set correctly.
15 """
16
17 def test_wget(self):
18 self._run('env -i wget --debug --output-document /dev/null https://yoctoproject.org/connectivity.html')
19
20 def test_python(self):
21 # urlopen() returns a file-like object on success and throws an exception otherwise
22 self._run('python3 -c \'import urllib.request; urllib.request.urlopen("https://yoctoproject.org/connectivity.html")\'')
diff --git a/meta/lib/oeqa/sdk/buildtools-cases/sanity.py b/meta/lib/oeqa/sdk/buildtools-cases/sanity.py
new file mode 100644
index 0000000000..a55d456656
--- /dev/null
+++ b/meta/lib/oeqa/sdk/buildtools-cases/sanity.py
@@ -0,0 +1,24 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import shutil
8import os.path
9from oeqa.sdk.case import OESDKTestCase
10
11class SanityTests(OESDKTestCase):
12 def test_tools(self):
13 """
14 Test that wget and tar come from the buildtools, not the host. This
15 verifies that the buildtools have installed correctly. We can't check
16 for gcc as that is only installed by buildtools-extended.
17 """
18 for command in ("tar", "wget"):
19 # Canonicalise the SDK root
20 sdk_base = os.path.realpath(self.tc.sdk_dir)
21 # Canonicalise the location of this command
22 tool_path = os.path.realpath(self._run("command -v %s" % command).strip())
23 # Assert that the tool was found inside the SDK root
24 self.assertEqual(os.path.commonprefix((sdk_base, tool_path)), sdk_base)
diff --git a/meta/lib/oeqa/sdk/buildtools-docs-cases/README b/meta/lib/oeqa/sdk/buildtools-docs-cases/README
new file mode 100644
index 0000000000..f8edbc7dad
--- /dev/null
+++ b/meta/lib/oeqa/sdk/buildtools-docs-cases/README
@@ -0,0 +1,2 @@
1These test cases are used by build-docs-tarball, and are not used by the testsdk
2class.
diff --git a/meta/lib/oeqa/sdk/buildtools-docs-cases/build.py b/meta/lib/oeqa/sdk/buildtools-docs-cases/build.py
new file mode 100644
index 0000000000..6e3ee94292
--- /dev/null
+++ b/meta/lib/oeqa/sdk/buildtools-docs-cases/build.py
@@ -0,0 +1,19 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import tempfile
8from oeqa.sdk.case import OESDKTestCase
9from oeqa.utils.subprocesstweak import errors_have_output
10errors_have_output()
11
12class BuildTests(OESDKTestCase):
13 """
14 Verify that our docs can build using our docs tools tarball.
15 """
16 def test_docs_build(self):
17 with tempfile.TemporaryDirectory(prefix='docs-tarball-build-', dir=self.tc.sdk_dir) as testdir:
18 self._run('git clone git://git.yoctoproject.org/yocto-docs %s' % testdir)
19 self._run('cd %s/documentation && make html' % testdir)
diff --git a/meta/lib/oeqa/sdk/cases/assimp.py b/meta/lib/oeqa/sdk/cases/assimp.py
index f166758e49..e986838aea 100644
--- a/meta/lib/oeqa/sdk/cases/assimp.py
+++ b/meta/lib/oeqa/sdk/cases/assimp.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -23,18 +25,21 @@ class BuildAssimp(OESDKTestCase):
23 25
24 def test_assimp(self): 26 def test_assimp(self):
25 with tempfile.TemporaryDirectory(prefix="assimp", dir=self.tc.sdk_dir) as testdir: 27 with tempfile.TemporaryDirectory(prefix="assimp", dir=self.tc.sdk_dir) as testdir:
26 tarball = self.fetch(testdir, self.td["DL_DIR"], "https://github.com/assimp/assimp/archive/v4.1.0.tar.gz") 28 tarball = self.fetch(testdir, self.td["DL_DIR"], "https://github.com/assimp/assimp/archive/v5.3.1.tar.gz")
27 29
28 dirs = {} 30 dirs = {}
29 dirs["source"] = os.path.join(testdir, "assimp-4.1.0") 31 dirs["source"] = os.path.join(testdir, "assimp-5.3.1")
30 dirs["build"] = os.path.join(testdir, "build") 32 dirs["build"] = os.path.join(testdir, "build")
31 dirs["install"] = os.path.join(testdir, "install") 33 dirs["install"] = os.path.join(testdir, "install")
32 34
33 subprocess.check_output(["tar", "xf", tarball, "-C", testdir], stderr=subprocess.STDOUT) 35 subprocess.check_output(["tar", "xf", tarball, "-C", testdir], stderr=subprocess.STDOUT)
34 self.assertTrue(os.path.isdir(dirs["source"])) 36 self.assertTrue(os.path.isdir(dirs["source"]))
37 # Apply the zlib patch https://github.com/madler/zlib/commit/a566e156b3fa07b566ddbf6801b517a9dba04fa3
 38 # this sed won't be needed once assimp moves its zlib copy to v1.3.1+
39 self._run("sed -i '/# ifdef _FILE_OFFSET_BITS/I,+2 d' {source}/contrib/zlib/gzguts.h".format(**dirs))
35 os.makedirs(dirs["build"]) 40 os.makedirs(dirs["build"])
36 41
37 self._run("cd {build} && cmake -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON {source}".format(**dirs)) 42 self._run("cd {build} && cmake -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON -DASSIMP_BUILD_ZLIB=ON {source}".format(**dirs))
38 self._run("cmake --build {build} -- -j".format(**dirs)) 43 self._run("cmake --build {build} -- -j".format(**dirs))
39 self._run("cmake --build {build} --target install -- DESTDIR={install}".format(**dirs)) 44 self._run("cmake --build {build} --target install -- DESTDIR={install}".format(**dirs))
40 self.check_elf(os.path.join(dirs["install"], "usr", "local", "lib", "libassimp.so.4.1.0")) 45 self.check_elf(os.path.join(dirs["install"], "usr", "local", "lib", "libassimp.so.5.3.0"))
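
The updated assimp case follows the standard CMake-in-SDK flow: fetch a tarball, unpack it, configure out-of-tree with the environment-provided cross toolchain, build, install into a scratch DESTDIR, and inspect the resulting ELF. A condensed sketch of that sequence, assuming a helper `run()` that executes with the SDK environment sourced (as OESDKTestCase._run() does); paths and arguments here are illustrative, not taken from the patch:

import os

def cmake_cross_build(run, testdir, source, extra_cmake_args=""):
    """Configure, build and DESTDIR-install a CMake project inside an SDK.

    `run` is assumed to execute its command in the sourced SDK environment;
    `source` is an already-unpacked source tree."""
    build = os.path.join(testdir, "build")
    install = os.path.join(testdir, "install")
    os.makedirs(build)
    run("cd %s && cmake -DCMAKE_VERBOSE_MAKEFILE:BOOL=ON %s %s" % (build, extra_cmake_args, source))
    run("cmake --build %s -- -j" % build)
    run("cmake --build %s --target install -- DESTDIR=%s" % (build, install))
    return install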
diff --git a/meta/lib/oeqa/sdk/cases/buildcpio.py b/meta/lib/oeqa/sdk/cases/buildcpio.py
index e7fc211a47..51003b19cd 100644
--- a/meta/lib/oeqa/sdk/cases/buildcpio.py
+++ b/meta/lib/oeqa/sdk/cases/buildcpio.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -17,10 +19,10 @@ class BuildCpioTest(OESDKTestCase):
17 """ 19 """
18 def test_cpio(self): 20 def test_cpio(self):
19 with tempfile.TemporaryDirectory(prefix="cpio-", dir=self.tc.sdk_dir) as testdir: 21 with tempfile.TemporaryDirectory(prefix="cpio-", dir=self.tc.sdk_dir) as testdir:
20 tarball = self.fetch(testdir, self.td["DL_DIR"], "https://ftp.gnu.org/gnu/cpio/cpio-2.13.tar.gz") 22 tarball = self.fetch(testdir, self.td["DL_DIR"], "https://ftp.gnu.org/gnu/cpio/cpio-2.15.tar.gz")
21 23
22 dirs = {} 24 dirs = {}
23 dirs["source"] = os.path.join(testdir, "cpio-2.13") 25 dirs["source"] = os.path.join(testdir, "cpio-2.15")
24 dirs["build"] = os.path.join(testdir, "build") 26 dirs["build"] = os.path.join(testdir, "build")
25 dirs["install"] = os.path.join(testdir, "install") 27 dirs["install"] = os.path.join(testdir, "install")
26 28
@@ -28,8 +30,7 @@ class BuildCpioTest(OESDKTestCase):
28 self.assertTrue(os.path.isdir(dirs["source"])) 30 self.assertTrue(os.path.isdir(dirs["source"]))
29 os.makedirs(dirs["build"]) 31 os.makedirs(dirs["build"])
30 32
31 self._run("sed -i -e '/char.*program_name/d' {source}/src/global.c".format(**dirs)) 33 self._run("cd {build} && {source}/configure $CONFIGURE_FLAGS".format(**dirs))
32 self._run("cd {build} && {source}/configure --disable-maintainer-mode $CONFIGURE_FLAGS".format(**dirs))
33 self._run("cd {build} && make -j".format(**dirs)) 34 self._run("cd {build} && make -j".format(**dirs))
34 self._run("cd {build} && make install DESTDIR={install}".format(**dirs)) 35 self._run("cd {build} && make install DESTDIR={install}".format(**dirs))
35 36
diff --git a/meta/lib/oeqa/sdk/cases/buildepoxy.py b/meta/lib/oeqa/sdk/cases/buildepoxy.py
index 385f8ccca8..147ee3e0ee 100644
--- a/meta/lib/oeqa/sdk/cases/buildepoxy.py
+++ b/meta/lib/oeqa/sdk/cases/buildepoxy.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -16,8 +18,9 @@ class EpoxyTest(OESDKTestCase):
16 Test that Meson builds correctly. 18 Test that Meson builds correctly.
17 """ 19 """
18 def setUp(self): 20 def setUp(self):
19 if not (self.tc.hasHostPackage("nativesdk-meson")): 21 if not (self.tc.hasHostPackage("nativesdk-meson") or
20 raise unittest.SkipTest("GalculatorTest class: SDK doesn't contain Meson") 22 self.tc.hasHostPackage("meson-native")):
23 raise unittest.SkipTest("EpoxyTest class: SDK doesn't contain Meson")
21 24
22 def test_epoxy(self): 25 def test_epoxy(self):
23 with tempfile.TemporaryDirectory(prefix="epoxy", dir=self.tc.sdk_dir) as testdir: 26 with tempfile.TemporaryDirectory(prefix="epoxy", dir=self.tc.sdk_dir) as testdir:
@@ -32,7 +35,7 @@ class EpoxyTest(OESDKTestCase):
32 self.assertTrue(os.path.isdir(dirs["source"])) 35 self.assertTrue(os.path.isdir(dirs["source"]))
33 os.makedirs(dirs["build"]) 36 os.makedirs(dirs["build"])
34 37
35 log = self._run("meson -Degl=no -Dglx=no -Dx11=false {build} {source}".format(**dirs)) 38 log = self._run("meson --warnlevel 1 -Degl=no -Dglx=no -Dx11=false {build} {source}".format(**dirs))
36 # Check that Meson thinks we're doing a cross build and not a native 39 # Check that Meson thinks we're doing a cross build and not a native
37 self.assertIn("Build type: cross build", log) 40 self.assertIn("Build type: cross build", log)
38 self._run("ninja -C {build} -v".format(**dirs)) 41 self._run("ninja -C {build} -v".format(**dirs))
diff --git a/meta/lib/oeqa/sdk/cases/buildgalculator.py b/meta/lib/oeqa/sdk/cases/buildgalculator.py
index eb3c8ddf39..178f07472d 100644
--- a/meta/lib/oeqa/sdk/cases/buildgalculator.py
+++ b/meta/lib/oeqa/sdk/cases/buildgalculator.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -19,7 +21,8 @@ class GalculatorTest(OESDKTestCase):
19 if not (self.tc.hasTargetPackage("gtk+3", multilib=True) or \ 21 if not (self.tc.hasTargetPackage("gtk+3", multilib=True) or \
20 self.tc.hasTargetPackage("libgtk-3.0", multilib=True)): 22 self.tc.hasTargetPackage("libgtk-3.0", multilib=True)):
21 raise unittest.SkipTest("GalculatorTest class: SDK don't support gtk+3") 23 raise unittest.SkipTest("GalculatorTest class: SDK don't support gtk+3")
22 if not (self.tc.hasHostPackage("nativesdk-gettext-dev")): 24 if not (self.tc.hasHostPackage("nativesdk-gettext-dev") or
25 self.tc.hasHostPackage("gettext-native")):
23 raise unittest.SkipTest("GalculatorTest class: SDK doesn't contain gettext") 26 raise unittest.SkipTest("GalculatorTest class: SDK doesn't contain gettext")
24 27
25 def test_galculator(self): 28 def test_galculator(self):
diff --git a/meta/lib/oeqa/sdk/cases/buildlzip.py b/meta/lib/oeqa/sdk/cases/buildlzip.py
index 49ae756bf3..b4b7d85b88 100644
--- a/meta/lib/oeqa/sdk/cases/buildlzip.py
+++ b/meta/lib/oeqa/sdk/cases/buildlzip.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/sdk/cases/gcc.py b/meta/lib/oeqa/sdk/cases/gcc.py
index eb08eadd28..fc28b9c3d4 100644
--- a/meta/lib/oeqa/sdk/cases/gcc.py
+++ b/meta/lib/oeqa/sdk/cases/gcc.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/sdk/cases/maturin.py b/meta/lib/oeqa/sdk/cases/maturin.py
new file mode 100644
index 0000000000..ea10f568b2
--- /dev/null
+++ b/meta/lib/oeqa/sdk/cases/maturin.py
@@ -0,0 +1,79 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os
8import shutil
9import unittest
10
11from oeqa.core.utils.path import remove_safe
12from oeqa.sdk.case import OESDKTestCase
13from oeqa.utils.subprocesstweak import errors_have_output
14
15errors_have_output()
16
17
18class MaturinTest(OESDKTestCase):
19 def setUp(self):
20 if not (
21 self.tc.hasHostPackage("nativesdk-python3-maturin")
22 or self.tc.hasHostPackage("python3-maturin-native")
23 ):
24 raise unittest.SkipTest("No python3-maturin package in the SDK")
25
26 def test_maturin_list_python(self):
27 py_major = self._run("python3 -c 'import sys; print(sys.version_info.major)'")
28 py_minor = self._run("python3 -c 'import sys; print(sys.version_info.minor)'")
29 python_version = "%s.%s" % (py_major.strip(), py_minor.strip())
30 cmd = "maturin list-python"
31 output = self._run(cmd)
32 self.assertRegex(output, r"^🐍 1 python interpreter found:\n")
33 self.assertRegex(
34 output,
35 r" - CPython %s (.+)/usr/bin/python%s$" % (python_version, python_version),
36 )
37
38
39class MaturinDevelopTest(OESDKTestCase):
40 @classmethod
41 def setUpClass(self):
42 targetdir = os.path.join(self.tc.sdk_dir, "guessing-game")
43 try:
44 shutil.rmtree(targetdir)
45 except FileNotFoundError:
46 pass
47 shutil.copytree(
48 os.path.join(self.tc.files_dir, "maturin/guessing-game"), targetdir
49 )
50
51 def setUp(self):
52 machine = self.td.get("MACHINE")
53 if not (
54 self.tc.hasHostPackage("nativesdk-python3-maturin")
55 or self.tc.hasHostPackage("python3-maturin-native")
56 ):
57 raise unittest.SkipTest("No python3-maturin package in the SDK")
58 if not (
59 self.tc.hasHostPackage("packagegroup-rust-cross-canadian-%s" % machine)
60 ):
61 raise unittest.SkipTest(
62 "Testing 'maturin develop' requires Rust cross-canadian in the SDK"
63 )
64
65 def test_maturin_develop(self):
66 """
67 This test case requires:
68 (1) that a .venv can be created.
69 (2) a functional 'rustc' and 'cargo'
70 """
71 self._run("cd %s/guessing-game; python3 -m venv .venv" % self.tc.sdk_dir)
72 cmd = "cd %s/guessing-game; maturin develop" % self.tc.sdk_dir
73 output = self._run(cmd)
74 self.assertRegex(output, r"🔗 Found pyo3 bindings with abi3 support for Python ≥ 3.8")
75 self.assertRegex(output, r"🐍 Not using a specific python interpreter")
76 self.assertRegex(output, r"📡 Using build options features from pyproject.toml")
77 self.assertRegex(output, r"Compiling guessing-game v0.1.0")
78 self.assertRegex(output, r"📦 Built wheel for abi3 Python ≥ 3.8")
79 self.assertRegex(output, r"🛠 Installed guessing-game-0.1.0")
diff --git a/meta/lib/oeqa/sdk/cases/perl.py b/meta/lib/oeqa/sdk/cases/perl.py
index 14d76d820f..8eab4442e8 100644
--- a/meta/lib/oeqa/sdk/cases/perl.py
+++ b/meta/lib/oeqa/sdk/cases/perl.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/sdk/cases/python.py b/meta/lib/oeqa/sdk/cases/python.py
index a334abce5f..5ea992b9f3 100644
--- a/meta/lib/oeqa/sdk/cases/python.py
+++ b/meta/lib/oeqa/sdk/cases/python.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -8,17 +10,6 @@ from oeqa.sdk.case import OESDKTestCase
8from oeqa.utils.subprocesstweak import errors_have_output 10from oeqa.utils.subprocesstweak import errors_have_output
9errors_have_output() 11errors_have_output()
10 12
11class Python2Test(OESDKTestCase):
12 def setUp(self):
13 if not (self.tc.hasHostPackage("nativesdk-python-core") or
14 self.tc.hasHostPackage("python-core-native")):
15 raise unittest.SkipTest("No python package in the SDK")
16
17 def test_python2(self):
18 cmd = "python -c \"import codecs; print(codecs.encode('Uryyb, jbeyq', 'rot13'))\""
19 output = self._run(cmd)
20 self.assertEqual(output, "Hello, world\n")
21
22class Python3Test(OESDKTestCase): 13class Python3Test(OESDKTestCase):
23 def setUp(self): 14 def setUp(self):
24 if not (self.tc.hasHostPackage("nativesdk-python3-core") or 15 if not (self.tc.hasHostPackage("nativesdk-python3-core") or
diff --git a/meta/lib/oeqa/sdk/cases/rust.py b/meta/lib/oeqa/sdk/cases/rust.py
new file mode 100644
index 0000000000..f5d437bb19
--- /dev/null
+++ b/meta/lib/oeqa/sdk/cases/rust.py
@@ -0,0 +1,57 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os
8import shutil
9import unittest
10
11from oeqa.core.utils.path import remove_safe
12from oeqa.sdk.case import OESDKTestCase
13
14from oeqa.utils.subprocesstweak import errors_have_output
15errors_have_output()
16
17class RustCompileTest(OESDKTestCase):
18 td_vars = ['MACHINE']
19
20 @classmethod
21 def setUpClass(self):
22 targetdir = os.path.join(self.tc.sdk_dir, "hello")
23 try:
24 shutil.rmtree(targetdir)
25 except FileNotFoundError:
26 pass
27 shutil.copytree(os.path.join(self.tc.sdk_files_dir, "rust/hello"), targetdir)
28
29 def setUp(self):
30 machine = self.td.get("MACHINE")
31 if not self.tc.hasHostPackage("packagegroup-rust-cross-canadian-%s" % machine):
32 raise unittest.SkipTest("RustCompileTest class: SDK doesn't contain a Rust cross-canadian toolchain")
33
34 def test_cargo_build(self):
35 self._run('cd %s/hello; cargo build' % self.tc.sdk_dir)
36
37class RustHostCompileTest(OESDKTestCase):
38 td_vars = ['MACHINE', 'SDK_SYS']
39
40 @classmethod
41 def setUpClass(self):
42 targetdir = os.path.join(self.tc.sdk_dir, "hello")
43 try:
44 shutil.rmtree(targetdir)
45 except FileNotFoundError:
46 pass
47 shutil.copytree(os.path.join(self.tc.sdk_files_dir, "rust/hello"), targetdir)
48
49 def setUp(self):
50 machine = self.td.get("MACHINE")
51 if not self.tc.hasHostPackage("packagegroup-rust-cross-canadian-%s" % machine):
52 raise unittest.SkipTest("RustCompileTest class: SDK doesn't contain a Rust cross-canadian toolchain")
53
54 def test_cargo_build(self):
55 sdksys = self.td.get("SDK_SYS")
56 self._run('cd %s/hello; cargo build --target %s-gnu' % (self.tc.sdk_dir, sdksys))
57 self._run('cd %s/hello; cargo run --target %s-gnu' % (self.tc.sdk_dir, sdksys))
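
In the host-compile variant the cargo target triple is derived from SDK_SYS with a "-gnu" suffix, so the same project is built once for the target (the cross-canadian toolchain's default) and once for the SDK host. A hedged sketch of how such a test could additionally confirm that a target build produced the expected binary, reusing the check_elf() helper seen in the other SDK cases; the target/ path layout is cargo's default and this extra check is an assumption, not part of the committed test:

import os

def verify_cargo_target_build(case, project_dir, triple):
    """Build `project_dir` for `triple` and sanity-check the produced ELF.

    `case` is assumed to be an OESDKTestCase instance, so _run() executes in
    the SDK environment and check_elf() validates the resulting binary."""
    case._run("cd %s && cargo build --target %s" % (project_dir, triple))
    binary = os.path.join(project_dir, "target", triple, "debug", "hello")
    case.check_elf(binary)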
diff --git a/meta/lib/oeqa/sdk/files/rust/hello/Cargo.toml b/meta/lib/oeqa/sdk/files/rust/hello/Cargo.toml
new file mode 100644
index 0000000000..fe619478a6
--- /dev/null
+++ b/meta/lib/oeqa/sdk/files/rust/hello/Cargo.toml
@@ -0,0 +1,6 @@
1[package]
2name = "hello"
3version = "0.1.0"
4edition = "2021"
5
6[dependencies]
diff --git a/meta/lib/oeqa/sdk/files/rust/hello/build.rs b/meta/lib/oeqa/sdk/files/rust/hello/build.rs
new file mode 100644
index 0000000000..b1a533d5df
--- /dev/null
+++ b/meta/lib/oeqa/sdk/files/rust/hello/build.rs
@@ -0,0 +1,3 @@
1/* This is the simplest build script, used just to invoke the host compiler
2 in the build process. */
3fn main() {}
diff --git a/meta/lib/oeqa/sdk/files/rust/hello/src/main.rs b/meta/lib/oeqa/sdk/files/rust/hello/src/main.rs
new file mode 100644
index 0000000000..a06c03f82a
--- /dev/null
+++ b/meta/lib/oeqa/sdk/files/rust/hello/src/main.rs
@@ -0,0 +1,3 @@
1fn main() {
2 println!("Hello, OpenEmbedded world!");
3}
diff --git a/meta/lib/oeqa/sdk/testmetaidesupport.py b/meta/lib/oeqa/sdk/testmetaidesupport.py
new file mode 100644
index 0000000000..00ef30e82e
--- /dev/null
+++ b/meta/lib/oeqa/sdk/testmetaidesupport.py
@@ -0,0 +1,45 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7class TestSDK(object):
8 def run(self, d):
9 import json
10 import logging
11 from oeqa.sdk.context import OESDKTestContext, OESDKTestContextExecutor
12 from oeqa.utils import make_logger_bitbake_compatible
13
14 pn = d.getVar("PN")
15
16 logger = make_logger_bitbake_compatible(logging.getLogger("BitBake"))
17
18 sdk_dir = d.expand("${WORKDIR}/testsdk/")
19 bb.utils.remove(sdk_dir, True)
20 bb.utils.mkdirhier(sdk_dir)
21
22 sdk_envs = OESDKTestContextExecutor._get_sdk_environs(d.getVar("DEPLOY_DIR_IMAGE"))
23 tdname = d.expand("${DEPLOY_DIR_IMAGE}/${PN}.testdata.json")
24 test_data = json.load(open(tdname, "r"))
25
26 host_pkg_manifest = {"cmake-native":"", "gcc-cross":"", "gettext-native":"", "meson-native":"", "perl-native":"", "python3-core-native":"", }
27 target_pkg_manifest = {"gtk+3":""}
28
29 for s in sdk_envs:
30 bb.plain("meta-ide-support based SDK testing environment: %s" % s)
31
32 sdk_env = sdk_envs[s]
33
34 tc = OESDKTestContext(td=test_data, logger=logger, sdk_dir=sdk_dir,
35 sdk_env=sdk_env, target_pkg_manifest=target_pkg_manifest,
36 host_pkg_manifest=host_pkg_manifest)
37
38 tc.loadTests(OESDKTestContextExecutor.default_cases)
39
40 results = tc.runTests()
41 if results:
42 results.logSummary(pn)
43
44 if (not results) or (not results.wasSuccessful()):
45 bb.fatal('%s - FAILED' % (pn,), forcelog=True)
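
The two manifest dictionaries above are what individual cases later query through tc.hasHostPackage()/tc.hasTargetPackage(), so a meta-ide-support environment is treated as an SDK that "contains" exactly those packages. A sketch of the gating pattern the cases rely on, mirroring the skip logic used by EpoxyTest earlier in this series (the test class itself is illustrative, not part of the patch):

import unittest
from oeqa.sdk.case import OESDKTestCase

class MesonAvailableTest(OESDKTestCase):
    """Illustrative only: skip unless the context's host manifest lists Meson."""
    def setUp(self):
        if not (self.tc.hasHostPackage("nativesdk-meson")
                or self.tc.hasHostPackage("meson-native")):
            raise unittest.SkipTest("Environment does not provide Meson")

    def test_meson_present(self):
        self._run("meson --version")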
diff --git a/meta/lib/oeqa/sdk/testsdk.py b/meta/lib/oeqa/sdk/testsdk.py
index 35e40187bc..518b09febb 100644
--- a/meta/lib/oeqa/sdk/testsdk.py
+++ b/meta/lib/oeqa/sdk/testsdk.py
@@ -23,14 +23,6 @@ class TestSDKBase(object):
23 return configuration 23 return configuration
24 24
25 @staticmethod 25 @staticmethod
26 def get_sdk_json_result_dir(d):
27 json_result_dir = os.path.join(d.getVar("LOG_DIR"), 'oeqa')
28 custom_json_result_dir = d.getVar("OEQA_JSON_RESULT_DIR")
29 if custom_json_result_dir:
30 json_result_dir = custom_json_result_dir
31 return json_result_dir
32
33 @staticmethod
34 def get_sdk_result_id(configuration): 26 def get_sdk_result_id(configuration):
35 return '%s_%s_%s_%s_%s' % (configuration['TEST_TYPE'], configuration['IMAGE_BASENAME'], configuration['SDKMACHINE'], configuration['MACHINE'], configuration['STARTTIME']) 27 return '%s_%s_%s_%s_%s' % (configuration['TEST_TYPE'], configuration['IMAGE_BASENAME'], configuration['SDKMACHINE'], configuration['MACHINE'], configuration['STARTTIME'])
36 28
@@ -72,6 +64,7 @@ class TestSDK(TestSDKBase):
72 64
73 from bb.utils import export_proxies 65 from bb.utils import export_proxies
74 from oeqa.utils import make_logger_bitbake_compatible 66 from oeqa.utils import make_logger_bitbake_compatible
67 from oeqa.utils import get_json_result_dir
75 68
76 pn = d.getVar("PN") 69 pn = d.getVar("PN")
77 logger = make_logger_bitbake_compatible(logging.getLogger("BitBake")) 70 logger = make_logger_bitbake_compatible(logging.getLogger("BitBake"))
@@ -79,6 +72,9 @@ class TestSDK(TestSDKBase):
79 # sdk use network for download projects for build 72 # sdk use network for download projects for build
80 export_proxies(d) 73 export_proxies(d)
81 74
75 # We need the original PATH for testing the eSDK, not with our manipulations
76 os.environ['PATH'] = d.getVar("BB_ORIGENV", False).getVar("PATH")
77
82 tcname = self.get_tcname(d) 78 tcname = self.get_tcname(d)
83 79
84 if not os.path.exists(tcname): 80 if not os.path.exists(tcname):
@@ -131,7 +127,7 @@ class TestSDK(TestSDKBase):
131 component = "%s %s" % (pn, self.context_executor_class.name) 127 component = "%s %s" % (pn, self.context_executor_class.name)
132 context_msg = "%s:%s" % (os.path.basename(tcname), os.path.basename(sdk_env)) 128 context_msg = "%s:%s" % (os.path.basename(tcname), os.path.basename(sdk_env))
133 configuration = self.get_sdk_configuration(d, self.test_type) 129 configuration = self.get_sdk_configuration(d, self.test_type)
134 result.logDetails(self.get_sdk_json_result_dir(d), 130 result.logDetails(get_json_result_dir(d),
135 configuration, 131 configuration,
136 self.get_sdk_result_id(configuration)) 132 self.get_sdk_result_id(configuration))
137 result.logSummary(component, context_msg) 133 result.logSummary(component, context_msg)
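
This hunk replaces the class-local get_sdk_json_result_dir() with a shared get_json_result_dir() imported from oeqa.utils. Based on the removed code, the shared helper presumably follows the same logic; the sketch below is that assumption spelled out, not the verified source of oeqa.utils:

import os

def get_json_result_dir(d):
    """Return the directory for JSON test results.

    Mirrors the removed TestSDKBase.get_sdk_json_result_dir(): default to
    ${LOG_DIR}/oeqa unless OEQA_JSON_RESULT_DIR overrides it."""
    json_result_dir = os.path.join(d.getVar("LOG_DIR"), 'oeqa')
    custom_json_result_dir = d.getVar("OEQA_JSON_RESULT_DIR")
    if custom_json_result_dir:
        json_result_dir = custom_json_result_dir
    return json_result_dir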
diff --git a/meta/lib/oeqa/sdkext/cases/devtool.py b/meta/lib/oeqa/sdkext/cases/devtool.py
index a5c6a76e02..5ffb732556 100644
--- a/meta/lib/oeqa/sdkext/cases/devtool.py
+++ b/meta/lib/oeqa/sdkext/cases/devtool.py
@@ -112,7 +112,7 @@ class SdkUpdateTest(OESDKExtTestCase):
112 cmd = 'oe-publish-sdk %s %s' % (tcname_new, self.publish_dir) 112 cmd = 'oe-publish-sdk %s %s' % (tcname_new, self.publish_dir)
113 subprocess.check_output(cmd, shell=True) 113 subprocess.check_output(cmd, shell=True)
114 114
115 self.http_service = HTTPService(self.publish_dir) 115 self.http_service = HTTPService(self.publish_dir, logger=self.logger)
116 self.http_service.start() 116 self.http_service.start()
117 117
118 self.http_url = "http://127.0.0.1:%d" % self.http_service.port 118 self.http_url = "http://127.0.0.1:%d" % self.http_service.port
diff --git a/meta/lib/oeqa/sdkext/testsdk.py b/meta/lib/oeqa/sdkext/testsdk.py
index ffd185ec55..9d5a99d900 100644
--- a/meta/lib/oeqa/sdkext/testsdk.py
+++ b/meta/lib/oeqa/sdkext/testsdk.py
@@ -16,6 +16,7 @@ class TestSDKExt(TestSDKBase):
16 from bb.utils import export_proxies 16 from bb.utils import export_proxies
17 from oeqa.utils import avoid_paths_in_environ, make_logger_bitbake_compatible, subprocesstweak 17 from oeqa.utils import avoid_paths_in_environ, make_logger_bitbake_compatible, subprocesstweak
18 from oeqa.sdkext.context import OESDKExtTestContext, OESDKExtTestContextExecutor 18 from oeqa.sdkext.context import OESDKExtTestContext, OESDKExtTestContextExecutor
19 from oeqa.utils import get_json_result_dir
19 20
20 pn = d.getVar("PN") 21 pn = d.getVar("PN")
21 logger = make_logger_bitbake_compatible(logging.getLogger("BitBake")) 22 logger = make_logger_bitbake_compatible(logging.getLogger("BitBake"))
@@ -67,10 +68,10 @@ class TestSDKExt(TestSDKBase):
67 # and we don't spend hours downloading kernels for the kernel module test 68 # and we don't spend hours downloading kernels for the kernel module test
68 # Abuse auto.conf since local.conf would be overwritten by the SDK 69 # Abuse auto.conf since local.conf would be overwritten by the SDK
69 with open(os.path.join(sdk_dir, 'conf', 'auto.conf'), 'a+') as f: 70 with open(os.path.join(sdk_dir, 'conf', 'auto.conf'), 'a+') as f:
70 f.write('SSTATE_MIRRORS += " \\n file://.* file://%s/PATH"\n' % test_data.get('SSTATE_DIR')) 71 f.write('SSTATE_MIRRORS += "file://.* file://%s/PATH"\n' % test_data.get('SSTATE_DIR'))
71 f.write('SOURCE_MIRROR_URL = "file://%s"\n' % test_data.get('DL_DIR')) 72 f.write('SOURCE_MIRROR_URL = "file://%s"\n' % test_data.get('DL_DIR'))
72 f.write('INHERIT += "own-mirrors"\n') 73 f.write('INHERIT += "own-mirrors"\n')
73 f.write('PREMIRRORS_prepend = " git://git.yoctoproject.org/.* git://%s/git2/git.yoctoproject.org.BASENAME \\n "\n' % test_data.get('DL_DIR')) 74 f.write('PREMIRRORS:prepend = "git://git.yoctoproject.org/.* git://%s/git2/git.yoctoproject.org.BASENAME "\n' % test_data.get('DL_DIR'))
74 75
75 # We need to do this in case we have a minimal SDK 76 # We need to do this in case we have a minimal SDK
76 subprocess.check_output(". %s > /dev/null; devtool sdk-install meta-extsdk-toolchain" % \ 77 subprocess.check_output(". %s > /dev/null; devtool sdk-install meta-extsdk-toolchain" % \
@@ -91,7 +92,7 @@ class TestSDKExt(TestSDKBase):
91 component = "%s %s" % (pn, OESDKExtTestContextExecutor.name) 92 component = "%s %s" % (pn, OESDKExtTestContextExecutor.name)
92 context_msg = "%s:%s" % (os.path.basename(tcname), os.path.basename(sdk_env)) 93 context_msg = "%s:%s" % (os.path.basename(tcname), os.path.basename(sdk_env))
93 configuration = self.get_sdk_configuration(d, 'sdkext') 94 configuration = self.get_sdk_configuration(d, 'sdkext')
94 result.logDetails(self.get_sdk_json_result_dir(d), 95 result.logDetails(get_json_result_dir(d),
95 configuration, 96 configuration,
96 self.get_sdk_result_id(configuration)) 97 self.get_sdk_result_id(configuration))
97 result.logSummary(component, context_msg) 98 result.logSummary(component, context_msg)
diff --git a/meta/lib/oeqa/selftest/case.py b/meta/lib/oeqa/selftest/case.py
index dcad4f76ec..da35b25f68 100644
--- a/meta/lib/oeqa/selftest/case.py
+++ b/meta/lib/oeqa/selftest/case.py
@@ -117,10 +117,6 @@ class OESelftestTestCase(OETestCase):
117 if e.errno != errno.ENOENT: 117 if e.errno != errno.ENOENT:
118 raise 118 raise
119 119
120 if self.tc.custommachine:
121 machine_conf = 'MACHINE ??= "%s"\n' % self.tc.custommachine
122 self.set_machine_config(machine_conf)
123
124 # tests might need their own setup 120 # tests might need their own setup
125 # but if they overwrite this one they have to call 121 # but if they overwrite this one they have to call
126 # super each time, so let's give them an alternative 122 # super each time, so let's give them an alternative
@@ -178,19 +174,11 @@ class OESelftestTestCase(OETestCase):
178 self.logger.debug("Writing to: %s\n%s\n" % (dest_path, data)) 174 self.logger.debug("Writing to: %s\n%s\n" % (dest_path, data))
179 ftools.write_file(dest_path, data) 175 ftools.write_file(dest_path, data)
180 176
181 if not multiconfig and self.tc.custommachine and 'MACHINE' in data:
182 machine = get_bb_var('MACHINE')
183 self.logger.warning('MACHINE overridden: %s' % machine)
184
185 def append_config(self, data): 177 def append_config(self, data):
186 """Append to <builddir>/conf/selftest.inc""" 178 """Append to <builddir>/conf/selftest.inc"""
187 self.logger.debug("Appending to: %s\n%s\n" % (self.testinc_path, data)) 179 self.logger.debug("Appending to: %s\n%s\n" % (self.testinc_path, data))
188 ftools.append_file(self.testinc_path, data) 180 ftools.append_file(self.testinc_path, data)
189 181
190 if self.tc.custommachine and 'MACHINE' in data:
191 machine = get_bb_var('MACHINE')
192 self.logger.warning('MACHINE overridden: %s' % machine)
193
194 def remove_config(self, data): 182 def remove_config(self, data):
195 """Remove data from <builddir>/conf/selftest.inc""" 183 """Remove data from <builddir>/conf/selftest.inc"""
196 self.logger.debug("Removing from: %s\n%s\n" % (self.testinc_path, data)) 184 self.logger.debug("Removing from: %s\n%s\n" % (self.testinc_path, data))
@@ -249,6 +237,13 @@ class OESelftestTestCase(OETestCase):
249 self.logger.debug("Writing to: %s\n%s\n" % (self.machineinc_path, data)) 237 self.logger.debug("Writing to: %s\n%s\n" % (self.machineinc_path, data))
250 ftools.write_file(self.machineinc_path, data) 238 ftools.write_file(self.machineinc_path, data)
251 239
240 def disable_class(self, classname):
241 destfile = "%s/classes/%s.bbclass" % (self.builddir, classname)
242 os.makedirs(os.path.dirname(destfile), exist_ok=True)
243 self.track_for_cleanup(destfile)
244 self.logger.debug("Creating empty class: %s\n" % (destfile))
245 ftools.write_file(destfile, "")
246
252 # check does path exist 247 # check does path exist
253 def assertExists(self, expr, msg=None): 248 def assertExists(self, expr, msg=None):
254 if not os.path.exists(expr): 249 if not os.path.exists(expr):
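
The new disable_class() helper neutralises a class by writing an empty .bbclass with the same name under the build directory, which BitBake's class search finds ahead of the layer's copy; the file is tracked for cleanup so later tests see the real class again. A short usage sketch inside a selftest (the class and recipe names are examples only, not taken from the patch):

from oeqa.selftest.case import OESelftestTestCase
from oeqa.utils.commands import bitbake

class DisableClassExample(OESelftestTestCase):
    def test_build_without_class(self):
        # Shadow an inherited class (e.g. report-error) with an empty one
        # for the duration of this test, then build as usual.
        self.disable_class("report-error")
        bitbake("quilt-native")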
diff --git a/meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py b/meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py
index f7c356ad09..2c9584d329 100644
--- a/meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py
+++ b/meta/lib/oeqa/selftest/cases/_sstatetests_noauto.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -6,8 +8,8 @@ import os
6import shutil 8import shutil
7 9
8import oeqa.utils.ftools as ftools 10import oeqa.utils.ftools as ftools
9from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_test_layer 11from oeqa.utils.commands import runCmd, bitbake, get_bb_var
10from oeqa.selftest.cases.sstate import SStateBase 12from oeqa.selftest.cases.sstatetests import SStateBase
11 13
12 14
13class RebuildFromSState(SStateBase): 15class RebuildFromSState(SStateBase):
@@ -90,7 +92,7 @@ class RebuildFromSState(SStateBase):
90 self.assertFalse(failed_cleansstate, msg="The following recipes have failed cleansstate(all others have passed both cleansstate and rebuild from sstate tests): %s" % ' '.join(map(str, failed_cleansstate))) 92 self.assertFalse(failed_cleansstate, msg="The following recipes have failed cleansstate(all others have passed both cleansstate and rebuild from sstate tests): %s" % ' '.join(map(str, failed_cleansstate)))
91 93
92 def test_sstate_relocation(self): 94 def test_sstate_relocation(self):
93 self.run_test_sstate_rebuild(['core-image-sato-sdk'], relocate=True, rebuild_dependencies=True) 95 self.run_test_sstate_rebuild(['core-image-weston-sdk'], relocate=True, rebuild_dependencies=True)
94 96
95 def test_sstate_rebuild(self): 97 def test_sstate_rebuild(self):
96 self.run_test_sstate_rebuild(['core-image-sato-sdk'], relocate=False, rebuild_dependencies=True) 98 self.run_test_sstate_rebuild(['core-image-weston-sdk'], relocate=False, rebuild_dependencies=True)
diff --git a/meta/lib/oeqa/selftest/cases/archiver.py b/meta/lib/oeqa/selftest/cases/archiver.py
index ddd08ecf84..3cb888c506 100644
--- a/meta/lib/oeqa/selftest/cases/archiver.py
+++ b/meta/lib/oeqa/selftest/cases/archiver.py
@@ -1,9 +1,12 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import os 7import os
6import glob 8import glob
9import re
7from oeqa.utils.commands import bitbake, get_bb_vars 10from oeqa.utils.commands import bitbake, get_bb_vars
8from oeqa.selftest.case import OESelftestTestCase 11from oeqa.selftest.case import OESelftestTestCase
9 12
@@ -35,11 +38,11 @@ class Archiver(OESelftestTestCase):
35 src_path = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['TARGET_SYS']) 38 src_path = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['TARGET_SYS'])
36 39
37 # Check that include_recipe was included 40 # Check that include_recipe was included
38 included_present = len(glob.glob(src_path + '/%s-*' % include_recipe)) 41 included_present = len(glob.glob(src_path + '/%s-*/*' % include_recipe))
39 self.assertTrue(included_present, 'Recipe %s was not included.' % include_recipe) 42 self.assertTrue(included_present, 'Recipe %s was not included.' % include_recipe)
40 43
41 # Check that exclude_recipe was excluded 44 # Check that exclude_recipe was excluded
42 excluded_present = len(glob.glob(src_path + '/%s-*' % exclude_recipe)) 45 excluded_present = len(glob.glob(src_path + '/%s-*/*' % exclude_recipe))
43 self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % exclude_recipe) 46 self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % exclude_recipe)
44 47
45 def test_archiver_filters_by_type(self): 48 def test_archiver_filters_by_type(self):
@@ -67,11 +70,11 @@ class Archiver(OESelftestTestCase):
67 src_path_native = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['BUILD_SYS']) 70 src_path_native = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['BUILD_SYS'])
68 71
69 # Check that target_recipe was included 72 # Check that target_recipe was included
70 included_present = len(glob.glob(src_path_target + '/%s-*' % target_recipe)) 73 included_present = len(glob.glob(src_path_target + '/%s-*/*' % target_recipe))
71 self.assertTrue(included_present, 'Recipe %s was not included.' % target_recipe) 74 self.assertTrue(included_present, 'Recipe %s was not included.' % target_recipe)
72 75
73 # Check that native_recipe was excluded 76 # Check that native_recipe was excluded
74 excluded_present = len(glob.glob(src_path_native + '/%s-*' % native_recipe)) 77 excluded_present = len(glob.glob(src_path_native + '/%s-*/*' % native_recipe))
75 self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % native_recipe) 78 self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % native_recipe)
76 79
77 def test_archiver_filters_by_type_and_name(self): 80 def test_archiver_filters_by_type_and_name(self):
@@ -104,20 +107,51 @@ class Archiver(OESelftestTestCase):
104 src_path_native = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['BUILD_SYS']) 107 src_path_native = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['BUILD_SYS'])
105 108
106 # Check that target_recipe[0] and native_recipes[1] were included 109 # Check that target_recipe[0] and native_recipes[1] were included
107 included_present = len(glob.glob(src_path_target + '/%s-*' % target_recipes[0])) 110 included_present = len(glob.glob(src_path_target + '/%s-*/*' % target_recipes[0]))
108 self.assertTrue(included_present, 'Recipe %s was not included.' % target_recipes[0]) 111 self.assertTrue(included_present, 'Recipe %s was not included.' % target_recipes[0])
109 112
110 included_present = len(glob.glob(src_path_native + '/%s-*' % native_recipes[1])) 113 included_present = len(glob.glob(src_path_native + '/%s-*/*' % native_recipes[1]))
111 self.assertTrue(included_present, 'Recipe %s was not included.' % native_recipes[1]) 114 self.assertTrue(included_present, 'Recipe %s was not included.' % native_recipes[1])
112 115
113 # Check that native_recipes[0] and target_recipes[1] were excluded 116 # Check that native_recipes[0] and target_recipes[1] were excluded
114 excluded_present = len(glob.glob(src_path_native + '/%s-*' % native_recipes[0])) 117 excluded_present = len(glob.glob(src_path_native + '/%s-*/*' % native_recipes[0]))
115 self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % native_recipes[0]) 118 self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % native_recipes[0])
116 119
117 excluded_present = len(glob.glob(src_path_target + '/%s-*' % target_recipes[1])) 120 excluded_present = len(glob.glob(src_path_target + '/%s-*/*' % target_recipes[1]))
118 self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % target_recipes[1]) 121 self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % target_recipes[1])
119 122
123 def test_archiver_multiconfig_shared_unpack_and_patch(self):
124 """
 125 Test that shared recipes in original mode with diff enabled work in multiconfig,
126 otherwise it will not build when using the same TMP dir.
127 """
128
129 features = 'BBMULTICONFIG = "mc1 mc2"\n'
130 features += 'INHERIT += "archiver"\n'
131 features += 'ARCHIVER_MODE[src] = "original"\n'
132 features += 'ARCHIVER_MODE[diff] = "1"\n'
133 self.write_config(features)
134
135 # We can use any machine in multiconfig as long as they are different
136 self.write_config('MACHINE = "qemuarm"\n', 'mc1')
137 self.write_config('MACHINE = "qemux86"\n', 'mc2')
138
139 task = 'do_unpack_and_patch'
140 # Use gcc-source as it is a shared recipe (appends the pv to the pn)
141 pn = 'gcc-source-%s' % get_bb_vars(['PV'], 'gcc')['PV']
142
143 # Generate the tasks signatures
144 bitbake('mc:mc1:%s mc:mc2:%s -c %s -S lockedsigs' % (pn, pn, task))
120 145
146 # Check the tasks signatures
 147 # To be machine agnostic, the task needs to generate the same signature for each machine
148 locked_sigs_inc = "%s/locked-sigs.inc" % self.builddir
149 locked_sigs = open(locked_sigs_inc).read()
150 task_sigs = re.findall(r"%s:%s:.*" % (pn, task), locked_sigs)
151 uniq_sigs = set(task_sigs)
152 self.assertFalse(len(uniq_sigs) - 1, \
153 'The task "%s" of the recipe "%s" has different signatures in "%s" for each machine in multiconfig' \
154 % (task, pn, locked_sigs_inc))
121 155
122 def test_archiver_srpm_mode(self): 156 def test_archiver_srpm_mode(self):
123 """ 157 """
@@ -163,21 +197,21 @@ class Archiver(OESelftestTestCase):
163 Test that the archiver works with `ARCHIVER_MODE[src] = "patched"`. 197 Test that the archiver works with `ARCHIVER_MODE[src] = "patched"`.
164 """ 198 """
165 199
166 self._test_archiver_mode('patched', 'selftest-ed-native-1.14.1-r0-patched.tar.gz') 200 self._test_archiver_mode('patched', 'selftest-ed-native-1.14.1-r0-patched.tar.xz')
167 201
168 def test_archiver_mode_configured(self): 202 def test_archiver_mode_configured(self):
169 """ 203 """
170 Test that the archiver works with `ARCHIVER_MODE[src] = "configured"`. 204 Test that the archiver works with `ARCHIVER_MODE[src] = "configured"`.
171 """ 205 """
172 206
173 self._test_archiver_mode('configured', 'selftest-ed-native-1.14.1-r0-configured.tar.gz') 207 self._test_archiver_mode('configured', 'selftest-ed-native-1.14.1-r0-configured.tar.xz')
174 208
175 def test_archiver_mode_recipe(self): 209 def test_archiver_mode_recipe(self):
176 """ 210 """
177 Test that the archiver works with `ARCHIVER_MODE[recipe] = "1"`. 211 Test that the archiver works with `ARCHIVER_MODE[recipe] = "1"`.
178 """ 212 """
179 213
180 self._test_archiver_mode('patched', 'selftest-ed-native-1.14.1-r0-recipe.tar.gz', 214 self._test_archiver_mode('patched', 'selftest-ed-native-1.14.1-r0-recipe.tar.xz',
181 'ARCHIVER_MODE[recipe] = "1"\n') 215 'ARCHIVER_MODE[recipe] = "1"\n')
182 216
183 def test_archiver_mode_diff(self): 217 def test_archiver_mode_diff(self):
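
The multiconfig check relies on locked-sigs.inc lines of the form SIGGEN_LOCKEDSIGS_<type> += "<pn>:<task>:<hash>", and the test only passes if every machine recorded the same hash for the shared gcc-source task. A small sketch of that comparison; the version and hash in the example line are made up for illustration:

import re

def shared_task_signatures(locked_sigs_text, pn, task):
    """Collect the distinct signatures recorded for pn:task.

    Expected line shape (version/hash below are illustrative):
    SIGGEN_LOCKEDSIGS_t-qemuarm += "gcc-source-13.2.0:do_unpack_and_patch:0123abcd..."
    """
    entries = re.findall(r'%s:%s:(\S+)"' % (re.escape(pn), re.escape(task)),
                         locked_sigs_text)
    return set(entries)

# A machine-agnostic shared task should yield exactly one distinct signature:
# assert len(shared_task_signatures(open("locked-sigs.inc").read(), pn, task)) == 1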
diff --git a/meta/lib/oeqa/selftest/cases/baremetal.py b/meta/lib/oeqa/selftest/cases/baremetal.py
new file mode 100644
index 0000000000..cadaea2f1a
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/baremetal.py
@@ -0,0 +1,14 @@
1
2#
3# Copyright OpenEmbedded Contributors
4#
5# SPDX-License-Identifier: MIT
6#
7
8from oeqa.selftest.case import OESelftestTestCase
9from oeqa.utils.commands import bitbake
10
11class BaremetalTest(OESelftestTestCase):
12 def test_baremetal(self):
13 self.write_config('TCLIBC = "baremetal"')
14 bitbake('baremetal-helloworld')
diff --git a/meta/lib/oeqa/selftest/cases/bblayers.py b/meta/lib/oeqa/selftest/cases/bblayers.py
index f131d9856c..695d17377d 100644
--- a/meta/lib/oeqa/selftest/cases/bblayers.py
+++ b/meta/lib/oeqa/selftest/cases/bblayers.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -6,12 +8,23 @@ import os
6import re 8import re
7 9
8import oeqa.utils.ftools as ftools 10import oeqa.utils.ftools as ftools
9from oeqa.utils.commands import runCmd, get_bb_var, get_bb_vars 11from oeqa.utils.commands import runCmd, get_bb_var, get_bb_vars, bitbake
10 12
11from oeqa.selftest.case import OESelftestTestCase 13from oeqa.selftest.case import OESelftestTestCase
12 14
13class BitbakeLayers(OESelftestTestCase): 15class BitbakeLayers(OESelftestTestCase):
14 16
17 @classmethod
18 def setUpClass(cls):
19 super(BitbakeLayers, cls).setUpClass()
20 bitbake("python3-jsonschema-native")
21 bitbake("-c addto_recipe_sysroot python3-jsonschema-native")
22
23 def test_bitbakelayers_layerindexshowdepends(self):
24 result = runCmd('bitbake-layers layerindex-show-depends meta-poky')
25 find_in_contents = re.search("openembedded-core", result.output)
26 self.assertTrue(find_in_contents, msg = "openembedded-core should have been listed at this step. bitbake-layers layerindex-show-depends meta-poky output: %s" % result.output)
27
15 def test_bitbakelayers_showcrossdepends(self): 28 def test_bitbakelayers_showcrossdepends(self):
16 result = runCmd('bitbake-layers show-cross-depends') 29 result = runCmd('bitbake-layers show-cross-depends')
17 self.assertIn('aspell', result.output) 30 self.assertIn('aspell', result.output)
@@ -41,7 +54,7 @@ class BitbakeLayers(OESelftestTestCase):
41 bb_file = os.path.join(testoutdir, recipe_path, recipe_file) 54 bb_file = os.path.join(testoutdir, recipe_path, recipe_file)
42 self.assertTrue(os.path.isfile(bb_file), msg = "Cannot find xcursor-transparent-theme_0.1.1.bb in the test_bitbakelayers_flatten local dir.") 55 self.assertTrue(os.path.isfile(bb_file), msg = "Cannot find xcursor-transparent-theme_0.1.1.bb in the test_bitbakelayers_flatten local dir.")
43 contents = ftools.read_file(bb_file) 56 contents = ftools.read_file(bb_file)
44 find_in_contents = re.search("##### bbappended from meta-selftest #####\n(.*\n)*include test_recipe.inc", contents) 57 find_in_contents = re.search(r"##### bbappended from meta-selftest #####\n(.*\n)*include test_recipe.inc", contents)
45 self.assertTrue(find_in_contents, msg = "Flattening layers did not work. bitbake-layers flatten output: %s" % result.output) 58 self.assertTrue(find_in_contents, msg = "Flattening layers did not work. bitbake-layers flatten output: %s" % result.output)
46 59
47 def test_bitbakelayers_add_remove(self): 60 def test_bitbakelayers_add_remove(self):
@@ -72,8 +85,9 @@ class BitbakeLayers(OESelftestTestCase):
72 result = runCmd('bitbake-layers show-recipes -i image') 85 result = runCmd('bitbake-layers show-recipes -i image')
73 self.assertIn('core-image-minimal', result.output) 86 self.assertIn('core-image-minimal', result.output)
74 self.assertNotIn('mtd-utils:', result.output) 87 self.assertNotIn('mtd-utils:', result.output)
75 result = runCmd('bitbake-layers show-recipes -i cmake,pkgconfig') 88 result = runCmd('bitbake-layers show-recipes -i meson,pkgconfig')
76 self.assertIn('libproxy:', result.output) 89 self.assertIn('libproxy:', result.output)
90 result = runCmd('bitbake-layers show-recipes -i cmake,pkgconfig')
77 self.assertNotIn('mtd-utils:', result.output) # doesn't inherit either 91 self.assertNotIn('mtd-utils:', result.output) # doesn't inherit either
78 self.assertNotIn('wget:', result.output) # doesn't inherit cmake 92 self.assertNotIn('wget:', result.output) # doesn't inherit cmake
79 self.assertNotIn('waffle:', result.output) # doesn't inherit pkgconfig 93 self.assertNotIn('waffle:', result.output) # doesn't inherit pkgconfig
@@ -106,6 +120,11 @@ class BitbakeLayers(OESelftestTestCase):
106 120
107 self.assertEqual(bb_vars['BBFILE_PRIORITY_%s' % layername], str(priority), 'BBFILE_PRIORITY_%s != %d' % (layername, priority)) 121 self.assertEqual(bb_vars['BBFILE_PRIORITY_%s' % layername], str(priority), 'BBFILE_PRIORITY_%s != %d' % (layername, priority))
108 122
123 result = runCmd('bitbake-layers save-build-conf {} {}'.format(layerpath, "buildconf-1"))
124 for f in ('local.conf.sample', 'bblayers.conf.sample', 'conf-summary.txt', 'conf-notes.txt'):
125 fullpath = os.path.join(layerpath, "conf", "templates", "buildconf-1", f)
126 self.assertTrue(os.path.exists(fullpath), "Template configuration file {} not found".format(fullpath))
127
109 def get_recipe_basename(self, recipe): 128 def get_recipe_basename(self, recipe):
110 recipe_file = "" 129 recipe_file = ""
111 result = runCmd("bitbake-layers show-recipes -f %s" % recipe) 130 result = runCmd("bitbake-layers show-recipes -f %s" % recipe)
@@ -116,3 +135,108 @@ class BitbakeLayers(OESelftestTestCase):
116 135
117 self.assertTrue(os.path.isfile(recipe_file), msg = "Can't find recipe file for %s" % recipe) 136 self.assertTrue(os.path.isfile(recipe_file), msg = "Can't find recipe file for %s" % recipe)
118 return os.path.basename(recipe_file) 137 return os.path.basename(recipe_file)
138
139 def validate_layersjson(self, json):
140 python = os.path.join(get_bb_var('STAGING_BINDIR', 'python3-jsonschema-native'), 'nativepython3')
141 jsonvalidator = os.path.join(get_bb_var('STAGING_BINDIR', 'python3-jsonschema-native'), 'jsonschema')
142 jsonschema = os.path.join(get_bb_var('COREBASE'), 'meta/files/layers.schema.json')
143 result = runCmd("{} {} -i {} {}".format(python, jsonvalidator, json, jsonschema))
144
145 def test_validate_examplelayersjson(self):
146 json = os.path.join(get_bb_var('COREBASE'), "meta/files/layers.example.json")
147 self.validate_layersjson(json)
148
149 def test_bitbakelayers_setup(self):
150 result = runCmd('bitbake-layers create-layers-setup {}'.format(self.testlayer_path))
151 jsonfile = os.path.join(self.testlayer_path, "setup-layers.json")
152 self.validate_layersjson(jsonfile)
153
154 # The revision-under-test may not necessarily be available on the remote server,
155 # so replace it with a revision that has a yocto-4.1 tag.
156 import json
157 with open(jsonfile) as f:
158 data = json.load(f)
159 for s in data['sources']:
160 data['sources'][s]['git-remote']['rev'] = '5200799866b92259e855051112520006e1aaaac0'
161 with open(jsonfile, 'w') as f:
162 json.dump(data, f)
163
164 testcheckoutdir = os.path.join(self.builddir, 'test-layer-checkout')
165 result = runCmd('{}/setup-layers --destdir {}'.format(self.testlayer_path, testcheckoutdir))
166 layers_json = os.path.join(testcheckoutdir, ".oe-layers.json")
167 self.assertTrue(os.path.exists(layers_json), "File {} not found in test layer checkout".format(layers_json))
168
 169 # As setup-layers checks out an old revision of poky, there is no setup-build symlink,
170 # and we need to run oe-setup-build directly from the current poky tree under test
171 oe_setup_build = os.path.join(get_bb_var('COREBASE'), 'scripts/oe-setup-build')
172 oe_setup_build_l = os.path.join(testcheckoutdir, 'setup-build')
173 os.symlink(oe_setup_build,oe_setup_build_l)
174
175 cmd = '{} --layerlist {} list -v'.format(oe_setup_build_l, layers_json)
176 result = runCmd(cmd)
177 cond = "conf/templates/default" in result.output
178 self.assertTrue(cond, "Incorrect output from {}: {}".format(cmd, result.output))
179
180 # rather than hardcode the build setup cmdline here, let's actually run what the tool suggests to the user
181 conf = None
182 if 'poky-default' in result.output:
183 conf = 'poky-default'
184 elif 'meta-default' in result.output:
185 conf = 'meta-default'
186 self.assertIsNotNone(conf, "Could not find the configuration to set up a build in the output: {}".format(result.output))
187
188 cmd = '{} --layerlist {} setup -c {} --no-shell'.format(oe_setup_build_l, layers_json, conf)
189 result = runCmd(cmd)
190
191 def test_bitbakelayers_updatelayer(self):
192 result = runCmd('bitbake-layers create-layers-setup {}'.format(self.testlayer_path))
193 jsonfile = os.path.join(self.testlayer_path, "setup-layers.json")
194 self.validate_layersjson(jsonfile)
195
196 import json
197 with open(jsonfile) as f:
198 data = json.load(f)
199 repos = []
200 for s in data['sources']:
201 repos.append(s)
202
203 self.assertTrue(len(repos) > 1, "Not enough repositories available")
204 self.validate_layersjson(jsonfile)
205
206 test_ref_1 = 'ref_1'
207 test_ref_2 = 'ref_2'
208
209 # Create a new layers setup using custom references
210 result = runCmd('bitbake-layers create-layers-setup --use-custom-reference {first_repo}:{test_ref} --use-custom-reference {second_repo}:{test_ref} {path}'
211 .format(first_repo=repos[0], second_repo=repos[1], test_ref=test_ref_1, path=self.testlayer_path))
212 self.validate_layersjson(jsonfile)
213
214 with open(jsonfile) as f:
215 data = json.load(f)
216 first_rev_1 = data['sources'][repos[0]]['git-remote']['rev']
217 first_desc_1 = data['sources'][repos[0]]['git-remote']['describe']
218 second_rev_1 = data['sources'][repos[1]]['git-remote']['rev']
219 second_desc_1 = data['sources'][repos[1]]['git-remote']['describe']
220
221 self.assertEqual(first_rev_1, test_ref_1, "Revision not set correctly: '{}'".format(first_rev_1))
222 self.assertEqual(first_desc_1, '', "Describe not cleared: '{}'".format(first_desc_1))
223 self.assertEqual(second_rev_1, test_ref_1, "Revision not set correctly: '{}'".format(second_rev_1))
224 self.assertEqual(second_desc_1, '', "Describe not cleared: '{}'".format(second_desc_1))
225
226 # Update one of the repositories in the layers setup using a different custom reference
227 # This should only update the selected repository, everything else should remain as is
228 result = runCmd('bitbake-layers create-layers-setup --update --use-custom-reference {first_repo}:{test_ref} {path}'
229 .format(first_repo=repos[0], test_ref=test_ref_2, path=self.testlayer_path))
230 self.validate_layersjson(jsonfile)
231
232 with open(jsonfile) as f:
233 data = json.load(f)
234 first_rev_2 = data['sources'][repos[0]]['git-remote']['rev']
235 first_desc_2 = data['sources'][repos[0]]['git-remote']['describe']
236 second_rev_2 = data['sources'][repos[1]]['git-remote']['rev']
237 second_desc_2 = data['sources'][repos[1]]['git-remote']['describe']
238
239 self.assertEqual(first_rev_2, test_ref_2, "Revision not set correctly: '{}'".format(first_rev_2))
240 self.assertEqual(first_desc_2, '', "Describe not cleared: '{}'".format(first_desc_2))
241 self.assertEqual(second_rev_2, second_rev_1, "Revision should not be updated: '{}'".format(second_rev_2))
242 self.assertEqual(second_desc_2, second_desc_1, "Describe should not be updated: '{}'".format(second_desc_2))
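
Both new tests treat setup-layers.json as a plain dictionary: each entry under "sources" carries a "git-remote" block whose "rev" (and "describe") fields are what --use-custom-reference rewrites. A minimal sketch of reading that structure, with field names taken from the test above and the example repository name invented for illustration:

import json

def layer_revisions(jsonfile):
    """Map each configured layer repository to its pinned revision."""
    with open(jsonfile) as f:
        data = json.load(f)
    # Expected shape (values illustrative):
    # {"sources": {"poky": {"git-remote": {"rev": "ref_1", "describe": ""}}}}
    return {name: src["git-remote"]["rev"] for name, src in data["sources"].items()}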
diff --git a/meta/lib/oeqa/selftest/cases/bblock.py b/meta/lib/oeqa/selftest/cases/bblock.py
new file mode 100644
index 0000000000..2b62d2a0aa
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/bblock.py
@@ -0,0 +1,203 @@
1#
2# Copyright (c) 2023 BayLibre, SAS
3# Author: Julien Stepahn <jstephan@baylibre.com>
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8import os
9import re
10import bb.tinfoil
11
12import oeqa.utils.ftools as ftools
13from oeqa.utils.commands import runCmd, get_bb_var, get_bb_vars, bitbake
14
15from oeqa.selftest.case import OESelftestTestCase
16
17
18class BBLock(OESelftestTestCase):
19 @classmethod
20 def setUpClass(cls):
21 super(BBLock, cls).setUpClass()
22 cls.lockfile = cls.builddir + "/conf/bblock.conf"
23
24 def unlock_recipes(self, recipes=None, tasks=None):
25 cmd = "bblock -r "
26 if recipes:
27 cmd += " ".join(recipes)
28 if tasks:
29 cmd += " -t " + ",".join(tasks)
30 result = runCmd(cmd)
31
32 if recipes:
33 # ensure all signatures are removed from lockfile
34 contents = ftools.read_file(self.lockfile)
35 for recipe in recipes:
36 for task in tasks:
37 find_in_contents = re.search(
38 'SIGGEN_LOCKEDSIGS_.+\s\+=\s"%s:%s:.*"' % (recipe, task),
39 contents,
40 )
41 self.assertFalse(
42 find_in_contents,
43 msg="%s:%s should not be present into bblock.conf anymore"
44 % (recipe, task),
45 )
46 self.assertExists(self.lockfile)
47 else:
48 self.assertNotExists(self.lockfile)
49
50 def lock_recipes(self, recipes, tasks=None):
51 cmd = "bblock " + " ".join(recipes)
52 if tasks:
53 cmd += " -t " + ",".join(tasks)
54
55 result = runCmd(cmd)
56
57 self.assertExists(self.lockfile)
58
59 # ensure all signatures are added to lockfile
60 contents = ftools.read_file(self.lockfile)
61 for recipe in recipes:
62 if tasks:
63 for task in tasks:
64 find_in_contents = re.search(
65 'SIGGEN_LOCKEDSIGS_.+\s\+=\s"%s:%s:.*"' % (recipe, task),
66 contents,
67 )
68 self.assertTrue(
69 find_in_contents,
70 msg="%s:%s was not added into bblock.conf. bblock output: %s"
71 % (recipe, task, result.output),
72 )
73
74 def modify_tasks(self, recipes, tasks):
75 task_append = ""
76 for recipe in recipes:
77 bb_vars = get_bb_vars(["PV"], recipe)
78 recipe_pv = bb_vars["PV"]
79 recipe_append_file = recipe + "_" + recipe_pv + ".bbappend"
80
81 os.mkdir(os.path.join(self.testlayer_path, "recipes-test", recipe))
82 recipe_append_path = os.path.join(
83 self.testlayer_path, "recipes-test", recipe, recipe_append_file
84 )
85
86 for task in tasks:
87 task_append += "%s:append() {\n#modify task hash \n}\n" % task
88 ftools.write_file(recipe_append_path, task_append)
89 self.add_command_to_tearDown(
90 "rm -rf %s" % os.path.join(self.testlayer_path, "recipes-test", recipe)
91 )
92
93 def test_lock_single_recipe_single_task(self):
94 recipes = ["quilt"]
95 tasks = ["do_compile"]
96 self._run_test(recipes, tasks)
97
98 def test_lock_single_recipe_multiple_tasks(self):
99 recipes = ["quilt"]
100 tasks = ["do_compile", "do_install"]
101 self._run_test(recipes, tasks)
102
103 def test_lock_single_recipe_all_tasks(self):
104 recipes = ["quilt"]
105 self._run_test(recipes, None)
106
107 def test_lock_multiple_recipe_single_task(self):
108 recipes = ["quilt", "bc"]
109 tasks = ["do_compile"]
110 self._run_test(recipes, tasks)
111
112 def test_lock_architecture_specific(self):
113 # unlock all recipes and ensure no bblock.conf file exist
114 self.unlock_recipes()
115
116 recipes = ["quilt"]
117 tasks = ["do_compile"]
118
119 # lock quilt's do_compile task for another machine
120 if self.td["MACHINE"] == "qemux86-64":
121 machine = "qemuarm"
122 else:
123 machine = "qemux86-64"
124
125 self.write_config('MACHINE = "%s"\n' % machine)
126
127 self.lock_recipes(recipes, tasks)
128
129 self.write_config('MACHINE = "%s"\n' % self.td["MACHINE"])
130 # modify quilt's do_compile task
131 self.modify_tasks(recipes, tasks)
132
133 # build quilt using the default machine
134 # No Note/Warning should be emitted since sig is locked for another machine
 135 # (quilt package is architecture dependent)
136 info_message = "NOTE: The following recipes have locked tasks: " + recipes[0]
137 warn_message = "The %s:%s sig is computed to be" % (recipes[0], tasks[0])
138 result = bitbake(recipes[0] + " -n")
139 self.assertNotIn(info_message, result.output)
140 self.assertNotIn(warn_message, result.output)
141
142 # unlock all recipes
143 self.unlock_recipes()
144
145 def _run_test(self, recipes, tasks=None):
146 # unlock all recipes and ensure no bblock.conf file exist
147 self.unlock_recipes()
148
149 self.write_config('BB_SIGNATURE_HANDLER = "OEBasicHash"')
150
151 # lock tasks for recipes
152 result = self.lock_recipes(recipes, tasks)
153
154 if not tasks:
155 tasks = []
156 result = bitbake("-c listtasks " + recipes[0])
157 with bb.tinfoil.Tinfoil() as tinfoil:
158 tinfoil.prepare(config_only=False, quiet=2)
159 d = tinfoil.parse_recipe(recipes[0])
160
161 for line in result.output.splitlines():
162 if line.startswith("do_"):
163 task = line.split()[0]
164 if "setscene" in task:
165 continue
166 if d.getVarFlag(task, "nostamp"):
167 continue
168 tasks.append(task)
169
170 # build recipes. At this stage we should have a Note about recipes
171 # having locked task's sig, but no warning since sig still match
172 info_message = "NOTE: The following recipes have locked tasks: " + " ".join(
173 recipes
174 )
175 for recipe in recipes:
176 result = bitbake(recipe + " -n")
177 self.assertIn(info_message, result.output)
178 for task in tasks:
179 warn_message = "The %s:%s sig is computed to be" % (recipe, task)
180 self.assertNotIn(warn_message, result.output)
181
182 # modify all tasks that are locked to trigger a sig change then build the recipes
183 # at this stage we should have a Note as before, but also a Warning for all
184 # locked tasks indicating the sig mismatch
185 self.modify_tasks(recipes, tasks)
186 for recipe in recipes:
187 result = bitbake(recipe + " -n")
188 self.assertIn(info_message, result.output)
189 for task in tasks:
190 warn_message = "The %s:%s sig is computed to be" % (recipe, task)
191 self.assertIn(warn_message, result.output)
192
193 # unlock all tasks and rebuild, no more Note/Warning should remain
194 self.unlock_recipes(recipes, tasks)
195 for recipe in recipes:
196 result = bitbake(recipe + " -n")
197 self.assertNotIn(info_message, result.output)
198 for task in tasks:
199 warn_message = "The %s:%s sig is computed to be" % (recipe, task)
200 self.assertNotIn(warn_message, result.output)
201
202 # unlock all recipes
203 self.unlock_recipes()
diff --git a/meta/lib/oeqa/selftest/cases/bblogging.py b/meta/lib/oeqa/selftest/cases/bblogging.py
new file mode 100644
index 0000000000..040c6db089
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/bblogging.py
@@ -0,0 +1,182 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7
8from oeqa.selftest.case import OESelftestTestCase
9from oeqa.utils.commands import bitbake
10
11class BitBakeLogging(OESelftestTestCase):
12
13 def assertCount(self, item, entry, count):
14 self.assertEqual(item.count(entry), count, msg="Output:\n'''\n%s\n'''\ndoesn't contain %d copies of:\n'''\n%s\n'''\n" % (item, count, entry))
15
16 def test_shell_loggingA(self):
17 # no logs, no verbose
18 self.write_config('BBINCLUDELOGS = ""')
19 result = bitbake("logging-test -c shelltest -f", ignore_status = True)
20 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
21 self.assertNotIn("This is shell stdout", result.output)
22 self.assertNotIn("This is shell stderr", result.output)
23
24 def test_shell_loggingB(self):
25 # logs, no verbose
26 self.write_config('BBINCLUDELOGS = "yes"')
27 result = bitbake("logging-test -c shelltest -f", ignore_status = True)
28 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
29 self.assertCount(result.output, "This is shell stdout", 1)
30 self.assertCount(result.output, "This is shell stderr", 1)
31
32 def test_shell_loggingC(self):
33 # no logs, verbose
34 self.write_config('BBINCLUDELOGS = ""')
35 result = bitbake("logging-test -c shelltest -f -v", ignore_status = True)
36 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
37 # two copies due to set +x
38 self.assertCount(result.output, "This is shell stdout", 2)
39 self.assertCount(result.output, "This is shell stderr", 2)
40
41 def test_shell_loggingD(self):
42 # logs, verbose
43 self.write_config('BBINCLUDELOGS = "yes"')
44 result = bitbake("logging-test -c shelltest -f -v", ignore_status = True)
45 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
46 # two copies due to set +x
47 self.assertCount(result.output, "This is shell stdout", 2)
48 self.assertCount(result.output, "This is shell stderr", 2)
49
50 def test_python_exec_func_shell_loggingA(self):
51 # no logs, no verbose
52 self.write_config('BBINCLUDELOGS = ""')
53 result = bitbake("logging-test -c pythontest_exec_func_shell -f",
54 ignore_status = True)
55 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
56 self.assertNotIn("This is shell stdout", result.output)
57 self.assertNotIn("This is shell stderr", result.output)
58
59 def test_python_exec_func_shell_loggingB(self):
60 # logs, no verbose
61 self.write_config('BBINCLUDELOGS = "yes"')
62 result = bitbake("logging-test -c pythontest_exec_func_shell -f",
63 ignore_status = True)
64 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
65 self.assertCount(result.output, "This is shell stdout", 1)
66 self.assertCount(result.output, "This is shell stderr", 1)
67
68 def test_python_exec_func_shell_loggingC(self):
69 # no logs, verbose
70 self.write_config('BBINCLUDELOGS = ""')
71 result = bitbake("logging-test -c pythontest_exec_func_shell -f -v",
72 ignore_status = True)
73 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
74 # two copies due to set +x
75 self.assertCount(result.output, "This is shell stdout", 2)
76 self.assertCount(result.output, "This is shell stderr", 2)
77
78 def test_python_exec_func_shell_loggingD(self):
79 # logs, verbose
80 self.write_config('BBINCLUDELOGS = "yes"')
81 result = bitbake("logging-test -c pythontest_exec_func_shell -f -v",
82 ignore_status = True)
83 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
84 # two copies due to set +x
85 self.assertCount(result.output, "This is shell stdout", 2)
86 self.assertCount(result.output, "This is shell stderr", 2)
87
88 def test_python_exit_loggingA(self):
89 # no logs, no verbose
90 self.write_config('BBINCLUDELOGS = ""')
91 result = bitbake("logging-test -c pythontest_exit -f", ignore_status = True)
92 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
93 self.assertNotIn("This is python stdout", result.output)
94
95 def test_python_exit_loggingB(self):
96 # logs, no verbose
97 self.write_config('BBINCLUDELOGS = "yes"')
98 result = bitbake("logging-test -c pythontest_exit -f", ignore_status = True)
99 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
100 # A sys.exit() should include the output
101 self.assertCount(result.output, "This is python stdout", 1)
102
103 def test_python_exit_loggingC(self):
104 # no logs, verbose
105 self.write_config('BBINCLUDELOGS = ""')
106 result = bitbake("logging-test -c pythontest_exit -f -v", ignore_status = True)
107 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
108 self.assertCount(result.output, "This is python stdout", 1)
109
110 def test_python_exit_loggingD(self):
111 # logs, verbose
112 self.write_config('BBINCLUDELOGS = "yes"')
113 result = bitbake("logging-test -c pythontest_exit -f -v", ignore_status = True)
114 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
115 self.assertCount(result.output, "This is python stdout", 1)
116
117 def test_python_exec_func_python_loggingA(self):
118 # no logs, no verbose
119 self.write_config('BBINCLUDELOGS = ""')
120 result = bitbake("logging-test -c pythontest_exec_func_python -f",
121 ignore_status = True)
122 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
123 self.assertNotIn("This is python stdout", result.output)
124
125 def test_python_exec_func_python_loggingB(self):
126 # logs, no verbose
127 self.write_config('BBINCLUDELOGS = "yes"')
128 result = bitbake("logging-test -c pythontest_exec_func_python -f",
129 ignore_status = True)
130 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
131 # A sys.exit() should include the output
132 self.assertCount(result.output, "This is python stdout", 1)
133
134 def test_python_exec_func_python_loggingC(self):
135 # no logs, verbose
136 self.write_config('BBINCLUDELOGS = ""')
137 result = bitbake("logging-test -c pythontest_exec_func_python -f -v",
138 ignore_status = True)
139 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
140 self.assertCount(result.output, "This is python stdout", 1)
141
142 def test_python_exec_func_python_loggingD(self):
143 # logs, verbose
144 self.write_config('BBINCLUDELOGS = "yes"')
145 result = bitbake("logging-test -c pythontest_exec_func_python -f -v",
146 ignore_status = True)
147 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
148 self.assertCount(result.output, "This is python stdout", 1)
149
150 def test_python_fatal_loggingA(self):
151 # no logs, no verbose
152 self.write_config('BBINCLUDELOGS = ""')
153 result = bitbake("logging-test -c pythontest_fatal -f", ignore_status = True)
154 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
155 self.assertNotIn("This is python fatal test stdout", result.output)
156 self.assertCount(result.output, "This is a fatal error", 1)
157
158 def test_python_fatal_loggingB(self):
159 # logs, no verbose
160 self.write_config('BBINCLUDELOGS = "yes"')
161 result = bitbake("logging-test -c pythontest_fatal -f", ignore_status = True)
162 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
163 # A bb.fatal() should not include the output
164 self.assertNotIn("This is python fatal test stdout", result.output)
165 self.assertCount(result.output, "This is a fatal error", 1)
166
167 def test_python_fatal_loggingC(self):
168 # no logs, verbose
169 self.write_config('BBINCLUDELOGS = ""')
170 result = bitbake("logging-test -c pythontest_fatal -f -v", ignore_status = True)
171 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
172 self.assertCount(result.output, "This is python fatal test stdout", 1)
173 self.assertCount(result.output, "This is a fatal error", 1)
174
175 def test_python_fatal_loggingD(self):
176 # logs, verbose
177 self.write_config('BBINCLUDELOGS = "yes"')
178 result = bitbake("logging-test -c pythontest_fatal -f -v", ignore_status = True)
179 self.assertIn("ERROR: Logfile of failure stored in:", result.output)
180 self.assertCount(result.output, "This is python fatal test stdout", 1)
181 self.assertCount(result.output, "This is a fatal error", 1)
182
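Editor's note: the A/B/C/D variants above all walk the same two-by-two matrix (BBINCLUDELOGS set or empty, -v passed or not); only the expected copy counts differ. A hedged sketch of how the shell case could be expressed as one parameterised test with subTest follows; it is illustrative only and assumes the same logging-test recipe and OESelftestTestCase helpers used above.

from oeqa.selftest.case import OESelftestTestCase
from oeqa.utils.commands import bitbake

class BitBakeLoggingMatrix(OESelftestTestCase):

    def check_count(self, task, needle, include_logs, verbose, expected):
        self.write_config('BBINCLUDELOGS = "%s"' % ("yes" if include_logs else ""))
        cmd = "logging-test -c %s -f%s" % (task, " -v" if verbose else "")
        result = bitbake(cmd, ignore_status=True)
        self.assertIn("ERROR: Logfile of failure stored in:", result.output)
        self.assertEqual(result.output.count(needle), expected)

    def test_shell_logging_matrix(self):
        # (logs, verbose) -> expected copies of the stdout marker, per the cases above
        cases = [(False, False, 0), (True, False, 1), (False, True, 2), (True, True, 2)]
        for include_logs, verbose, expected in cases:
            with self.subTest(logs=include_logs, verbose=verbose):
                self.check_count("shelltest", "This is shell stdout",
                                 include_logs, verbose, expected)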
diff --git a/meta/lib/oeqa/selftest/cases/bbtests.py b/meta/lib/oeqa/selftest/cases/bbtests.py
index 79390acc0d..98e9f81661 100644
--- a/meta/lib/oeqa/selftest/cases/bbtests.py
+++ b/meta/lib/oeqa/selftest/cases/bbtests.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -39,7 +41,7 @@ class BitbakeTests(OESelftestTestCase):
39 41
40 def test_event_handler(self): 42 def test_event_handler(self):
41 self.write_config("INHERIT += \"test_events\"") 43 self.write_config("INHERIT += \"test_events\"")
42 result = bitbake('m4-native') 44 result = bitbake('selftest-hello-native')
43 find_build_started = re.search(r"NOTE: Test for bb\.event\.BuildStarted(\n.*)*NOTE: Executing.*Tasks", result.output) 45 find_build_started = re.search(r"NOTE: Test for bb\.event\.BuildStarted(\n.*)*NOTE: Executing.*Tasks", result.output)
44 find_build_completed = re.search(r"Tasks Summary:.*(\n.*)*NOTE: Test for bb\.event\.BuildCompleted", result.output) 46 find_build_completed = re.search(r"Tasks Summary:.*(\n.*)*NOTE: Test for bb\.event\.BuildCompleted", result.output)
45 self.assertTrue(find_build_started, msg = "Match failed in:\n%s" % result.output) 47 self.assertTrue(find_build_started, msg = "Match failed in:\n%s" % result.output)
@@ -47,11 +49,11 @@ class BitbakeTests(OESelftestTestCase):
47 self.assertNotIn('Test for bb.event.InvalidEvent', result.output) 49 self.assertNotIn('Test for bb.event.InvalidEvent', result.output)
48 50
49 def test_local_sstate(self): 51 def test_local_sstate(self):
50 bitbake('m4-native') 52 bitbake('selftest-hello-native')
51 bitbake('m4-native -cclean') 53 bitbake('selftest-hello-native -cclean')
52 result = bitbake('m4-native') 54 result = bitbake('selftest-hello-native')
53 find_setscene = re.search("m4-native.*do_.*_setscene", result.output) 55 find_setscene = re.search("selftest-hello-native.*do_.*_setscene", result.output)
54 self.assertTrue(find_setscene, msg = "No \"m4-native.*do_.*_setscene\" message found during bitbake m4-native. bitbake output: %s" % result.output ) 56 self.assertTrue(find_setscene, msg = "No \"selftest-hello-native.*do_.*_setscene\" message found during bitbake selftest-hello-native. bitbake output: %s" % result.output )
55 57
56 def test_bitbake_invalid_recipe(self): 58 def test_bitbake_invalid_recipe(self):
57 result = bitbake('-b asdf', ignore_status=True) 59 result = bitbake('-b asdf', ignore_status=True)
@@ -63,15 +65,15 @@ class BitbakeTests(OESelftestTestCase):
63 65
64 def test_warnings_errors(self): 66 def test_warnings_errors(self):
65 result = bitbake('-b asdf', ignore_status=True) 67 result = bitbake('-b asdf', ignore_status=True)
66 find_warnings = re.search("Summary: There w.{2,3}? [1-9][0-9]* WARNING messages* shown", result.output) 68 find_warnings = re.search("Summary: There w.{2,3}? [1-9][0-9]* WARNING messages*", result.output)
67 find_errors = re.search("Summary: There w.{2,3}? [1-9][0-9]* ERROR messages* shown", result.output) 69 find_errors = re.search("Summary: There w.{2,3}? [1-9][0-9]* ERROR messages*", result.output)
68 self.assertTrue(find_warnings, msg="Did not find the number of warnings at the end of the build:\n" + result.output) 70
69 self.assertTrue(find_errors, msg="Did not find the number of errors at the end of the build:\n" + result.output) 71
70 72
71 def test_invalid_patch(self): 73 def test_invalid_patch(self):
72 # This patch should fail to apply. 74 # This patch should fail to apply.
73 self.write_recipeinc('man-db', 'FILESEXTRAPATHS_prepend := "${THISDIR}/files:"\nSRC_URI += "file://0001-Test-patch-here.patch"') 75 self.write_recipeinc('man-db', 'FILESEXTRAPATHS:prepend := "${THISDIR}/files:"\nSRC_URI += "file://0001-Test-patch-here.patch"')
74 self.write_config("INHERIT_remove = \"report-error\"") 76 self.write_config("INHERIT:remove = \"report-error\"")
75 result = bitbake('man-db -c patch', ignore_status=True) 77 result = bitbake('man-db -c patch', ignore_status=True)
76 self.delete_recipeinc('man-db') 78 self.delete_recipeinc('man-db')
77 bitbake('-cclean man-db') 79 bitbake('-cclean man-db')
@@ -83,8 +85,10 @@ class BitbakeTests(OESelftestTestCase):
83 85
84 def test_force_task_1(self): 86 def test_force_task_1(self):
85 # test 1 from bug 5875 87 # test 1 from bug 5875
88 import uuid
86 test_recipe = 'zlib' 89 test_recipe = 'zlib'
87 test_data = "Microsoft Made No Profit From Anyone's Zunes Yo" 90 # Need to use uuid otherwise hash equivalence would change the workflow
91 test_data = "Microsoft Made No Profit From Anyone's Zunes Yo %s" % uuid.uuid1()
88 bb_vars = get_bb_vars(['D', 'PKGDEST', 'mandir'], test_recipe) 92 bb_vars = get_bb_vars(['D', 'PKGDEST', 'mandir'], test_recipe)
89 image_dir = bb_vars['D'] 93 image_dir = bb_vars['D']
90 pkgsplit_dir = bb_vars['PKGDEST'] 94 pkgsplit_dir = bb_vars['PKGDEST']
@@ -139,19 +143,14 @@ class BitbakeTests(OESelftestTestCase):
139 self.write_recipeinc('man-db', data) 143 self.write_recipeinc('man-db', data)
140 self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\" 144 self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\"
141SSTATE_DIR = \"${TOPDIR}/download-selftest\" 145SSTATE_DIR = \"${TOPDIR}/download-selftest\"
142INHERIT_remove = \"report-error\" 146INHERIT:remove = \"report-error\"
143""") 147""")
144 self.track_for_cleanup(os.path.join(self.builddir, "download-selftest")) 148 self.track_for_cleanup(os.path.join(self.builddir, "download-selftest"))
145 149
146 bitbake('-ccleanall man-db')
147 result = bitbake('-c fetch man-db', ignore_status=True) 150 result = bitbake('-c fetch man-db', ignore_status=True)
148 bitbake('-ccleanall man-db')
149 self.delete_recipeinc('man-db') 151 self.delete_recipeinc('man-db')
150 self.assertEqual(result.status, 1, msg="Command succeeded when it should have failed. bitbake output: %s" % result.output) 152
151 self.assertIn('Fetcher failure: Unable to find file file://invalid anywhere. The paths that were searched were:', result.output) 153 self.assertIn('Unable to get checksum for man-db SRC_URI entry invalid: file could not be found', result.output)
152 line = self.getline(result, 'Fetcher failure for URL: \'file://invalid\'. Unable to fetch URL from any source.')
153 self.assertTrue(line and line.startswith("ERROR:"), msg = "\"invalid\" file \
154doesn't exist, yet fetcher didn't report any error. bitbake output: %s" % result.output)
155 154
156 def test_rename_downloaded_file(self): 155 def test_rename_downloaded_file(self):
157 # TODO unique dldir instead of using cleanall 156 # TODO unique dldir instead of using cleanall
@@ -161,7 +160,7 @@ SSTATE_DIR = \"${TOPDIR}/download-selftest\"
161""") 160""")
162 self.track_for_cleanup(os.path.join(self.builddir, "download-selftest")) 161 self.track_for_cleanup(os.path.join(self.builddir, "download-selftest"))
163 162
164 data = 'SRC_URI = "${GNU_MIRROR}/aspell/aspell-${PV}.tar.gz;downloadfilename=test-aspell.tar.gz"' 163 data = 'SRC_URI = "https://downloads.yoctoproject.org/mirror/sources/aspell-${PV}.tar.gz;downloadfilename=test-aspell.tar.gz"'
165 self.write_recipeinc('aspell', data) 164 self.write_recipeinc('aspell', data)
166 result = bitbake('-f -c fetch aspell', ignore_status=True) 165 result = bitbake('-f -c fetch aspell', ignore_status=True)
167 self.delete_recipeinc('aspell') 166 self.delete_recipeinc('aspell')
@@ -176,7 +175,7 @@ SSTATE_DIR = \"${TOPDIR}/download-selftest\"
176 self.assertIn('localconf', result.output) 175 self.assertIn('localconf', result.output)
177 176
178 def test_dry_run(self): 177 def test_dry_run(self):
179 result = runCmd('bitbake -n m4-native') 178 result = runCmd('bitbake -n selftest-hello-native')
180 self.assertEqual(0, result.status, "bitbake dry run didn't run as expected. %s" % result.output) 179 self.assertEqual(0, result.status, "bitbake dry run didn't run as expected. %s" % result.output)
181 180
182 def test_just_parse(self): 181 def test_just_parse(self):
@@ -189,6 +188,10 @@ SSTATE_DIR = \"${TOPDIR}/download-selftest\"
189 self.assertTrue(find, "No version returned for searched recipe. bitbake output: %s" % result.output) 188 self.assertTrue(find, "No version returned for searched recipe. bitbake output: %s" % result.output)
190 189
191 def test_prefile(self): 190 def test_prefile(self):
191 # Test when the prefile does not exist
192 result = runCmd('bitbake -r conf/prefile.conf', ignore_status=True)
193 self.assertEqual(1, result.status, "bitbake didn't error and should have when a specified prefile didn't exist: %s" % result.output)
194 # Test when the prefile exists
192 preconf = os.path.join(self.builddir, 'conf/prefile.conf') 195 preconf = os.path.join(self.builddir, 'conf/prefile.conf')
193 self.track_for_cleanup(preconf) 196 self.track_for_cleanup(preconf)
194 ftools.write_file(preconf ,"TEST_PREFILE=\"prefile\"") 197 ftools.write_file(preconf ,"TEST_PREFILE=\"prefile\"")
@@ -199,6 +202,10 @@ SSTATE_DIR = \"${TOPDIR}/download-selftest\"
199 self.assertIn('localconf', result.output) 202 self.assertIn('localconf', result.output)
200 203
201 def test_postfile(self): 204 def test_postfile(self):
205 # Test when the postfile does not exist
206 result = runCmd('bitbake -R conf/postfile.conf', ignore_status=True)
207 self.assertEqual(1, result.status, "bitbake didn't error and should have when a specified postfile didn't exist: %s" % result.output)
208 # Test when the postfile exists
202 postconf = os.path.join(self.builddir, 'conf/postfile.conf') 209 postconf = os.path.join(self.builddir, 'conf/postfile.conf')
203 self.track_for_cleanup(postconf) 210 self.track_for_cleanup(postconf)
204 ftools.write_file(postconf , "TEST_POSTFILE=\"postfile\"") 211 ftools.write_file(postconf , "TEST_POSTFILE=\"postfile\"")
@@ -213,7 +220,7 @@ SSTATE_DIR = \"${TOPDIR}/download-selftest\"
213 def test_continue(self): 220 def test_continue(self):
214 self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\" 221 self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\"
215SSTATE_DIR = \"${TOPDIR}/download-selftest\" 222SSTATE_DIR = \"${TOPDIR}/download-selftest\"
216INHERIT_remove = \"report-error\" 223INHERIT:remove = \"report-error\"
217""") 224""")
218 self.track_for_cleanup(os.path.join(self.builddir, "download-selftest")) 225 self.track_for_cleanup(os.path.join(self.builddir, "download-selftest"))
219 self.write_recipeinc('man-db',"\ndo_fail_task () {\nexit 1 \n}\n\naddtask do_fail_task before do_fetch\n" ) 226 self.write_recipeinc('man-db',"\ndo_fail_task () {\nexit 1 \n}\n\naddtask do_fail_task before do_fetch\n" )
@@ -225,16 +232,21 @@ INHERIT_remove = \"report-error\"
225 self.assertLess(errorpos,continuepos, msg = "bitbake didn't pass do_fail_task. bitbake output: %s" % result.output) 232 self.assertLess(errorpos,continuepos, msg = "bitbake didn't pass do_fail_task. bitbake output: %s" % result.output)
226 233
227 def test_non_gplv3(self): 234 def test_non_gplv3(self):
228 self.write_config('INCOMPATIBLE_LICENSE = "GPLv3"') 235 self.write_config('''INCOMPATIBLE_LICENSE = "GPL-3.0-or-later"
236require conf/distro/include/no-gplv3.inc
237''')
229 result = bitbake('selftest-ed', ignore_status=True) 238 result = bitbake('selftest-ed', ignore_status=True)
230 self.assertEqual(result.status, 0, "Bitbake failed, exit code %s, output %s" % (result.status, result.output)) 239 self.assertEqual(result.status, 0, "Bitbake failed, exit code %s, output %s" % (result.status, result.output))
231 lic_dir = get_bb_var('LICENSE_DIRECTORY') 240 lic_dir = get_bb_var('LICENSE_DIRECTORY')
232 self.assertFalse(os.path.isfile(os.path.join(lic_dir, 'selftest-ed/generic_GPLv3'))) 241 arch = get_bb_var('SSTATE_PKGARCH')
233 self.assertTrue(os.path.isfile(os.path.join(lic_dir, 'selftest-ed/generic_GPLv2'))) 242 filename = os.path.join(lic_dir, arch, 'selftest-ed', 'generic_GPL-3.0-or-later')
243 self.assertFalse(os.path.isfile(filename), msg="License file %s exists and shouldn't" % filename)
244 filename = os.path.join(lic_dir, arch, 'selftest-ed', 'generic_GPL-2.0-or-later')
245 self.assertTrue(os.path.isfile(filename), msg="License file %s doesn't exist" % filename)
234 246
235 def test_setscene_only(self): 247 def test_setscene_only(self):
236 """ Bitbake option to restore from sstate only within a build (i.e. execute no real tasks, only setscene)""" 248 """ Bitbake option to restore from sstate only within a build (i.e. execute no real tasks, only setscene)"""
237 test_recipe = 'ed' 249 test_recipe = 'selftest-hello-native'
238 250
239 bitbake(test_recipe) 251 bitbake(test_recipe)
240 bitbake('-c clean %s' % test_recipe) 252 bitbake('-c clean %s' % test_recipe)
@@ -247,7 +259,7 @@ INHERIT_remove = \"report-error\"
247 'Executed tasks were: %s' % (task, str(tasks))) 259 'Executed tasks were: %s' % (task, str(tasks)))
248 260
249 def test_skip_setscene(self): 261 def test_skip_setscene(self):
250 test_recipe = 'ed' 262 test_recipe = 'selftest-hello-native'
251 263
252 bitbake(test_recipe) 264 bitbake(test_recipe)
253 bitbake('-c clean %s' % test_recipe) 265 bitbake('-c clean %s' % test_recipe)
@@ -298,3 +310,68 @@ INHERIT_remove = \"report-error\"
298 310
299 test_recipe_summary_after = get_bb_var('SUMMARY', test_recipe) 311 test_recipe_summary_after = get_bb_var('SUMMARY', test_recipe)
300 self.assertEqual(expected_recipe_summary, test_recipe_summary_after) 312 self.assertEqual(expected_recipe_summary, test_recipe_summary_after)
313
314 def test_git_patchtool(self):
315 """ PATCHTOOL=git should work with non-git sources like tarballs
316 the test recipe must NOT contain a git:// repository in SRC_URI
317 """
318 test_recipe = "man-db"
319 self.write_recipeinc(test_recipe, 'PATCHTOOL=\"git\"')
320 src = get_bb_var("SRC_URI",test_recipe)
321 gitscm = re.search("git://", src)
322 self.assertFalse(gitscm, "test_git_patchtool pre-condition failed: {} test recipe contains git repo!".format(test_recipe))
323 result = bitbake('{} -c patch'.format(test_recipe), ignore_status=False)
324 fatal = re.search("fatal: not a git repository (or any of the parent directories)", result.output)
325 self.assertFalse(fatal, "Failed to patch using PATCHTOOL=\"git\"")
326 self.delete_recipeinc(test_recipe)
327 bitbake('-cclean {}'.format(test_recipe))
328
329 def test_git_patchtool2(self):
330 """ Test if PATCHTOOL=git works with git repo and doesn't reinitialize it
331 """
332 test_recipe = "gitrepotest"
333 src = get_bb_var("SRC_URI",test_recipe)
334 gitscm = re.search("git://", src)
335 self.assertTrue(gitscm, "test_git_patchtool2 pre-condition failed: {} test recipe doesn't contain a git repo!".format(test_recipe))
336 result = bitbake('{} -c patch'.format(test_recipe), ignore_status=False)
337 srcdir = get_bb_var('S', test_recipe)
338 result = runCmd("git log", cwd = srcdir)
339 self.assertFalse("bitbake_patching_started" in result.output, msg = "Repository has been reinitialized. {}".format(srcdir))
340 self.delete_recipeinc(test_recipe)
341 bitbake('-cclean {}'.format(test_recipe))
342
343
344 def test_git_unpack_nonetwork(self):
345 """
346 Test that a recipe with a floating tag that needs to be resolved upstream doesn't
347 access the network in a patch task run in a separate build invocation
348 """
349
350 # Enable the recipe to float using a distro override
351 self.write_config("DISTROOVERRIDES .= \":gitunpack-enable-recipe\"")
352
353 bitbake('gitunpackoffline -c fetch')
354 bitbake('gitunpackoffline -c patch')
355
356 def test_git_unpack_nonetwork_fail(self):
357 """
358 Test that a recipe with a floating tag which doesn't call get_srcrev() in the fetcher
359 raises an error when the fetcher is called.
360 """
361
362 # Enable the recipe to float using a distro override
363 self.write_config("DISTROOVERRIDES .= \":gitunpack-enable-recipe\"")
364
365 result = bitbake('gitunpackoffline-fail -c fetch', ignore_status=True)
366 self.assertTrue(re.search("Recipe uses a floating tag/branch .* for repo .* without a fixed SRCREV yet doesn't call bb.fetch2.get_srcrev()", result.output), msg = "Recipe without PV set to SRCPV should have failed: %s" % result.output)
367
368 def test_unexpanded_variable_in_path(self):
369 """
370 Test that bitbake fails if a directory name contains an unexpanded bitbake variable
371 """
372 recipe_name = "gitunpackoffline"
373 self.write_config('PV:pn-gitunpackoffline:append = "+${UNDEFVAL}"')
374 result = bitbake('{}'.format(recipe_name), ignore_status=True)
375 self.assertGreater(result.status, 0, "Build should have failed if ${ is in the path")
376 self.assertTrue(re.search("ERROR: Directory name /.* contains unexpanded bitbake variable. This may cause build failures and WORKDIR polution",
377 result.output), msg = "mkdirhier with unexpanded variable should have failed: %s" % result.output)
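Editor's note: the loosened regular expressions in test_warnings_errors above (the trailing " shown" is dropped) are meant to match both the singular and plural forms of bitbake's summary line. A quick illustrative check, using made-up summary lines:

import re

pattern = r"Summary: There w.{2,3}? [1-9][0-9]* WARNING messages*"
samples = [
    "Summary: There was 1 WARNING message.",
    "Summary: There were 3 WARNING messages.",
]
for line in samples:
    # 'w.{2,3}?' covers both "was" and "were"; 'messages*' covers both forms
    assert re.search(pattern, line), line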
diff --git a/meta/lib/oeqa/selftest/cases/binutils.py b/meta/lib/oeqa/selftest/cases/binutils.py
index 821f52f5a8..1688eabe4e 100644
--- a/meta/lib/oeqa/selftest/cases/binutils.py
+++ b/meta/lib/oeqa/selftest/cases/binutils.py
@@ -1,12 +1,14 @@
1#
2# Copyright OpenEmbedded Contributors
3#
1# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
5#
2import os 6import os
3import sys 7import time
4import re
5import logging
6from oeqa.core.decorator import OETestTag 8from oeqa.core.decorator import OETestTag
7from oeqa.core.case import OEPTestResultTestCase 9from oeqa.core.case import OEPTestResultTestCase
8from oeqa.selftest.case import OESelftestTestCase 10from oeqa.selftest.case import OESelftestTestCase
9from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars 11from oeqa.utils.commands import bitbake, get_bb_vars
10 12
11def parse_values(content): 13def parse_values(content):
12 for i in content: 14 for i in content:
@@ -35,15 +37,19 @@ class BinutilsCrossSelfTest(OESelftestTestCase, OEPTestResultTestCase):
35 bb_vars = get_bb_vars(["B", "TARGET_SYS", "T"], recipe) 37 bb_vars = get_bb_vars(["B", "TARGET_SYS", "T"], recipe)
36 builddir, target_sys, tdir = bb_vars["B"], bb_vars["TARGET_SYS"], bb_vars["T"] 38 builddir, target_sys, tdir = bb_vars["B"], bb_vars["TARGET_SYS"], bb_vars["T"]
37 39
40 start_time = time.time()
41
38 bitbake("{0} -c check".format(recipe)) 42 bitbake("{0} -c check".format(recipe))
39 43
44 end_time = time.time()
45
40 sumspath = os.path.join(builddir, suite, "{0}.sum".format(suite)) 46 sumspath = os.path.join(builddir, suite, "{0}.sum".format(suite))
41 if not os.path.exists(sumspath): 47 if not os.path.exists(sumspath):
42 sumspath = os.path.join(builddir, suite, "testsuite", "{0}.sum".format(suite)) 48 sumspath = os.path.join(builddir, suite, "testsuite", "{0}.sum".format(suite))
43 logpath = os.path.splitext(sumspath)[0] + ".log" 49 logpath = os.path.splitext(sumspath)[0] + ".log"
44 50
45 ptestsuite = "binutils-{}".format(suite) if suite != "binutils" else suite 51 ptestsuite = "binutils-{}".format(suite) if suite != "binutils" else suite
46 self.ptest_section(ptestsuite, logfile = logpath) 52 self.ptest_section(ptestsuite, duration = int(end_time - start_time), logfile = logpath)
47 with open(sumspath, "r") as f: 53 with open(sumspath, "r") as f:
48 for test, result in parse_values(f): 54 for test, result in parse_values(f):
49 self.ptest_result(ptestsuite, test, result) 55 self.ptest_result(ptestsuite, test, result)
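Editor's note: parse_values() only appears partially in this hunk. For orientation, here is a hedged sketch of what parsing a DejaGnu-style .sum file into (test, result) pairs typically looks like; the exact upstream implementation may differ.

# Sketch, not the upstream code: DejaGnu .sum files contain lines such as
# "PASS: ld-shared/some test" or "XFAIL: gas/arm/foo".
RESULTS = ("PASS", "FAIL", "XPASS", "XFAIL", "KPASS", "KFAIL",
           "UNRESOLVED", "UNTESTED", "UNSUPPORTED")

def parse_values(content):
    for line in content:
        for result in RESULTS:
            if line.startswith(result + ": "):
                yield line[len(result) + 2:].strip(), result
                break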
diff --git a/meta/lib/oeqa/selftest/cases/buildhistory.py b/meta/lib/oeqa/selftest/cases/buildhistory.py
index d865da6252..2d55994916 100644
--- a/meta/lib/oeqa/selftest/cases/buildhistory.py
+++ b/meta/lib/oeqa/selftest/cases/buildhistory.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/selftest/cases/buildoptions.py b/meta/lib/oeqa/selftest/cases/buildoptions.py
index 3495bee986..31dafaa9c5 100644
--- a/meta/lib/oeqa/selftest/cases/buildoptions.py
+++ b/meta/lib/oeqa/selftest/cases/buildoptions.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -9,8 +11,10 @@ import shutil
9import tempfile 11import tempfile
10from oeqa.selftest.case import OESelftestTestCase 12from oeqa.selftest.case import OESelftestTestCase
11from oeqa.selftest.cases.buildhistory import BuildhistoryBase 13from oeqa.selftest.cases.buildhistory import BuildhistoryBase
12from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars 14from oeqa.core.decorator.data import skipIfMachine
15from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars
13import oeqa.utils.ftools as ftools 16import oeqa.utils.ftools as ftools
17from oeqa.core.decorator import OETestTag
14 18
15class ImageOptionsTests(OESelftestTestCase): 19class ImageOptionsTests(OESelftestTestCase):
16 20
@@ -50,23 +54,23 @@ class ImageOptionsTests(OESelftestTestCase):
50 def test_read_only_image(self): 54 def test_read_only_image(self):
51 distro_features = get_bb_var('DISTRO_FEATURES') 55 distro_features = get_bb_var('DISTRO_FEATURES')
52 if not ('x11' in distro_features and 'opengl' in distro_features): 56 if not ('x11' in distro_features and 'opengl' in distro_features):
53 self.skipTest('core-image-sato requires x11 and opengl in distro features') 57 self.skipTest('core-image-sato/weston requires x11 and opengl in distro features')
54 self.write_config('IMAGE_FEATURES += "read-only-rootfs"') 58 self.write_config('IMAGE_FEATURES += "read-only-rootfs"')
55 bitbake("core-image-sato") 59 bitbake("core-image-sato core-image-weston")
56 # do_image will fail if there are any pending postinsts 60 # do_image will fail if there are any pending postinsts
57 61
58class DiskMonTest(OESelftestTestCase): 62class DiskMonTest(OESelftestTestCase):
59 63
60 def test_stoptask_behavior(self): 64 def test_stoptask_behavior(self):
61 self.write_config('BB_DISKMON_DIRS = "STOPTASKS,${TMPDIR},100000G,100K"') 65 self.write_config('BB_DISKMON_DIRS = "STOPTASKS,${TMPDIR},100000G,100K"\nBB_HEARTBEAT_EVENT = "1"')
62 res = bitbake("delay -c delay", ignore_status = True) 66 res = bitbake("delay -c delay", ignore_status = True)
63 self.assertTrue('ERROR: No new tasks can be executed since the disk space monitor action is "STOPTASKS"!' in res.output, msg = "Tasks should have stopped. Disk monitor is set to STOPTASK: %s" % res.output) 67 self.assertTrue('ERROR: No new tasks can be executed since the disk space monitor action is "STOPTASKS"!' in res.output, msg = "Tasks should have stopped. Disk monitor is set to STOPTASK: %s" % res.output)
64 self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output)) 68 self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output))
65 self.write_config('BB_DISKMON_DIRS = "ABORT,${TMPDIR},100000G,100K"') 69 self.write_config('BB_DISKMON_DIRS = "HALT,${TMPDIR},100000G,100K"\nBB_HEARTBEAT_EVENT = "1"')
66 res = bitbake("delay -c delay", ignore_status = True) 70 res = bitbake("delay -c delay", ignore_status = True)
67 self.assertTrue('ERROR: Immediately abort since the disk space monitor action is "ABORT"!' in res.output, "Tasks should have been aborted immediatelly. Disk monitor is set to ABORT: %s" % res.output) 71 self.assertTrue('ERROR: Immediately halt since the disk space monitor action is "HALT"!' in res.output, "Tasks should have been halted immediately. Disk monitor is set to HALT: %s" % res.output)
68 self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output)) 72 self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output))
69 self.write_config('BB_DISKMON_DIRS = "WARN,${TMPDIR},100000G,100K"') 73 self.write_config('BB_DISKMON_DIRS = "WARN,${TMPDIR},100000G,100K"\nBB_HEARTBEAT_EVENT = "1"')
70 res = bitbake("delay -c delay") 74 res = bitbake("delay -c delay")
71 self.assertTrue('WARNING: The free space' in res.output, msg = "A warning should have been displayed for disk monitor is set to WARN: %s" %res.output) 75 self.assertTrue('WARNING: The free space' in res.output, msg = "A warning should have been displayed for disk monitor is set to WARN: %s" %res.output)
72 76
@@ -78,9 +82,9 @@ class SanityOptionsTest(OESelftestTestCase):
78 82
79 def test_options_warnqa_errorqa_switch(self): 83 def test_options_warnqa_errorqa_switch(self):
80 84
81 self.write_config("INHERIT_remove = \"report-error\"") 85 self.write_config("INHERIT:remove = \"report-error\"")
82 if "packages-list" not in get_bb_var("ERROR_QA"): 86 if "packages-list" not in get_bb_var("ERROR_QA"):
83 self.append_config("ERROR_QA_append = \" packages-list\"") 87 self.append_config("ERROR_QA:append = \" packages-list\"")
84 88
85 self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"') 89 self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"')
86 self.add_command_to_tearDown('bitbake -c clean xcursor-transparent-theme') 90 self.add_command_to_tearDown('bitbake -c clean xcursor-transparent-theme')
@@ -90,8 +94,8 @@ class SanityOptionsTest(OESelftestTestCase):
90 self.assertTrue(line and line.startswith("ERROR:"), msg=res.output) 94 self.assertTrue(line and line.startswith("ERROR:"), msg=res.output)
91 self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output)) 95 self.assertEqual(res.status, 1, msg = "bitbake reported exit code %s. It should have been 1. Bitbake output: %s" % (str(res.status), res.output))
92 self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"') 96 self.write_recipeinc('xcursor-transparent-theme', 'PACKAGES += \"${PN}-dbg\"')
93 self.append_config('ERROR_QA_remove = "packages-list"') 97 self.append_config('ERROR_QA:remove = "packages-list"')
94 self.append_config('WARN_QA_append = " packages-list"') 98 self.append_config('WARN_QA:append = " packages-list"')
95 res = bitbake("xcursor-transparent-theme -f -c package") 99 res = bitbake("xcursor-transparent-theme -f -c package")
96 self.delete_recipeinc('xcursor-transparent-theme') 100 self.delete_recipeinc('xcursor-transparent-theme')
97 line = self.getline(res, "QA Issue: xcursor-transparent-theme-dbg is listed in PACKAGES multiple times, this leads to packaging errors.") 101 line = self.getline(res, "QA Issue: xcursor-transparent-theme-dbg is listed in PACKAGES multiple times, this leads to packaging errors.")
@@ -148,19 +152,48 @@ class BuildhistoryTests(BuildhistoryBase):
148 self.run_buildhistory_operation(target, target_config="PR = \"r1\"", change_bh_location=True) 152 self.run_buildhistory_operation(target, target_config="PR = \"r1\"", change_bh_location=True)
149 self.run_buildhistory_operation(target, target_config="PR = \"r0\"", change_bh_location=False, expect_error=True, error_regex=error) 153 self.run_buildhistory_operation(target, target_config="PR = \"r0\"", change_bh_location=False, expect_error=True, error_regex=error)
150 154
155 def test_fileinfo(self):
156 self.config_buildhistory()
157 bitbake('hicolor-icon-theme')
158 history_dir = get_bb_var('BUILDHISTORY_DIR_PACKAGE', 'hicolor-icon-theme')
159 self.assertTrue(os.path.isdir(history_dir), 'buildhistory dir was not created.')
160
161 def load_bh(f):
162 d = {}
163 for line in open(f):
164 split = [s.strip() for s in line.split('=', 1)]
165 if len(split) > 1:
166 d[split[0]] = split[1]
167 return d
168
169 data = load_bh(os.path.join(history_dir, 'hicolor-icon-theme', 'latest'))
170 self.assertIn('FILELIST', data)
171 self.assertEqual(data['FILELIST'], '/usr/share/icons/hicolor/index.theme')
172 self.assertGreater(int(data['PKGSIZE']), 0)
173
174 data = load_bh(os.path.join(history_dir, 'hicolor-icon-theme-dev', 'latest'))
175 if 'FILELIST' in data:
176 self.assertEqual(data['FILELIST'], '')
177 self.assertEqual(int(data['PKGSIZE']), 0)
178
151class ArchiverTest(OESelftestTestCase): 179class ArchiverTest(OESelftestTestCase):
152 def test_arch_work_dir_and_export_source(self): 180 def test_arch_work_dir_and_export_source(self):
153 """ 181 """
154 Test for archiving the work directory and exporting the source files. 182 Test for archiving the work directory and exporting the source files.
155 """ 183 """
156 self.write_config("INHERIT += \"archiver\"\nARCHIVER_MODE[src] = \"original\"\nARCHIVER_MODE[srpm] = \"1\"") 184 self.write_config("""
185INHERIT += "archiver"
186PACKAGE_CLASSES = "package_rpm"
187ARCHIVER_MODE[src] = "original"
188ARCHIVER_MODE[srpm] = "1"
189""")
157 res = bitbake("xcursor-transparent-theme", ignore_status=True) 190 res = bitbake("xcursor-transparent-theme", ignore_status=True)
158 self.assertEqual(res.status, 0, "\nCouldn't build xcursortransparenttheme.\nbitbake output %s" % res.output) 191 self.assertEqual(res.status, 0, "\nCouldn't build xcursortransparenttheme.\nbitbake output %s" % res.output)
159 deploy_dir_src = get_bb_var('DEPLOY_DIR_SRC') 192 deploy_dir_src = get_bb_var('DEPLOY_DIR_SRC')
160 pkgs_path = g.glob(str(deploy_dir_src) + "/allarch*/xcurs*") 193 pkgs_path = g.glob(str(deploy_dir_src) + "/allarch*/xcurs*")
161 src_file_glob = str(pkgs_path[0]) + "/xcursor*.src.rpm" 194 src_file_glob = str(pkgs_path[0]) + "/xcursor*.src.rpm"
162 tar_file_glob = str(pkgs_path[0]) + "/xcursor*.tar.gz" 195 tar_file_glob = str(pkgs_path[0]) + "/xcursor*.tar.xz"
163 self.assertTrue((g.glob(src_file_glob) and g.glob(tar_file_glob)), "Couldn't find .src.rpm and .tar.gz files under %s/allarch*/xcursor*" % deploy_dir_src) 196 self.assertTrue((g.glob(src_file_glob) and g.glob(tar_file_glob)), "Couldn't find .src.rpm and .tar.xz files under %s/allarch*/xcursor*" % deploy_dir_src)
164 197
165class ToolchainOptions(OESelftestTestCase): 198class ToolchainOptions(OESelftestTestCase):
166 def test_toolchain_fortran(self): 199 def test_toolchain_fortran(self):
@@ -168,10 +201,11 @@ class ToolchainOptions(OESelftestTestCase):
168 Test that Fortran works by building a Hello, World binary. 201 Test that Fortran works by building a Hello, World binary.
169 """ 202 """
170 203
171 features = 'FORTRAN_forcevariable = ",fortran"\n' 204 features = 'FORTRAN:forcevariable = ",fortran"\n'
172 self.write_config(features) 205 self.write_config(features)
173 bitbake('fortran-helloworld') 206 bitbake('fortran-helloworld')
174 207
208@OETestTag("yocto-mirrors")
175class SourceMirroring(OESelftestTestCase): 209class SourceMirroring(OESelftestTestCase):
176 # Can we download everything from the Yocto Sources Mirror over http only 210 # Can we download everything from the Yocto Sources Mirror over http only
177 def test_yocto_source_mirror(self): 211 def test_yocto_source_mirror(self):
@@ -197,3 +231,9 @@ PREMIRRORS = "\\
197 231
198 bitbake("world --runall fetch") 232 bitbake("world --runall fetch")
199 233
234
235class Poisoning(OESelftestTestCase):
236 def test_poisoning(self):
237 res = bitbake("poison", ignore_status=True)
238 self.assertNotEqual(res.status, 0)
239 self.assertTrue("is unsafe for cross-compilation" in res.output)
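Editor's note: the load_bh() helper added in test_fileinfo above parses the flat "KEY = value" format of a buildhistory "latest" file. A small self-contained illustration, with made-up file content:

sample = """\
PV = 0.17
PR = r0
PKGSIZE = 5632
FILELIST = /usr/share/icons/hicolor/index.theme
"""

def load_bh_lines(lines):
    # same logic as load_bh() above, taking an iterable of lines instead of a path
    d = {}
    for line in lines:
        split = [s.strip() for s in line.split('=', 1)]
        if len(split) > 1:
            d[split[0]] = split[1]
    return d

data = load_bh_lines(sample.splitlines())
assert data['FILELIST'] == '/usr/share/icons/hicolor/index.theme'
assert int(data['PKGSIZE']) > 0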
diff --git a/meta/lib/oeqa/selftest/cases/c_cpp.py b/meta/lib/oeqa/selftest/cases/c_cpp.py
new file mode 100644
index 0000000000..9a70ce29f5
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/c_cpp.py
@@ -0,0 +1,60 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7from oeqa.selftest.case import OESelftestTestCase
8from oeqa.core.decorator.data import skipIfNotQemuUsermode
9from oeqa.utils.commands import bitbake
10
11
12class CCppTests(OESelftestTestCase):
13
14 @skipIfNotQemuUsermode()
15 def _qemu_usermode(self, recipe_name):
16 self.add_command_to_tearDown("bitbake -c clean %s" % recipe_name)
17 bitbake("%s -c run_tests" % recipe_name)
18
19 @skipIfNotQemuUsermode()
20 def _qemu_usermode_failing(self, recipe_name):
21 config = 'PACKAGECONFIG:pn-%s = "failing_test"' % recipe_name
22 self.write_config(config)
23 self.add_command_to_tearDown("bitbake -c clean %s" % recipe_name)
24 result = bitbake("%s -c run_tests" % recipe_name, ignore_status=True)
25 self.assertNotEqual(0, result.status, "command: %s is expected to fail but passed, status: %s, output: %s, error: %s" % (
26 result.command, result.status, result.output, result.error))
27
28
29class CMakeTests(CCppTests):
30 def test_cmake_qemu(self):
31 """Test for cmake-qemu.bbclass good case
32
33 compile the cmake-example and verify the CTests pass in qemu-user.
34 qemu-user is configured by CMAKE_CROSSCOMPILING_EMULATOR.
35 """
36 self._qemu_usermode("cmake-example")
37
38 def test_cmake_qemu_failing(self):
39 """Test for cmake-qemu.bbclass bad case
40
41 Break the comparison in the test code and verify the CTests do not pass.
42 """
43 self._qemu_usermode_failing("cmake-example")
44
45
46class MesonTests(CCppTests):
47 def test_meson_qemu(self):
48 """Test the qemu-user feature of the meson.bbclass good case
49
50 compile the meson-example and verify the Unit Test pass in qemu-user.
51 qemu-user is configured by meson's exe_wrapper option.
52 """
53 self._qemu_usermode("meson-example")
54
55 def test_meson_qemu_failing(self):
56 """Test the qemu-user feature of the meson.bbclass bad case
57
58 Break the comparison in the test code and verify the Unit Test does not pass in qemu-user.
59 """
60 self._qemu_usermode_failing("meson-example")
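Editor's note: both classes above rely on a qemu user-mode wrapper being registered with the build system (CMAKE_CROSSCOMPILING_EMULATOR for CMake, exe_wrapper for Meson). As an illustration of what such a wrapper boils down to, here is a hedged sketch of running one cross-compiled test binary under qemu-user from Python; the binary path, qemu flavour, and sysroot below are made up.

import subprocess

def run_under_qemu_user(binary, sysroot, qemu="qemu-aarch64"):
    # qemu user-mode runs a single foreign-architecture binary;
    # -L points it at the target sysroot for the dynamic loader and libraries
    return subprocess.run([qemu, "-L", sysroot, binary],
                          capture_output=True, text=True)

# Illustrative values only, not taken from the tests above
result = run_under_qemu_user("./cmake-example-test", "/path/to/recipe-sysroot")
print(result.returncode, result.stdout)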
diff --git a/meta/lib/oeqa/selftest/cases/containerimage.py b/meta/lib/oeqa/selftest/cases/containerimage.py
index 79cc8a0f2e..23c0a1408a 100644
--- a/meta/lib/oeqa/selftest/cases/containerimage.py
+++ b/meta/lib/oeqa/selftest/cases/containerimage.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -13,7 +15,7 @@ from oeqa.utils.commands import bitbake, get_bb_vars, runCmd
13# The only package added to the image is container_image_testpkg, which 15# The only package added to the image is container_image_testpkg, which
14# contains one file. However, due to some other things not cleaning up during 16# contains one file. However, due to some other things not cleaning up during
15# rootfs creation, there is some cruft. Ideally bugs will be filed and the 17# rootfs creation, there is some cruft. Ideally bugs will be filed and the
16# cruft removed, but for now we whitelist some known set. 18# cruft removed, but for now we ignore some known set.
17# 19#
18# Also for performance reasons we're only checking the cruft when using ipk. 20# Also for performance reasons we're only checking the cruft when using ipk.
19# When using deb, and rpm it is a bit different and we could test all 21# When using deb, and rpm it is a bit different and we could test all
@@ -22,7 +24,7 @@ from oeqa.utils.commands import bitbake, get_bb_vars, runCmd
22# 24#
23class ContainerImageTests(OESelftestTestCase): 25class ContainerImageTests(OESelftestTestCase):
24 26
25 # Verify that when specifying a IMAGE_TYPEDEP_ of the form "foo.bar" that 27 # Verify that when specifying an IMAGE_TYPEDEP: of the form "foo.bar" that
26 # the conversion type bar gets added as a dep as well 28 # the conversion type bar gets added as a dep as well
27 def test_expected_files(self): 29 def test_expected_files(self):
28 30
@@ -43,7 +45,7 @@ PACKAGE_CLASSES = "package_ipk"
43IMAGE_FEATURES = "" 45IMAGE_FEATURES = ""
44IMAGE_BUILDINFO_FILE = "" 46IMAGE_BUILDINFO_FILE = ""
45INIT_MANAGER = "sysvinit" 47INIT_MANAGER = "sysvinit"
46IMAGE_INSTALL_remove = "ssh-pregen-hostkeys" 48IMAGE_INSTALL:remove = "ssh-pregen-hostkeys"
47 49
48""") 50""")
49 51
diff --git a/meta/lib/oeqa/selftest/cases/cve_check.py b/meta/lib/oeqa/selftest/cases/cve_check.py
index 3f343a2841..60cecd1328 100644
--- a/meta/lib/oeqa/selftest/cases/cve_check.py
+++ b/meta/lib/oeqa/selftest/cases/cve_check.py
@@ -1,9 +1,19 @@
1from oe.cve_check import Version 1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import json
8import os
2from oeqa.selftest.case import OESelftestTestCase 9from oeqa.selftest.case import OESelftestTestCase
10from oeqa.utils.commands import bitbake, get_bb_vars
3 11
4class CVECheck(OESelftestTestCase): 12class CVECheck(OESelftestTestCase):
5 13
6 def test_version_compare(self): 14 def test_version_compare(self):
15 from oe.cve_check import Version
16
7 result = Version("100") > Version("99") 17 result = Version("100") > Version("99")
8 self.assertTrue( result, msg="Failed to compare version '100' > '99'") 18 self.assertTrue( result, msg="Failed to compare version '100' > '99'")
9 result = Version("2.3.1") > Version("2.2.3") 19 result = Version("2.3.1") > Version("2.2.3")
@@ -34,3 +44,199 @@ class CVECheck(OESelftestTestCase):
34 self.assertTrue( result ,msg="Failed to compare version with suffix '1.0b' < '1.0r'") 44 self.assertTrue( result ,msg="Failed to compare version with suffix '1.0b' < '1.0r'")
35 result = Version("1.0b","alphabetical") > Version("1.0","alphabetical") 45 result = Version("1.0b","alphabetical") > Version("1.0","alphabetical")
36 self.assertTrue( result ,msg="Failed to compare version with suffix '1.0b' > '1.0'") 46 self.assertTrue( result ,msg="Failed to compare version with suffix '1.0b' > '1.0'")
47
48 # consider the trailing "p" and "patch" as patched released when comparing
49 result = Version("1.0","patch") < Version("1.0p1","patch")
50 self.assertTrue( result ,msg="Failed to compare version with suffix '1.0' < '1.0p1'")
51 result = Version("1.0p2","patch") > Version("1.0p1","patch")
52 self.assertTrue( result ,msg="Failed to compare version with suffix '1.0p2' > '1.0p1'")
53 result = Version("1.0_patch2","patch") < Version("1.0_patch3","patch")
54 self.assertTrue( result ,msg="Failed to compare version with suffix '1.0_patch2' < '1.0_patch3'")
55
56
57 def test_convert_cve_version(self):
58 from oe.cve_check import convert_cve_version
59
60 # Default format
61 self.assertEqual(convert_cve_version("8.3"), "8.3")
62 self.assertEqual(convert_cve_version(""), "")
63
64 # OpenSSL format version
65 self.assertEqual(convert_cve_version("1.1.1t"), "1.1.1t")
66
67 # OpenSSH format
68 self.assertEqual(convert_cve_version("8.3_p1"), "8.3p1")
69 self.assertEqual(convert_cve_version("8.3_p22"), "8.3p22")
70
71 # Linux kernel format
72 self.assertEqual(convert_cve_version("6.2_rc8"), "6.2-rc8")
73 self.assertEqual(convert_cve_version("6.2_rc31"), "6.2-rc31")
74
75
76 def test_recipe_report_json(self):
77 config = """
78INHERIT += "cve-check"
79CVE_CHECK_FORMAT_JSON = "1"
80"""
81 self.write_config(config)
82
83 vars = get_bb_vars(["CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
84 summary_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
85 recipe_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], "m4-native_cve.json")
86
87 try:
88 os.remove(summary_json)
89 os.remove(recipe_json)
90 except FileNotFoundError:
91 pass
92
93 bitbake("m4-native -c cve_check")
94
95 def check_m4_json(filename):
96 with open(filename) as f:
97 report = json.load(f)
98 self.assertEqual(report["version"], "1")
99 self.assertEqual(len(report["package"]), 1)
100 package = report["package"][0]
101 self.assertEqual(package["name"], "m4-native")
102 found_cves = { issue["id"]: issue["status"] for issue in package["issue"]}
103 self.assertIn("CVE-2008-1687", found_cves)
104 self.assertEqual(found_cves["CVE-2008-1687"], "Patched")
105
106 self.assertExists(summary_json)
107 check_m4_json(summary_json)
108 self.assertExists(recipe_json)
109 check_m4_json(recipe_json)
110
111
112 def test_image_json(self):
113 config = """
114INHERIT += "cve-check"
115CVE_CHECK_FORMAT_JSON = "1"
116"""
117 self.write_config(config)
118
119 vars = get_bb_vars(["CVE_CHECK_DIR", "CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
120 report_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
121 print(report_json)
122 try:
123 os.remove(report_json)
124 except FileNotFoundError:
125 pass
126
127 bitbake("core-image-minimal-initramfs")
128 self.assertExists(report_json)
129
130 # Check that the summary report lists at least one package
131 with open(report_json) as f:
132 report = json.load(f)
133 self.assertEqual(report["version"], "1")
134 self.assertGreater(len(report["package"]), 1)
135
136 # Check that a random recipe wrote a recipe report to deploy/cve/
137 recipename = report["package"][0]["name"]
138 recipe_report = os.path.join(vars["CVE_CHECK_DIR"], recipename + "_cve.json")
139 self.assertExists(recipe_report)
140 with open(recipe_report) as f:
141 report = json.load(f)
142 self.assertEqual(report["version"], "1")
143 self.assertEqual(len(report["package"]), 1)
144 self.assertEqual(report["package"][0]["name"], recipename)
145
146
147 def test_recipe_report_json_unpatched(self):
148 config = """
149INHERIT += "cve-check"
150CVE_CHECK_FORMAT_JSON = "1"
151CVE_CHECK_REPORT_PATCHED = "0"
152"""
153 self.write_config(config)
154
155 vars = get_bb_vars(["CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
156 summary_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
157 recipe_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], "m4-native_cve.json")
158
159 try:
160 os.remove(summary_json)
161 os.remove(recipe_json)
162 except FileNotFoundError:
163 pass
164
165 bitbake("m4-native -c cve_check")
166
167 def check_m4_json(filename):
168 with open(filename) as f:
169 report = json.load(f)
170 self.assertEqual(report["version"], "1")
171 self.assertEqual(len(report["package"]), 1)
172 package = report["package"][0]
173 self.assertEqual(package["name"], "m4-native")
174 # m4 has only Patched CVEs, so the issue array will be empty
175 self.assertEqual(package["issue"], [])
176
177 self.assertExists(summary_json)
178 check_m4_json(summary_json)
179 self.assertExists(recipe_json)
180 check_m4_json(recipe_json)
181
182
183 def test_recipe_report_json_ignored(self):
184 config = """
185INHERIT += "cve-check"
186CVE_CHECK_FORMAT_JSON = "1"
187CVE_CHECK_REPORT_PATCHED = "1"
188"""
189 self.write_config(config)
190
191 vars = get_bb_vars(["CVE_CHECK_SUMMARY_DIR", "CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
192 summary_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], vars["CVE_CHECK_SUMMARY_FILE_NAME_JSON"])
193 recipe_json = os.path.join(vars["CVE_CHECK_SUMMARY_DIR"], "logrotate_cve.json")
194
195 try:
196 os.remove(summary_json)
197 os.remove(recipe_json)
198 except FileNotFoundError:
199 pass
200
201 bitbake("logrotate -c cve_check")
202
203 def check_m4_json(filename):
204 with open(filename) as f:
205 report = json.load(f)
206 self.assertEqual(report["version"], "1")
207 self.assertEqual(len(report["package"]), 1)
208 package = report["package"][0]
209 self.assertEqual(package["name"], "logrotate")
210 found_cves = {}
211 for issue in package["issue"]:
212 found_cves[issue["id"]] = {
213 "status" : issue["status"],
214 "detail" : issue["detail"] if "detail" in issue else "",
215 "description" : issue["description"] if "description" in issue else ""
216 }
217 # m4 CVE should not be in logrotate
218 self.assertNotIn("CVE-2008-1687", found_cves)
219 # logrotate has both Patched and Ignored CVEs
220 self.assertIn("CVE-2011-1098", found_cves)
221 self.assertEqual(found_cves["CVE-2011-1098"]["status"], "Patched")
222 self.assertEqual(len(found_cves["CVE-2011-1098"]["detail"]), 0)
223 self.assertEqual(len(found_cves["CVE-2011-1098"]["description"]), 0)
224 detail = "not-applicable-platform"
225 description = "CVE is debian, gentoo or SUSE specific on the way logrotate was installed/used"
226 self.assertIn("CVE-2011-1548", found_cves)
227 self.assertEqual(found_cves["CVE-2011-1548"]["status"], "Ignored")
228 self.assertEqual(found_cves["CVE-2011-1548"]["detail"], detail)
229 self.assertEqual(found_cves["CVE-2011-1548"]["description"], description)
230 self.assertIn("CVE-2011-1549", found_cves)
231 self.assertEqual(found_cves["CVE-2011-1549"]["status"], "Ignored")
232 self.assertEqual(found_cves["CVE-2011-1549"]["detail"], detail)
233 self.assertEqual(found_cves["CVE-2011-1549"]["description"], description)
234 self.assertIn("CVE-2011-1550", found_cves)
235 self.assertEqual(found_cves["CVE-2011-1550"]["status"], "Ignored")
236 self.assertEqual(found_cves["CVE-2011-1550"]["detail"], detail)
237 self.assertEqual(found_cves["CVE-2011-1550"]["description"], description)
238
239 self.assertExists(summary_json)
240 check_m4_json(summary_json)
241 self.assertExists(recipe_json)
242 check_m4_json(recipe_json)
diff --git a/meta/lib/oeqa/selftest/cases/debuginfod.py b/meta/lib/oeqa/selftest/cases/debuginfod.py
new file mode 100644
index 0000000000..505b4be837
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/debuginfod.py
@@ -0,0 +1,158 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6import os
7import socketserver
8import subprocess
9import time
10import urllib
11import pathlib
12
13from oeqa.core.decorator import OETestTag
14from oeqa.selftest.case import OESelftestTestCase
15from oeqa.utils.commands import bitbake, get_bb_var, runqemu
16
17
18class Debuginfod(OESelftestTestCase):
19
20 def wait_for_debuginfod(self, port):
21 """
22 debuginfod takes time to scan the packages and requesting too early may
23 result in a test failure if the right packages haven't been scanned yet.
24
25 Request the metrics endpoint periodically and wait for there to be no
26 busy scanning threads.
27
28 Returns once debuginfod is ready; raises an exception if it is not
29 ready within the timeout.
30 """
31
32 # Wait two minutes
33 countdown = 24
34 delay = 5
35 latest = None
36
37 while countdown:
38 self.logger.info("waiting...")
39 time.sleep(delay)
40
41 self.logger.info("polling server")
42 if self.debuginfod.poll() is not None:
43 self.logger.info("server dead")
44 self.debuginfod.communicate()
45 self.fail("debuginfod terminated unexpectedly")
46 self.logger.info("server alive")
47
48 try:
49 with urllib.request.urlopen("http://localhost:%d/metrics" % port, timeout=10) as f:
50 for line in f.read().decode("ascii").splitlines():
51 key, value = line.rsplit(" ", 1)
52 if key == "thread_busy{role=\"scan\"}":
53 latest = int(value)
54 self.logger.info("Waiting for %d scan jobs to finish" % latest)
55 if latest == 0:
56 return
57 except urllib.error.URLError as e:
58 # TODO: how to catch just timeouts?
59 self.logger.error(e)
60
61 countdown -= 1
62
63 raise TimeoutError("Cannot connect to debuginfod, still %d scan jobs running" % latest)
64
65 def start_debuginfod(self):
66 # We assume that the caller has already bitbake'd elfutils-native:do_addto_recipe_sysroot
67
68 # Save some useful paths for later
69 native_sysroot = pathlib.Path(get_bb_var("RECIPE_SYSROOT_NATIVE", "elfutils-native"))
70 native_bindir = native_sysroot / "usr" / "bin"
71 self.debuginfod = native_bindir / "debuginfod"
72 self.debuginfod_find = native_bindir / "debuginfod-find"
73
74 cmd = [
75 self.debuginfod,
76 "--verbose",
77 # In-memory database, this is a one-shot test
78 "--database=:memory:",
79 # Don't use all the host cores
80 "--concurrency=8",
81 "--connection-pool=8",
82 # Disable rescanning, this is a one-shot test
83 "--rescan-time=0",
84 "--groom-time=0",
85 get_bb_var("DEPLOY_DIR"),
86 ]
87
88 format = get_bb_var("PACKAGE_CLASSES").split()[0]
89 if format == "package_deb":
90 cmd.append("--scan-deb-dir")
91 elif format == "package_ipk":
92 cmd.append("--scan-deb-dir")
93 elif format == "package_rpm":
94 cmd.append("--scan-rpm-dir")
95 else:
96 self.fail("Unknown package class %s" % format)
97
98 # Find a free port. Racy, but the window is small.
99 with socketserver.TCPServer(("localhost", 0), None) as s:
100 self.port = s.server_address[1]
101 cmd.append("--port=%d" % self.port)
102
103 self.logger.info(f"Starting server {cmd}")
104 self.debuginfod = subprocess.Popen(cmd, env={})
105 self.wait_for_debuginfod(self.port)
106
107
108 def test_debuginfod_native(self):
109 """
110 Test debuginfod outside of qemu, by building a package and looking up a
111 binary's debuginfo using elfutils-native.
112 """
113
114 self.write_config("""
115TMPDIR = "${TOPDIR}/tmp-debuginfod"
116DISTRO_FEATURES:append = " debuginfod"
117""")
118 bitbake("elfutils-native:do_addto_recipe_sysroot xz xz:do_package")
119
120 try:
121 self.start_debuginfod()
122
123 env = os.environ.copy()
124 env["DEBUGINFOD_URLS"] = "http://localhost:%d/" % self.port
125
126 pkgs = pathlib.Path(get_bb_var("PKGDEST", "xz"))
127 cmd = (self.debuginfod_find, "debuginfo", pkgs / "xz" / "usr" / "bin" / "xz.xz")
128 self.logger.info(f"Starting client {cmd}")
129 output = subprocess.check_output(cmd, env=env, text=True)
130 # This should be more comprehensive
131 self.assertIn("/.cache/debuginfod_client/", output)
132 finally:
133 self.debuginfod.kill()
134
135 @OETestTag("runqemu")
136 def test_debuginfod_qemu(self):
137 """
138 Test debuginfod-find inside a qemu, talking to a debuginfod on the host.
139 """
140
141 self.write_config("""
142TMPDIR = "${TOPDIR}/tmp-debuginfod"
143DISTRO_FEATURES:append = " debuginfod"
144CORE_IMAGE_EXTRA_INSTALL += "elfutils xz"
145 """)
146 bitbake("core-image-minimal elfutils-native:do_addto_recipe_sysroot")
147
148 try:
149 self.start_debuginfod()
150
151 with runqemu("core-image-minimal", runqemuparams="nographic") as qemu:
152 cmd = "DEBUGINFOD_URLS=http://%s:%d/ debuginfod-find debuginfo /usr/bin/xz" % (qemu.server_ip, self.port)
153 self.logger.info(f"Starting client {cmd}")
154 status, output = qemu.run_serial(cmd)
155 # This should be more comprehensive
156 self.assertIn("/.cache/debuginfod_client/", output)
157 finally:
158 self.debuginfod.kill()
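
The wait_for_debuginfod() helper above decides readiness by parsing debuginfod's Prometheus-style /metrics output and waiting for the thread_busy{role="scan"} gauge to drop to zero. The following is a minimal standalone sketch of that polling idea only; the function names, port handling, retry count and delay are illustrative assumptions, not code from the patch:

    import time
    import urllib.request

    def scan_jobs_remaining(port):
        # Read the metrics endpoint and return the number of busy scan threads,
        # or None if the gauge was not present in the output.
        with urllib.request.urlopen("http://localhost:%d/metrics" % port, timeout=10) as f:
            for line in f.read().decode("ascii").splitlines():
                key, _, value = line.rpartition(" ")
                if key == 'thread_busy{role="scan"}':
                    return int(value)
        return None

    def wait_until_idle(port, tries=24, delay=5):
        # Poll until no scan jobs are busy, or give up after tries * delay seconds.
        for _ in range(tries):
            if scan_jobs_remaining(port) == 0:
                return
            time.sleep(delay)
        raise TimeoutError("debuginfod still scanning after %d seconds" % (tries * delay))
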
diff --git a/meta/lib/oeqa/selftest/cases/devtool.py b/meta/lib/oeqa/selftest/cases/devtool.py
index 3385546e8e..51949e3c93 100644
--- a/meta/lib/oeqa/selftest/cases/devtool.py
+++ b/meta/lib/oeqa/selftest/cases/devtool.py
@@ -1,18 +1,23 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
7import errno
5import os 8import os
6import re 9import re
7import shutil 10import shutil
8import tempfile 11import tempfile
9import glob 12import glob
10import fnmatch 13import fnmatch
14import unittest
15import json
11 16
12import oeqa.utils.ftools as ftools
13from oeqa.selftest.case import OESelftestTestCase 17from oeqa.selftest.case import OESelftestTestCase
14from oeqa.utils.commands import runCmd, bitbake, get_bb_var, create_temp_layer 18from oeqa.utils.commands import runCmd, bitbake, get_bb_var, create_temp_layer
15from oeqa.utils.commands import get_bb_vars, runqemu, get_test_layer 19from oeqa.utils.commands import get_bb_vars, runqemu, get_test_layer
20from oeqa.core.decorator import OETestTag
16 21
17oldmetapath = None 22oldmetapath = None
18 23
@@ -24,6 +29,9 @@ def setUpModule():
24 corecopydir = os.path.join(templayerdir, 'core-copy') 29 corecopydir = os.path.join(templayerdir, 'core-copy')
25 bblayers_conf = os.path.join(os.environ['BUILDDIR'], 'conf', 'bblayers.conf') 30 bblayers_conf = os.path.join(os.environ['BUILDDIR'], 'conf', 'bblayers.conf')
26 edited_layers = [] 31 edited_layers = []
32 # make sure user doesn't have a local workspace
33 result = runCmd('bitbake-layers show-layers')
34 assert "workspacelayer" not in result.output, "Devtool test suite cannot be run with a local workspace directory"
27 35
28 # We need to take a copy of the meta layer so we can modify it and not 36 # We need to take a copy of the meta layer so we can modify it and not
29 # have any races against other tests that might be running in parallel 37 # have any races against other tests that might be running in parallel
@@ -38,10 +46,17 @@ def setUpModule():
38 canonical_layerpath = os.path.realpath(canonical_layerpath) + '/' 46 canonical_layerpath = os.path.realpath(canonical_layerpath) + '/'
39 edited_layers.append(layerpath) 47 edited_layers.append(layerpath)
40 oldmetapath = os.path.realpath(layerpath) 48 oldmetapath = os.path.realpath(layerpath)
49
50 # when downloading poky from tar.gz some tests will be skipped (BUG 12389)
51 try:
52 runCmd('git rev-parse --is-inside-work-tree', cwd=canonical_layerpath)
53 except:
54 raise unittest.SkipTest("devtool tests require folder to be a git repo")
55
41 result = runCmd('git rev-parse --show-toplevel', cwd=canonical_layerpath) 56 result = runCmd('git rev-parse --show-toplevel', cwd=canonical_layerpath)
42 oldreporoot = result.output.rstrip() 57 oldreporoot = result.output.rstrip()
43 newmetapath = os.path.join(corecopydir, os.path.relpath(oldmetapath, oldreporoot)) 58 newmetapath = os.path.join(corecopydir, os.path.relpath(oldmetapath, oldreporoot))
44 runCmd('git clone %s %s' % (oldreporoot, corecopydir), cwd=templayerdir) 59 runCmd('git clone file://%s %s' % (oldreporoot, corecopydir), cwd=templayerdir)
45 # Now we need to copy any modified files 60 # Now we need to copy any modified files
46 # You might ask "why not just copy the entire tree instead of 61 # You might ask "why not just copy the entire tree instead of
47 # cloning and doing this?" - well, the problem with that is 62 # cloning and doing this?" - well, the problem with that is
@@ -80,32 +95,15 @@ def tearDownModule():
80 bb.utils.edit_bblayers_conf(bblayers_conf, None, None, bblayers_edit_cb) 95 bb.utils.edit_bblayers_conf(bblayers_conf, None, None, bblayers_edit_cb)
81 shutil.rmtree(templayerdir) 96 shutil.rmtree(templayerdir)
82 97
83class DevtoolBase(OESelftestTestCase): 98class DevtoolTestCase(OESelftestTestCase):
84
85 @classmethod
86 def setUpClass(cls):
87 super(DevtoolBase, cls).setUpClass()
88 bb_vars = get_bb_vars(['TOPDIR', 'SSTATE_DIR'])
89 cls.original_sstate = bb_vars['SSTATE_DIR']
90 cls.devtool_sstate = os.path.join(bb_vars['TOPDIR'], 'sstate_devtool')
91 cls.sstate_conf = 'SSTATE_DIR = "%s"\n' % cls.devtool_sstate
92 cls.sstate_conf += ('SSTATE_MIRRORS += "file://.* file:///%s/PATH"\n'
93 % cls.original_sstate)
94
95 @classmethod
96 def tearDownClass(cls):
97 cls.logger.debug('Deleting devtool sstate cache on %s' % cls.devtool_sstate)
98 runCmd('rm -rf %s' % cls.devtool_sstate)
99 super(DevtoolBase, cls).tearDownClass()
100 99
101 def setUp(self): 100 def setUp(self):
102 """Test case setup function""" 101 """Test case setup function"""
103 super(DevtoolBase, self).setUp() 102 super(DevtoolTestCase, self).setUp()
104 self.workspacedir = os.path.join(self.builddir, 'workspace') 103 self.workspacedir = os.path.join(self.builddir, 'workspace')
105 self.assertTrue(not os.path.exists(self.workspacedir), 104 self.assertTrue(not os.path.exists(self.workspacedir),
106 'This test cannot be run with a workspace directory ' 105 'This test cannot be run with a workspace directory '
107 'under the build directory') 106 'under the build directory')
108 self.append_config(self.sstate_conf)
109 107
110 def _check_src_repo(self, repo_dir): 108 def _check_src_repo(self, repo_dir):
111 """Check srctree git repository""" 109 """Check srctree git repository"""
@@ -235,6 +233,100 @@ class DevtoolBase(OESelftestTestCase):
235 filelist.append(' '.join(splitline)) 233 filelist.append(' '.join(splitline))
236 return filelist 234 return filelist
237 235
236 def _check_diff(self, diffoutput, addlines, removelines):
237 """Check output from 'git diff' matches expectation"""
238 remaining_addlines = addlines[:]
239 remaining_removelines = removelines[:]
240 for line in diffoutput.splitlines():
241 if line.startswith('+++') or line.startswith('---'):
242 continue
243 elif line.startswith('+'):
244 matched = False
245 for item in addlines:
246 if re.match(item, line[1:].strip()):
247 matched = True
248 remaining_addlines.remove(item)
249 break
250 self.assertTrue(matched, 'Unexpected diff add line: %s' % line)
251 elif line.startswith('-'):
252 matched = False
253 for item in removelines:
254 if re.match(item, line[1:].strip()):
255 matched = True
256 remaining_removelines.remove(item)
257 break
258 self.assertTrue(matched, 'Unexpected diff remove line: %s' % line)
259 if remaining_addlines:
260 self.fail('Expected added lines not found: %s' % remaining_addlines)
261 if remaining_removelines:
262 self.fail('Expected removed lines not found: %s' % remaining_removelines)
263
264 def _check_runqemu_prerequisites(self):
265 """Check runqemu is available
266
267 Whilst some tests would seemingly be better placed as runtime tests,
268 unfortunately the runtime tests run under bitbake and you can't run
269 devtool within bitbake (since devtool needs to run bitbake itself).
270 Additionally we are testing build-time functionality as well, so
271 really this has to be done as an oe-selftest test.
272 """
273 machine = get_bb_var('MACHINE')
274 if not machine.startswith('qemu'):
275 self.skipTest('This test only works with qemu machines')
276 if not os.path.exists('/etc/runqemu-nosudo'):
277 self.skipTest('You must set up tap devices with scripts/runqemu-gen-tapdevs before running this test')
278 result = runCmd('PATH="$PATH:/sbin:/usr/sbin" ip tuntap show', ignore_status=True)
279 if result.status != 0:
280 result = runCmd('PATH="$PATH:/sbin:/usr/sbin" ifconfig -a', ignore_status=True)
281 if result.status != 0:
282 self.skipTest('Failed to determine if tap devices exist with ifconfig or ip: %s' % result.output)
283 for line in result.output.splitlines():
284 if line.startswith('tap'):
285 break
286 else:
287 self.skipTest('No tap devices found - you must set up tap devices with scripts/runqemu-gen-tapdevs before running this test')
288
289 def _test_devtool_add_git_url(self, git_url, version, pn, resulting_src_uri):
290 self.track_for_cleanup(self.workspacedir)
291 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
292 result = runCmd('devtool add --version %s %s %s' % (version, pn, git_url))
293 self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created')
294 # Check the recipe name is correct
295 recipefile = get_bb_var('FILE', pn)
296 self.assertIn('%s_git.bb' % pn, recipefile, 'Recipe file incorrectly named')
297 self.assertIn(recipefile, result.output)
298 # Test devtool status
299 result = runCmd('devtool status')
300 self.assertIn(pn, result.output)
301 self.assertIn(recipefile, result.output)
302 checkvars = {}
303 checkvars['SRC_URI'] = resulting_src_uri
304 self._test_recipe_contents(recipefile, checkvars, [])
305
306class DevtoolBase(DevtoolTestCase):
307
308 @classmethod
309 def setUpClass(cls):
310 super(DevtoolBase, cls).setUpClass()
311 bb_vars = get_bb_vars(['TOPDIR', 'SSTATE_DIR'])
312 cls.original_sstate = bb_vars['SSTATE_DIR']
313 cls.devtool_sstate = os.path.join(bb_vars['TOPDIR'], 'sstate_devtool')
314 cls.sstate_conf = 'SSTATE_DIR = "%s"\n' % cls.devtool_sstate
315 cls.sstate_conf += ('SSTATE_MIRRORS += "file://.* file:///%s/PATH"\n'
316 % cls.original_sstate)
317 cls.sstate_conf += ('BB_HASHSERVE_UPSTREAM = "hashserv.yocto.io:8687"\n')
318
319 @classmethod
320 def tearDownClass(cls):
321 cls.logger.debug('Deleting devtool sstate cache on %s' % cls.devtool_sstate)
322 runCmd('rm -rf %s' % cls.devtool_sstate)
323 super(DevtoolBase, cls).tearDownClass()
324
325 def setUp(self):
326 """Test case setup function"""
327 super(DevtoolBase, self).setUp()
328 self.append_config(self.sstate_conf)
329
238 330
239class DevtoolTests(DevtoolBase): 331class DevtoolTests(DevtoolBase):
240 332
@@ -304,6 +396,38 @@ class DevtoolAddTests(DevtoolBase):
304 bindir = bindir[1:] 396 bindir = bindir[1:]
305 self.assertTrue(os.path.isfile(os.path.join(installdir, bindir, 'pv')), 'pv binary not found in D') 397 self.assertTrue(os.path.isfile(os.path.join(installdir, bindir, 'pv')), 'pv binary not found in D')
306 398
399 def test_devtool_add_binary(self):
400 # Create a binary package containing a known test file
401 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
402 self.track_for_cleanup(tempdir)
403 pn = 'tst-bin'
404 pv = '1.0'
405 test_file_dir = "var/lib/%s/" % pn
406 test_file_name = "test_file"
407 test_file_content = "TEST CONTENT"
408 test_file_package_root = os.path.join(tempdir, pn)
409 test_file_dir_full = os.path.join(test_file_package_root, test_file_dir)
410 bb.utils.mkdirhier(test_file_dir_full)
411 with open(os.path.join(test_file_dir_full, test_file_name), "w") as f:
412 f.write(test_file_content)
413 bin_package_path = os.path.join(tempdir, "%s.tar.gz" % pn)
414 runCmd("tar czf %s -C %s ." % (bin_package_path, test_file_package_root))
415
416 # Test devtool add -b on the binary package
417 self.track_for_cleanup(self.workspacedir)
418 self.add_command_to_tearDown('bitbake -c cleansstate %s' % pn)
419 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
420 result = runCmd('devtool add -b %s %s' % (pn, bin_package_path))
421 self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created')
422
423 # Build the resulting recipe
424 result = runCmd('devtool build %s' % pn)
425 installdir = get_bb_var('D', pn)
426 self.assertTrue(installdir, 'Could not query installdir variable')
427
428 # Check that a known file from the binary package has indeed been installed
429 self.assertTrue(os.path.isfile(os.path.join(installdir, test_file_dir, test_file_name)), '%s not found in D' % test_file_name)
430
307 def test_devtool_add_git_local(self): 431 def test_devtool_add_git_local(self):
308 # We need dbus built so that DEPENDS recognition works 432 # We need dbus built so that DEPENDS recognition works
309 bitbake('dbus') 433 bitbake('dbus')
@@ -336,15 +460,31 @@ class DevtoolAddTests(DevtoolBase):
336 self.assertIn(srcdir, result.output) 460 self.assertIn(srcdir, result.output)
337 self.assertIn(recipefile, result.output) 461 self.assertIn(recipefile, result.output)
338 checkvars = {} 462 checkvars = {}
339 checkvars['LICENSE'] = 'GPLv2' 463 checkvars['LICENSE'] = 'GPL-2.0-only'
340 checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263' 464 checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263'
341 checkvars['S'] = '${WORKDIR}/git' 465 checkvars['S'] = '${WORKDIR}/git'
342 checkvars['PV'] = '0.1+git${SRCPV}' 466 checkvars['PV'] = '0.1+git'
343 checkvars['SRC_URI'] = 'git://git.yoctoproject.org/git/dbus-wait;protocol=https' 467 checkvars['SRC_URI'] = 'git://git.yoctoproject.org/git/dbus-wait;protocol=https;branch=master'
344 checkvars['SRCREV'] = srcrev 468 checkvars['SRCREV'] = srcrev
345 checkvars['DEPENDS'] = set(['dbus']) 469 checkvars['DEPENDS'] = set(['dbus'])
346 self._test_recipe_contents(recipefile, checkvars, []) 470 self._test_recipe_contents(recipefile, checkvars, [])
347 471
472 def test_devtool_add_git_style1(self):
473 version = 'v3.1.0'
474 pn = 'mbedtls'
475 # this will trigger reformat_git_uri with branch parameter in url
476 git_url = "'git://git@github.com/ARMmbed/mbedtls.git;branch=mbedtls-2.28;protocol=https'"
477 resulting_src_uri = "git://git@github.com/ARMmbed/mbedtls.git;branch=mbedtls-2.28;protocol=https"
478 self._test_devtool_add_git_url(git_url, version, pn, resulting_src_uri)
479
480 def test_devtool_add_git_style2(self):
481 version = 'v3.1.0'
482 pn = 'mbedtls'
483 # this will trigger reformat_git_uri with branch parameter in url
484 git_url = "'git://git@github.com/ARMmbed/mbedtls.git;protocol=https'"
485 resulting_src_uri = "gitsm://git@github.com/ARMmbed/mbedtls.git;protocol=https;branch=master"
486 self._test_devtool_add_git_url(git_url, version, pn, resulting_src_uri)
487
348 def test_devtool_add_library(self): 488 def test_devtool_add_library(self):
349 # Fetch source 489 # Fetch source
350 tempdir = tempfile.mkdtemp(prefix='devtoolqa') 490 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
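
The test_devtool_add_git_style1/style2 cases in the hunk above feed devtool URLs with and without a branch parameter and then check the SRC_URI that gets written back (including a default branch being added). As a rough illustration of the branch-defaulting half of that normalisation only; this is a simplified sketch, not the actual reformat_git_uri implementation, and ensure_branch is a made-up name:

    def ensure_branch(git_url, default_branch="master"):
        # Append ;branch=<default> when the URL carries no branch parameter.
        url, _, params = git_url.partition(';')
        parms = dict(p.split('=', 1) for p in params.split(';') if p)
        parms.setdefault('branch', default_branch)
        return url + ''.join(';%s=%s' % kv for kv in sorted(parms.items()))

    # "git://git@github.com/ARMmbed/mbedtls.git;protocol=https" becomes
    # "git://git@github.com/ARMmbed/mbedtls.git;branch=master;protocol=https"
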
@@ -373,7 +513,7 @@ class DevtoolAddTests(DevtoolBase):
373 recipefile = '%s/recipes/libftdi/libftdi_%s.bb' % (self.workspacedir, version) 513 recipefile = '%s/recipes/libftdi/libftdi_%s.bb' % (self.workspacedir, version)
374 result = runCmd('recipetool setvar %s EXTRA_OECMAKE -- \'-DPYTHON_BINDINGS=OFF -DLIBFTDI_CMAKE_CONFIG_DIR=${datadir}/cmake/Modules\'' % recipefile) 514 result = runCmd('recipetool setvar %s EXTRA_OECMAKE -- \'-DPYTHON_BINDINGS=OFF -DLIBFTDI_CMAKE_CONFIG_DIR=${datadir}/cmake/Modules\'' % recipefile)
375 with open(recipefile, 'a') as f: 515 with open(recipefile, 'a') as f:
376 f.write('\nFILES_${PN}-dev += "${datadir}/cmake/Modules"\n') 516 f.write('\nFILES:${PN}-dev += "${datadir}/cmake/Modules"\n')
377 # We don't have the ability to pick up this dependency automatically yet... 517 # We don't have the ability to pick up this dependency automatically yet...
378 f.write('\nDEPENDS += "libusb1"\n') 518 f.write('\nDEPENDS += "libusb1"\n')
379 f.write('\nTESTLIBOUTPUT = "${COMPONENTS_DIR}/${TUNE_PKGARCH}/${PN}/${libdir}"\n') 519 f.write('\nTESTLIBOUTPUT = "${COMPONENTS_DIR}/${TUNE_PKGARCH}/${PN}/${libdir}"\n')
@@ -405,7 +545,7 @@ class DevtoolAddTests(DevtoolBase):
405 self.track_for_cleanup(self.workspacedir) 545 self.track_for_cleanup(self.workspacedir)
406 self.add_command_to_tearDown('bitbake -c cleansstate %s' % testrecipe) 546 self.add_command_to_tearDown('bitbake -c cleansstate %s' % testrecipe)
407 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') 547 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
408 result = runCmd('devtool add %s %s -f %s' % (testrecipe, srcdir, url)) 548 result = runCmd('devtool add --no-pypi %s %s -f %s' % (testrecipe, srcdir, url))
409 self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. %s' % result.output) 549 self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. %s' % result.output)
410 self.assertTrue(os.path.isfile(os.path.join(srcdir, 'setup.py')), 'Unable to find setup.py in source directory') 550 self.assertTrue(os.path.isfile(os.path.join(srcdir, 'setup.py')), 'Unable to find setup.py in source directory')
411 self.assertTrue(os.path.isdir(os.path.join(srcdir, '.git')), 'git repository for external source tree was not created') 551 self.assertTrue(os.path.isdir(os.path.join(srcdir, '.git')), 'git repository for external source tree was not created')
@@ -424,7 +564,7 @@ class DevtoolAddTests(DevtoolBase):
424 result = runCmd('devtool reset -n %s' % testrecipe) 564 result = runCmd('devtool reset -n %s' % testrecipe)
425 shutil.rmtree(srcdir) 565 shutil.rmtree(srcdir)
426 fakever = '1.9' 566 fakever = '1.9'
427 result = runCmd('devtool add %s %s -f %s -V %s' % (testrecipe, srcdir, url, fakever)) 567 result = runCmd('devtool add --no-pypi %s %s -f %s -V %s' % (testrecipe, srcdir, url, fakever))
428 self.assertTrue(os.path.isfile(os.path.join(srcdir, 'setup.py')), 'Unable to find setup.py in source directory') 568 self.assertTrue(os.path.isfile(os.path.join(srcdir, 'setup.py')), 'Unable to find setup.py in source directory')
429 # Test devtool status 569 # Test devtool status
430 result = runCmd('devtool status') 570 result = runCmd('devtool status')
@@ -442,6 +582,7 @@ class DevtoolAddTests(DevtoolBase):
442 tempdir = tempfile.mkdtemp(prefix='devtoolqa') 582 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
443 self.track_for_cleanup(tempdir) 583 self.track_for_cleanup(tempdir)
444 url = 'gitsm://git.yoctoproject.org/mraa' 584 url = 'gitsm://git.yoctoproject.org/mraa'
585 url_branch = '%s;branch=master' % url
445 checkrev = 'ae127b19a50aa54255e4330ccfdd9a5d058e581d' 586 checkrev = 'ae127b19a50aa54255e4330ccfdd9a5d058e581d'
446 testrecipe = 'mraa' 587 testrecipe = 'mraa'
447 srcdir = os.path.join(tempdir, testrecipe) 588 srcdir = os.path.join(tempdir, testrecipe)
@@ -461,8 +602,8 @@ class DevtoolAddTests(DevtoolBase):
461 self.assertIn('_git.bb', recipefile, 'Recipe file incorrectly named') 602 self.assertIn('_git.bb', recipefile, 'Recipe file incorrectly named')
462 checkvars = {} 603 checkvars = {}
463 checkvars['S'] = '${WORKDIR}/git' 604 checkvars['S'] = '${WORKDIR}/git'
464 checkvars['PV'] = '1.0+git${SRCPV}' 605 checkvars['PV'] = '1.0+git'
465 checkvars['SRC_URI'] = url 606 checkvars['SRC_URI'] = url_branch
466 checkvars['SRCREV'] = '${AUTOREV}' 607 checkvars['SRCREV'] = '${AUTOREV}'
467 self._test_recipe_contents(recipefile, checkvars, []) 608 self._test_recipe_contents(recipefile, checkvars, [])
468 # Try with revision and version specified 609 # Try with revision and version specified
@@ -480,8 +621,8 @@ class DevtoolAddTests(DevtoolBase):
480 self.assertIn('_git.bb', recipefile, 'Recipe file incorrectly named') 621 self.assertIn('_git.bb', recipefile, 'Recipe file incorrectly named')
481 checkvars = {} 622 checkvars = {}
482 checkvars['S'] = '${WORKDIR}/git' 623 checkvars['S'] = '${WORKDIR}/git'
483 checkvars['PV'] = '1.5+git${SRCPV}' 624 checkvars['PV'] = '1.5+git'
484 checkvars['SRC_URI'] = url 625 checkvars['SRC_URI'] = url_branch
485 checkvars['SRCREV'] = checkrev 626 checkvars['SRCREV'] = checkrev
486 self._test_recipe_contents(recipefile, checkvars, []) 627 self._test_recipe_contents(recipefile, checkvars, [])
487 628
@@ -504,7 +645,7 @@ class DevtoolAddTests(DevtoolBase):
504 result = runCmd('devtool status') 645 result = runCmd('devtool status')
505 self.assertIn(testrecipe, result.output) 646 self.assertIn(testrecipe, result.output)
506 self.assertIn(srcdir, result.output) 647 self.assertIn(srcdir, result.output)
507 # Check recipe 648 # Check recipe created by devtool add
508 recipefile = get_bb_var('FILE', testrecipe) 649 recipefile = get_bb_var('FILE', testrecipe)
509 self.assertIn('%s_%s.bb' % (testrecipe, testver), recipefile, 'Recipe file incorrectly named') 650 self.assertIn('%s_%s.bb' % (testrecipe, testver), recipefile, 'Recipe file incorrectly named')
510 checkvars = {} 651 checkvars = {}
@@ -536,6 +677,19 @@ class DevtoolAddTests(DevtoolBase):
536 # Test devtool build 677 # Test devtool build
537 result = runCmd('devtool build %s' % pn) 678 result = runCmd('devtool build %s' % pn)
538 679
680 def test_devtool_add_python_egg_requires(self):
681 # Fetch source
682 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
683 self.track_for_cleanup(tempdir)
684 testver = '0.14.0'
685 url = 'https://files.pythonhosted.org/packages/e9/9e/25d59f5043cf763833b2581c8027fa92342c4cf8ee523b498ecdf460c16d/uvicorn-%s.tar.gz' % testver
686 testrecipe = 'python3-uvicorn'
687 srcdir = os.path.join(tempdir, testrecipe)
688 # Test devtool add
689 self.track_for_cleanup(self.workspacedir)
690 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
691 result = runCmd('devtool add %s %s -f %s' % (testrecipe, srcdir, url))
692
539class DevtoolModifyTests(DevtoolBase): 693class DevtoolModifyTests(DevtoolBase):
540 694
541 def test_devtool_modify(self): 695 def test_devtool_modify(self):
@@ -649,7 +803,7 @@ class DevtoolModifyTests(DevtoolBase):
649 self.track_for_cleanup(self.workspacedir) 803 self.track_for_cleanup(self.workspacedir)
650 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') 804 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
651 805
652 testrecipes = 'perf kernel-devsrc package-index core-image-minimal meta-toolchain packagegroup-core-sdk meta-ide-support'.split() 806 testrecipes = 'perf kernel-devsrc package-index core-image-minimal meta-toolchain packagegroup-core-sdk'.split()
653 # Find actual name of gcc-source since it now includes the version - crude, but good enough for this purpose 807 # Find actual name of gcc-source since it now includes the version - crude, but good enough for this purpose
654 result = runCmd('bitbake-layers show-recipes gcc-source*') 808 result = runCmd('bitbake-layers show-recipes gcc-source*')
655 for line in result.output.splitlines(): 809 for line in result.output.splitlines():
@@ -697,6 +851,7 @@ class DevtoolModifyTests(DevtoolBase):
697 851
698 self.assertTrue(bbclassextended, 'None of these recipes are BBCLASSEXTENDed to native - need to adjust testrecipes list: %s' % ', '.join(testrecipes)) 852 self.assertTrue(bbclassextended, 'None of these recipes are BBCLASSEXTENDed to native - need to adjust testrecipes list: %s' % ', '.join(testrecipes))
699 self.assertTrue(inheritnative, 'None of these recipes do "inherit native" - need to adjust testrecipes list: %s' % ', '.join(testrecipes)) 853 self.assertTrue(inheritnative, 'None of these recipes do "inherit native" - need to adjust testrecipes list: %s' % ', '.join(testrecipes))
854
700 def test_devtool_modify_localfiles_only(self): 855 def test_devtool_modify_localfiles_only(self):
701 # Check preconditions 856 # Check preconditions
702 testrecipe = 'base-files' 857 testrecipe = 'base-files'
@@ -763,6 +918,122 @@ class DevtoolModifyTests(DevtoolBase):
763 # Try building 918 # Try building
764 bitbake(testrecipe) 919 bitbake(testrecipe)
765 920
921 def test_devtool_modify_git_no_extract(self):
922 # Check preconditions
923 testrecipe = 'psplash'
924 src_uri = get_bb_var('SRC_URI', testrecipe)
925 self.assertIn('git://', src_uri, 'This test expects the %s recipe to be a git recipe' % testrecipe)
926 # Clean up anything in the workdir/sysroot/sstate cache
927 bitbake('%s -c cleansstate' % testrecipe)
928 # Try modifying a recipe
929 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
930 self.track_for_cleanup(tempdir)
931 self.track_for_cleanup(self.workspacedir)
932 self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
933 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
934 result = runCmd('git clone https://git.yoctoproject.org/psplash %s && devtool modify -n %s %s' % (tempdir, testrecipe, tempdir))
935 self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. devtool output: %s' % result.output)
936 matches = glob.glob(os.path.join(self.workspacedir, 'appends', 'psplash_*.bbappend'))
937 self.assertTrue(matches, 'bbappend not created')
938 # Test devtool status
939 result = runCmd('devtool status')
940 self.assertIn(testrecipe, result.output)
941 self.assertIn(tempdir, result.output)
942
943 def test_devtool_modify_git_crates_subpath(self):
944 # This tests two things in devtool context:
945 # - that we support local git dependencies for cargo based recipes
946 # - that we support patches in SRC_URI when git url contains subpath parameter
947
948 # Check preconditions:
949 # recipe inherits cargo
950 # git:// uri with a subpath as the main package
951 # some crate:// in SRC_URI
952 # others git:// in SRC_URI
953 # contains a patch
954 testrecipe = 'hello-rs'
955 bb_vars = get_bb_vars(['SRC_URI', 'FILE', 'WORKDIR', 'CARGO_HOME'], testrecipe)
956 recipefile = bb_vars['FILE']
957 workdir = bb_vars['WORKDIR']
958 cargo_home = bb_vars['CARGO_HOME']
959 src_uri = bb_vars['SRC_URI'].split()
960 self.assertTrue(src_uri[0].startswith('git://'),
961 'This test expects the %s recipe to have a git repo has its main uri' % testrecipe)
962 self.assertIn(';subpath=', src_uri[0],
963 'This test expects the %s recipe to have a git uri with subpath' % testrecipe)
964 self.assertTrue(any([uri.startswith('crate://') for uri in src_uri]),
965 'This test expects the %s recipe to have some crates in its src uris' % testrecipe)
966 self.assertGreaterEqual(sum(map(lambda x:x.startswith('git://'), src_uri)), 2,
967 'This test expects the %s recipe to have several git:// uris' % testrecipe)
968 self.assertTrue(any([uri.startswith('file://') and '.patch' in uri for uri in src_uri]),
969 'This test expects the %s recipe to have a patch in its src uris' % testrecipe)
970
971 self._test_recipe_contents(recipefile, {}, ['ptest-cargo'])
972
973 # Clean up anything in the workdir/sysroot/sstate cache
974 bitbake('%s -c cleansstate' % testrecipe)
975 # Try modifying a recipe
976 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
977 self.track_for_cleanup(tempdir)
978 self.track_for_cleanup(self.workspacedir)
979 self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
980 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
981 result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
982 self.assertExists(os.path.join(tempdir, 'Cargo.toml'), 'Extracted source could not be found')
983 self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. devtool output: %s' % result.output)
984 matches = glob.glob(os.path.join(self.workspacedir, 'appends', '%s_*.bbappend' % testrecipe))
985 self.assertTrue(matches, 'bbappend not created')
986 # Test devtool status
987 result = runCmd('devtool status')
988 self.assertIn(testrecipe, result.output)
989 self.assertIn(tempdir, result.output)
990 # Check git repo
991 self._check_src_repo(tempdir)
992 # Check that the patch is correctly applied.
993 # The last commit message in the tree must contain the following note:
994 # Notes (devtool):
995 # original patch: <patchname>
996 # ..
997 patchname = None
998 for uri in src_uri:
999 if uri.startswith('file://') and '.patch' in uri:
1000 patchname = uri.replace("file://", "").partition('.patch')[0] + '.patch'
1001 self.assertIsNotNone(patchname)
1002 result = runCmd('git -C %s log -1' % tempdir)
1003 self.assertIn("Notes (devtool):\n original patch: %s" % patchname, result.output)
1004
1005 # Configure the recipe to check that the git dependencies are correctly patched in cargo config
1006 bitbake('-c configure %s' % testrecipe)
1007
1008 cargo_config_path = os.path.join(cargo_home, 'config')
1009 with open(cargo_config_path, "r") as f:
1010 cargo_config_contents = [line.strip('\n') for line in f.readlines()]
1011
1012 # Get back git dependencies of the recipe (ignoring the main one)
1013 # and check that they are all correctly patched to be fetched locally
1014 git_deps = [uri for uri in src_uri if uri.startswith("git://")][1:]
1015 for git_dep in git_deps:
1016 raw_url, _, raw_parms = git_dep.partition(";")
1017 parms = {}
1018 for parm in raw_parms.split(";"):
1019 name_parm, _, value_parm = parm.partition('=')
1020 parms[name_parm]=value_parm
1021 self.assertIn('protocol', parms, 'git dependencies uri should contain the "protocol" parameter')
1022 self.assertIn('name', parms, 'git dependencies uri should contain the "name" parameter')
1023 self.assertIn('destsuffix', parms, 'git dependencies uri should contain the "destsuffix" parameter')
1024 self.assertIn('type', parms, 'git dependencies uri should contain the "type" parameter')
1025 self.assertEqual(parms['type'], 'git-dependency', 'git dependencies uri should have "type=git-dependency"')
1026 raw_url = raw_url.replace("git://", '%s://' % parms['protocol'])
1027 patch_line = '[patch."%s"]' % raw_url
1028 path_patched = os.path.join(workdir, parms['destsuffix'])
1029 path_override_line = '%s = { path = "%s" }' % (parms['name'], path_patched)
1030 # Would have been better to use tomllib to read this file :/
1031 self.assertIn(patch_line, cargo_config_contents)
1032 self.assertIn(path_override_line, cargo_config_contents)
1033
1034 # Try to package the recipe
1035 bitbake('-c package_qa %s' % testrecipe)
1036
766 def test_devtool_modify_localfiles(self): 1037 def test_devtool_modify_localfiles(self):
767 # Check preconditions 1038 # Check preconditions
768 testrecipe = 'lighttpd' 1039 testrecipe = 'lighttpd'
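
For each extra git:// dependency, the test_devtool_modify_git_crates_subpath case in the hunk above expects the cargo config under CARGO_HOME to redirect the dependency to its local checkout via a [patch] section. The sketch below shows how the two expected lines are derived from one SRC_URI entry; the example URI, workdir and function name are made up for illustration and are not taken from the hello-rs recipe:

    def expected_patch_lines(src_uri_entry, workdir):
        # Split "git://...;protocol=...;name=...;destsuffix=...;type=git-dependency"
        # into the [patch."<url>"] header and the local path override line.
        raw_url, _, raw_parms = src_uri_entry.partition(';')
        parms = dict(p.partition('=')[::2] for p in raw_parms.split(';') if p)
        url = raw_url.replace('git://', parms['protocol'] + '://')
        patch_line = '[patch."%s"]' % url
        override_line = '%s = { path = "%s/%s" }' % (parms['name'], workdir, parms['destsuffix'])
        return patch_line, override_line

    # expected_patch_lines(
    #     'git://github.com/example/dep.git;protocol=https;name=dep;destsuffix=dep;type=git-dependency',
    #     '/path/to/workdir')
    # -> ('[patch."https://github.com/example/dep.git"]', 'dep = { path = "/path/to/workdir/dep" }')
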
@@ -828,12 +1099,43 @@ class DevtoolModifyTests(DevtoolBase):
828 runCmd('git -C %s checkout %s' % (tempdir, branch)) 1099 runCmd('git -C %s checkout %s' % (tempdir, branch))
829 with open(source, "rt") as f: 1100 with open(source, "rt") as f:
830 content = f.read() 1101 content = f.read()
831 self.assertEquals(content, expected) 1102 self.assertEqual(content, expected)
832 check('devtool', 'This is a test for something\n') 1103 if self.td["MACHINE"] == "qemux86":
1104 check('devtool', 'This is a test for qemux86\n')
1105 elif self.td["MACHINE"] == "qemuarm":
1106 check('devtool', 'This is a test for qemuarm\n')
1107 else:
1108 check('devtool', 'This is a test for something\n')
833 check('devtool-no-overrides', 'This is a test for something\n') 1109 check('devtool-no-overrides', 'This is a test for something\n')
834 check('devtool-override-qemuarm', 'This is a test for qemuarm\n') 1110 check('devtool-override-qemuarm', 'This is a test for qemuarm\n')
835 check('devtool-override-qemux86', 'This is a test for qemux86\n') 1111 check('devtool-override-qemux86', 'This is a test for qemux86\n')
836 1112
1113 def test_devtool_modify_multiple_sources(self):
1114 # This test checks that recipes fetching several sources can be used with devtool modify/build
1115 # Check preconditions
1116 testrecipe = 'bzip2'
1117 src_uri = get_bb_var('SRC_URI', testrecipe)
1118 src1 = 'https://' in src_uri
1119 src2 = 'git://' in src_uri
1120 self.assertTrue(src1 and src2, 'This test expects the %s recipe to fetch both a git source and a tarball but it seems that it no longer does' % testrecipe)
1121 # Clean up anything in the workdir/sysroot/sstate cache
1122 bitbake('%s -c cleansstate' % testrecipe)
1123 # Try modifying a recipe
1124 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
1125 self.track_for_cleanup(tempdir)
1126 self.track_for_cleanup(self.workspacedir)
1127 self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
1128 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
1129 result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
1130 self.assertEqual(result.status, 0, "Could not modify recipe %s. Output: %s" % (testrecipe, result.output))
1131 # Test devtool status
1132 result = runCmd('devtool status')
1133 self.assertIn(testrecipe, result.output)
1134 self.assertIn(tempdir, result.output)
1135 # Try building
1136 result = bitbake(testrecipe)
1137 self.assertEqual(result.status, 0, "Bitbake failed, exit code %s, output %s" % (result.status, result.output))
1138
837class DevtoolUpdateTests(DevtoolBase): 1139class DevtoolUpdateTests(DevtoolBase):
838 1140
839 def test_devtool_update_recipe(self): 1141 def test_devtool_update_recipe(self):
@@ -863,14 +1165,15 @@ class DevtoolUpdateTests(DevtoolBase):
863 result = runCmd('git commit -m "Add a new file"', cwd=tempdir) 1165 result = runCmd('git commit -m "Add a new file"', cwd=tempdir)
864 self.add_command_to_tearDown('cd %s; rm %s/*.patch; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile))) 1166 self.add_command_to_tearDown('cd %s; rm %s/*.patch; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile)))
865 result = runCmd('devtool update-recipe %s' % testrecipe) 1167 result = runCmd('devtool update-recipe %s' % testrecipe)
1168 result = runCmd('git add minicom', cwd=os.path.dirname(recipefile))
866 expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)), 1169 expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
867 ('??', '.*/0001-Change-the-README.patch$'), 1170 ('A ', '.*/0001-Change-the-README.patch$'),
868 ('??', '.*/0002-Add-a-new-file.patch$')] 1171 ('A ', '.*/0002-Add-a-new-file.patch$')]
869 self._check_repo_status(os.path.dirname(recipefile), expected_status) 1172 self._check_repo_status(os.path.dirname(recipefile), expected_status)
870 1173
871 def test_devtool_update_recipe_git(self): 1174 def test_devtool_update_recipe_git(self):
872 # Check preconditions 1175 # Check preconditions
873 testrecipe = 'mtd-utils' 1176 testrecipe = 'mtd-utils-selftest'
874 bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe) 1177 bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe)
875 recipefile = bb_vars['FILE'] 1178 recipefile = bb_vars['FILE']
876 src_uri = bb_vars['SRC_URI'] 1179 src_uri = bb_vars['SRC_URI']
@@ -904,28 +1207,12 @@ class DevtoolUpdateTests(DevtoolBase):
904 self._check_repo_status(os.path.dirname(recipefile), expected_status) 1207 self._check_repo_status(os.path.dirname(recipefile), expected_status)
905 1208
906 result = runCmd('git diff %s' % os.path.basename(recipefile), cwd=os.path.dirname(recipefile)) 1209 result = runCmd('git diff %s' % os.path.basename(recipefile), cwd=os.path.dirname(recipefile))
907 addlines = ['SRCREV = ".*"', 'SRC_URI = "git://git.infradead.org/mtd-utils.git"'] 1210 addlines = ['SRCREV = ".*"', 'SRC_URI = "git://git.infradead.org/mtd-utils.git;branch=master"']
908 srcurilines = src_uri.split() 1211 srcurilines = src_uri.split()
909 srcurilines[0] = 'SRC_URI = "' + srcurilines[0] 1212 srcurilines[0] = 'SRC_URI = "' + srcurilines[0]
910 srcurilines.append('"') 1213 srcurilines.append('"')
911 removelines = ['SRCREV = ".*"'] + srcurilines 1214 removelines = ['SRCREV = ".*"'] + srcurilines
912 for line in result.output.splitlines(): 1215 self._check_diff(result.output, addlines, removelines)
913 if line.startswith('+++') or line.startswith('---'):
914 continue
915 elif line.startswith('+'):
916 matched = False
917 for item in addlines:
918 if re.match(item, line[1:].strip()):
919 matched = True
920 break
921 self.assertTrue(matched, 'Unexpected diff add line: %s' % line)
922 elif line.startswith('-'):
923 matched = False
924 for item in removelines:
925 if re.match(item, line[1:].strip()):
926 matched = True
927 break
928 self.assertTrue(matched, 'Unexpected diff remove line: %s' % line)
929 # Now try with auto mode 1216 # Now try with auto mode
930 runCmd('cd %s; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, os.path.basename(recipefile))) 1217 runCmd('cd %s; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, os.path.basename(recipefile)))
931 result = runCmd('devtool update-recipe %s' % testrecipe) 1218 result = runCmd('devtool update-recipe %s' % testrecipe)
@@ -975,7 +1262,7 @@ class DevtoolUpdateTests(DevtoolBase):
975 self.assertExists(patchfile, 'Patch file not created') 1262 self.assertExists(patchfile, 'Patch file not created')
976 1263
977 # Check bbappend contents 1264 # Check bbappend contents
978 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 1265 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
979 '\n', 1266 '\n',
980 'SRC_URI += "file://0001-Add-our-custom-version.patch"\n', 1267 'SRC_URI += "file://0001-Add-our-custom-version.patch"\n',
981 '\n'] 1268 '\n']
@@ -990,7 +1277,7 @@ class DevtoolUpdateTests(DevtoolBase):
990 result = runCmd('git reset HEAD^', cwd=tempsrcdir) 1277 result = runCmd('git reset HEAD^', cwd=tempsrcdir)
991 result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir)) 1278 result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir))
992 self.assertNotExists(patchfile, 'Patch file not deleted') 1279 self.assertNotExists(patchfile, 'Patch file not deleted')
993 expectedlines2 = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 1280 expectedlines2 = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
994 '\n'] 1281 '\n']
995 with open(bbappendfile, 'r') as f: 1282 with open(bbappendfile, 'r') as f:
996 self.assertEqual(expectedlines2, f.readlines()) 1283 self.assertEqual(expectedlines2, f.readlines())
@@ -1007,10 +1294,11 @@ class DevtoolUpdateTests(DevtoolBase):
1007 1294
1008 def test_devtool_update_recipe_append_git(self): 1295 def test_devtool_update_recipe_append_git(self):
1009 # Check preconditions 1296 # Check preconditions
1010 testrecipe = 'mtd-utils' 1297 testrecipe = 'mtd-utils-selftest'
1011 bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe) 1298 bb_vars = get_bb_vars(['FILE', 'SRC_URI', 'LAYERSERIES_CORENAMES'], testrecipe)
1012 recipefile = bb_vars['FILE'] 1299 recipefile = bb_vars['FILE']
1013 src_uri = bb_vars['SRC_URI'] 1300 src_uri = bb_vars['SRC_URI']
1301 corenames = bb_vars['LAYERSERIES_CORENAMES']
1014 self.assertIn('git://', src_uri, 'This test expects the %s recipe to be a git recipe' % testrecipe) 1302 self.assertIn('git://', src_uri, 'This test expects the %s recipe to be a git recipe' % testrecipe)
1015 for entry in src_uri.split(): 1303 for entry in src_uri.split():
1016 if entry.startswith('git://'): 1304 if entry.startswith('git://'):
@@ -1041,7 +1329,7 @@ class DevtoolUpdateTests(DevtoolBase):
1041 f.write('BBFILE_PATTERN_oeselftesttemplayer = "^${LAYERDIR}/"\n') 1329 f.write('BBFILE_PATTERN_oeselftesttemplayer = "^${LAYERDIR}/"\n')
1042 f.write('BBFILE_PRIORITY_oeselftesttemplayer = "999"\n') 1330 f.write('BBFILE_PRIORITY_oeselftesttemplayer = "999"\n')
1043 f.write('BBFILE_PATTERN_IGNORE_EMPTY_oeselftesttemplayer = "1"\n') 1331 f.write('BBFILE_PATTERN_IGNORE_EMPTY_oeselftesttemplayer = "1"\n')
1044 f.write('LAYERSERIES_COMPAT_oeselftesttemplayer = "${LAYERSERIES_COMPAT_core}"\n') 1332 f.write('LAYERSERIES_COMPAT_oeselftesttemplayer = "%s"\n' % corenames)
1045 self.add_command_to_tearDown('bitbake-layers remove-layer %s || true' % templayerdir) 1333 self.add_command_to_tearDown('bitbake-layers remove-layer %s || true' % templayerdir)
1046 result = runCmd('bitbake-layers add-layer %s' % templayerdir, cwd=self.builddir) 1334 result = runCmd('bitbake-layers add-layer %s' % templayerdir, cwd=self.builddir)
1047 # Create the bbappend 1335 # Create the bbappend
@@ -1117,14 +1405,30 @@ class DevtoolUpdateTests(DevtoolBase):
1117 runCmd('echo "Bar" > new-file', cwd=tempdir) 1405 runCmd('echo "Bar" > new-file', cwd=tempdir)
1118 runCmd('git add new-file', cwd=tempdir) 1406 runCmd('git add new-file', cwd=tempdir)
1119 runCmd('git commit -m "Add new file"', cwd=tempdir) 1407 runCmd('git commit -m "Add new file"', cwd=tempdir)
1120 self.add_command_to_tearDown('cd %s; git clean -fd .; git checkout .' %
1121 os.path.dirname(recipefile))
1122 runCmd('devtool update-recipe %s' % testrecipe) 1408 runCmd('devtool update-recipe %s' % testrecipe)
1123 expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)), 1409 expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
1124 (' M', '.*/makedevs/makedevs.c$'), 1410 (' M', '.*/makedevs/makedevs.c$'),
1125 ('??', '.*/makedevs/new-local$'), 1411 ('??', '.*/makedevs/new-local$'),
1126 ('??', '.*/makedevs/0001-Add-new-file.patch$')] 1412 ('??', '.*/makedevs/0001-Add-new-file.patch$')]
1127 self._check_repo_status(os.path.dirname(recipefile), expected_status) 1413 self._check_repo_status(os.path.dirname(recipefile), expected_status)
1414 # Now try to update recipe in another layer, so first, clean it
1415 runCmd('cd %s; git clean -fd .; git checkout .' % os.path.dirname(recipefile))
1416 # Create a temporary layer and add it to bblayers.conf
1417 self._create_temp_layer(templayerdir, True, 'templayer')
1418 # Update recipe in templayer
1419 result = runCmd('devtool update-recipe %s -a %s' % (testrecipe, templayerdir))
1420 self.assertNotIn('WARNING:', result.output)
1421 # Check recipe is still clean
1422 self._check_repo_status(os.path.dirname(recipefile), [])
1423 splitpath = os.path.dirname(recipefile).split(os.sep)
1424 appenddir = os.path.join(templayerdir, splitpath[-2], splitpath[-1])
1425 bbappendfile = self._check_bbappend(testrecipe, recipefile, appenddir)
1426 patchfile = os.path.join(appenddir, testrecipe, '0001-Add-new-file.patch')
1427 new_local_file = os.path.join(appenddir, testrecipe, 'new_local')
1428 local_file = os.path.join(appenddir, testrecipe, 'makedevs.c')
1429 self.assertExists(patchfile, 'Patch file 0001-Add-new-file.patch not created')
1430 self.assertExists(local_file, 'File makedevs.c not created')
1431 self.assertExists(new_local_file, 'File new_local not created')
1128 1432
1129 def test_devtool_update_recipe_local_files_2(self): 1433 def test_devtool_update_recipe_local_files_2(self):
1130 """Check local source files support when oe-local-files is in Git""" 1434 """Check local source files support when oe-local-files is in Git"""
@@ -1259,7 +1563,7 @@ class DevtoolUpdateTests(DevtoolBase):
1259 # Modify one file 1563 # Modify one file
1260 srctree = os.path.join(self.workspacedir, 'sources', testrecipe) 1564 srctree = os.path.join(self.workspacedir, 'sources', testrecipe)
1261 runCmd('echo "Another line" >> README', cwd=srctree) 1565 runCmd('echo "Another line" >> README', cwd=srctree)
1262 runCmd('git commit -a --amend --no-edit', cwd=srctree) 1566 runCmd('git commit -a --amend --no-edit --no-verify', cwd=srctree)
1263 self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile))) 1567 self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile)))
1264 result = runCmd('devtool update-recipe %s' % testrecipe) 1568 result = runCmd('devtool update-recipe %s' % testrecipe)
1265 expected_status = [(' M', '.*/%s/readme.patch.gz$' % testrecipe)] 1569 expected_status = [(' M', '.*/%s/readme.patch.gz$' % testrecipe)]
@@ -1295,6 +1599,121 @@ class DevtoolUpdateTests(DevtoolBase):
1295 expected_status = [] 1599 expected_status = []
1296 self._check_repo_status(os.path.dirname(recipefile), expected_status) 1600 self._check_repo_status(os.path.dirname(recipefile), expected_status)
1297 1601
1602 def test_devtool_finish_modify_git_subdir(self):
1603 # Check preconditions
1604 testrecipe = 'dos2unix'
1605 self.append_config('ERROR_QA:remove:pn-dos2unix = "patch-status"\n')
1606 bb_vars = get_bb_vars(['SRC_URI', 'S', 'WORKDIR', 'FILE'], testrecipe)
1607 self.assertIn('git://', bb_vars['SRC_URI'], 'This test expects the %s recipe to be a git recipe' % testrecipe)
1608 workdir_git = '%s/git/' % bb_vars['WORKDIR']
1609 if not bb_vars['S'].startswith(workdir_git):
1610 self.fail('This test expects the %s recipe to be building from a subdirectory of the git repo' % testrecipe)
1611 subdir = bb_vars['S'].split(workdir_git, 1)[1]
1612 # Clean up anything in the workdir/sysroot/sstate cache
1613 bitbake('%s -c cleansstate' % testrecipe)
1614 # Try modifying a recipe
1615 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
1616 self.track_for_cleanup(tempdir)
1617 self.track_for_cleanup(self.workspacedir)
1618 self.add_command_to_tearDown('bitbake -c clean %s' % testrecipe)
1619 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
1620 result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
1621 testsrcfile = os.path.join(tempdir, subdir, 'dos2unix.c')
1622 self.assertExists(testsrcfile, 'Extracted source could not be found')
1623 self.assertExists(os.path.join(self.workspacedir, 'conf', 'layer.conf'), 'Workspace directory not created. devtool output: %s' % result.output)
1624 self.assertNotExists(os.path.join(tempdir, subdir, '.git'), 'Subdirectory has been initialised as a git repo')
1625 # Check git repo
1626 self._check_src_repo(tempdir)
1627 # Modify file
1628 runCmd("sed -i '1s:^:/* Add a comment */\\n:' %s" % testsrcfile)
1629 result = runCmd('git commit -a -m "Add a comment"', cwd=tempdir)
1630 # Now try updating original recipe
1631 recipefile = bb_vars['FILE']
1632 recipedir = os.path.dirname(recipefile)
1633 self.add_command_to_tearDown('cd %s; rm -f %s/*.patch; git checkout .' % (recipedir, testrecipe))
1634 result = runCmd('devtool update-recipe %s' % testrecipe)
1635 expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
1636 ('??', '.*/%s/%s/$' % (testrecipe, testrecipe))]
1637 self._check_repo_status(os.path.dirname(recipefile), expected_status)
1638 result = runCmd('git diff %s' % os.path.basename(recipefile), cwd=os.path.dirname(recipefile))
1639 removelines = ['SRC_URI = "git://.*"']
1640 addlines = [
1641 'SRC_URI = "git://.* \\\\',
1642 'file://0001-Add-a-comment.patch;patchdir=.. \\\\',
1643 '"'
1644 ]
1645 self._check_diff(result.output, addlines, removelines)
1646 # Put things back so we can run devtool finish on a different layer
1647 runCmd('cd %s; rm -f %s/*.patch; git checkout .' % (recipedir, testrecipe))
1648 # Run devtool finish
1649 res = re.search('recipes-.*', recipedir)
1650 self.assertTrue(res, 'Unable to find recipe subdirectory')
1651 recipesubdir = res[0]
1652 self.add_command_to_tearDown('rm -rf %s' % os.path.join(self.testlayer_path, recipesubdir))
1653 result = runCmd('devtool finish %s meta-selftest' % testrecipe)
1654 # Check bbappend file contents
1655 appendfn = os.path.join(self.testlayer_path, recipesubdir, '%s_%%.bbappend' % testrecipe)
1656 with open(appendfn, 'r') as f:
1657 appendlines = f.readlines()
1658 expected_appendlines = [
1659 'FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
1660 '\n',
1661 'SRC_URI += "file://0001-Add-a-comment.patch;patchdir=.."\n',
1662 '\n'
1663 ]
1664 self.assertEqual(appendlines, expected_appendlines)
1665 self.assertExists(os.path.join(os.path.dirname(appendfn), testrecipe, '0001-Add-a-comment.patch'))
1666 # Try building
1667 bitbake('%s -c patch' % testrecipe)
1668
1669 def test_devtool_git_submodules(self):
1670 # This tests if we can add a patch in a git submodule and extract it properly using devtool finish
1671 # Check preconditions
1672 self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
1673 self.track_for_cleanup(self.workspacedir)
1674 recipe = 'vulkan-samples'
1675 src_uri = get_bb_var('SRC_URI', recipe)
1676 self.assertIn('gitsm://', src_uri, 'This test expects the %s recipe to be a git recipe with submodules' % recipe)
1677 oldrecipefile = get_bb_var('FILE', recipe)
1678 recipedir = os.path.dirname(oldrecipefile)
1679 result = runCmd('git status --porcelain .', cwd=recipedir)
1680 if result.output.strip():
1681 self.fail('Recipe directory for %s contains uncommitted changes' % recipe)
1682 self.assertIn('/meta/', recipedir)
1683 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
1684 self.track_for_cleanup(tempdir)
1685 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
1686 result = runCmd('devtool modify %s %s' % (recipe, tempdir))
1687 self.assertExists(os.path.join(tempdir, 'CMakeLists.txt'), 'Extracted source could not be found')
1688 # Test devtool status
1689 result = runCmd('devtool status')
1690 self.assertIn(recipe, result.output)
1691 self.assertIn(tempdir, result.output)
1692 # Modify a source file in a submodule, (grab the first one)
1693 result = runCmd('git submodule --quiet foreach \'echo $sm_path\'', cwd=tempdir)
1694 submodule = result.output.splitlines()[0]
1695 submodule_path = os.path.join(tempdir, submodule)
1696 runCmd('echo "#This is a first comment" >> testfile', cwd=submodule_path)
1697 result = runCmd('git status --porcelain . ', cwd=submodule_path)
1698 self.assertIn("testfile", result.output)
1699 runCmd('git add testfile; git commit -m "Adding a new file"', cwd=submodule_path)
1700
1701 # Try finish to the original layer
1702 self.add_command_to_tearDown('rm -rf %s ; cd %s ; git checkout %s' % (recipedir, os.path.dirname(recipedir), recipedir))
1703 runCmd('devtool finish -f %s meta' % recipe)
1704 result = runCmd('devtool status')
1705 self.assertNotIn(recipe, result.output, 'Recipe should have been reset by finish but wasn\'t')
1706 self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after finish')
1707 expected_status = [(' M', '.*/%s$' % os.path.basename(oldrecipefile)),
1708 ('??', '.*/.*-Adding-a-new-file.patch$')]
1709 self._check_repo_status(recipedir, expected_status)
1710 # Make sure the patch is added to the recipe with the correct "patchdir" option
1711 result = runCmd('git diff .', cwd=recipedir)
1712 addlines = [
1713 'file://0001-Adding-a-new-file.patch;patchdir=%s \\\\' % submodule
1714 ]
1715 self._check_diff(result.output, addlines, [])
1716
1298class DevtoolExtractTests(DevtoolBase): 1717class DevtoolExtractTests(DevtoolBase):
1299 1718
1300 def test_devtool_extract(self): 1719 def test_devtool_extract(self):
@@ -1343,29 +1762,9 @@ class DevtoolExtractTests(DevtoolBase):
1343 matches2 = glob.glob(stampprefix2 + '*') 1762 matches2 = glob.glob(stampprefix2 + '*')
1344 self.assertFalse(matches2, 'Stamp files exist for recipe %s that should have been cleaned' % testrecipe2) 1763 self.assertFalse(matches2, 'Stamp files exist for recipe %s that should have been cleaned' % testrecipe2)
1345 1764
1765 @OETestTag("runqemu")
1346 def test_devtool_deploy_target(self): 1766 def test_devtool_deploy_target(self):
1347 # NOTE: Whilst this test would seemingly be better placed as a runtime test, 1767 self._check_runqemu_prerequisites()
1348 # unfortunately the runtime tests run under bitbake and you can't run
1349 # devtool within bitbake (since devtool needs to run bitbake itself).
1350 # Additionally we are testing build-time functionality as well, so
1351 # really this has to be done as an oe-selftest test.
1352 #
1353 # Check preconditions
1354 machine = get_bb_var('MACHINE')
1355 if not machine.startswith('qemu'):
1356 self.skipTest('This test only works with qemu machines')
1357 if not os.path.exists('/etc/runqemu-nosudo'):
1358 self.skipTest('You must set up tap devices with scripts/runqemu-gen-tapdevs before running this test')
1359 result = runCmd('PATH="$PATH:/sbin:/usr/sbin" ip tuntap show', ignore_status=True)
1360 if result.status != 0:
1361 result = runCmd('PATH="$PATH:/sbin:/usr/sbin" ifconfig -a', ignore_status=True)
1362 if result.status != 0:
1363 self.skipTest('Failed to determine if tap devices exist with ifconfig or ip: %s' % result.output)
1364 for line in result.output.splitlines():
1365 if line.startswith('tap'):
1366 break
1367 else:
1368 self.skipTest('No tap devices found - you must set up tap devices with scripts/runqemu-gen-tapdevs before running this test')
1369 self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory') 1768 self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
1370 # Definitions 1769 # Definitions
1371 testrecipe = 'mdadm' 1770 testrecipe = 'mdadm'
@@ -1463,6 +1862,14 @@ class DevtoolExtractTests(DevtoolBase):
1463 1862
1464class DevtoolUpgradeTests(DevtoolBase): 1863class DevtoolUpgradeTests(DevtoolBase):
1465 1864
1865 def setUp(self):
1866 super().setUp()
1867 try:
1868 runCmd("git config --global user.name")
1869 runCmd("git config --global user.email")
1870 except:
1871 self.skipTest("Git user.name and user.email must be set")
1872
1466 def test_devtool_upgrade(self): 1873 def test_devtool_upgrade(self):
1467 # Check preconditions 1874 # Check preconditions
1468 self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory') 1875 self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
@@ -1543,6 +1950,54 @@ class DevtoolUpgradeTests(DevtoolBase):
1543 self.assertNotIn(recipe, result.output) 1950 self.assertNotIn(recipe, result.output)
1544 self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after resetting') 1951 self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after resetting')
1545 1952
1953 def test_devtool_upgrade_drop_md5sum(self):
1954 # Check preconditions
1955 self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
1956 self.track_for_cleanup(self.workspacedir)
1957 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
1958 # For the moment, we are using a real recipe.
1959 recipe = 'devtool-upgrade-test3'
1960 version = '1.6.0'
1961 oldrecipefile = get_bb_var('FILE', recipe)
1962 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
1963 self.track_for_cleanup(tempdir)
1964 # Check upgrade. Code does not check if the new PV is older or newer than the current PV, so it may be that
1965 # we are downgrading instead of upgrading.
1966 result = runCmd('devtool upgrade %s %s -V %s' % (recipe, tempdir, version))
1967 # Check new recipe file is present
1968 newrecipefile = os.path.join(self.workspacedir, 'recipes', recipe, '%s_%s.bb' % (recipe, version))
1969 self.assertExists(newrecipefile, 'Recipe file should exist after upgrade')
1970 # Check recipe got changed as expected
1971 with open(oldrecipefile + '.upgraded', 'r') as f:
1972 desiredlines = f.readlines()
1973 with open(newrecipefile, 'r') as f:
1974 newlines = f.readlines()
1975 self.assertEqual(desiredlines, newlines)
1976
1977 def test_devtool_upgrade_all_checksums(self):
1978 # Check preconditions
1979 self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
1980 self.track_for_cleanup(self.workspacedir)
1981 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
1982 # For the moment, we are using a real recipe.
1983 recipe = 'devtool-upgrade-test4'
1984 version = '1.6.0'
1985 oldrecipefile = get_bb_var('FILE', recipe)
1986 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
1987 self.track_for_cleanup(tempdir)
1988 # Check upgrade. Code does not check if the new PV is older or newer than the current PV, so it may be that
1989 # we are downgrading instead of upgrading.
1990 result = runCmd('devtool upgrade %s %s -V %s' % (recipe, tempdir, version))
1991 # Check new recipe file is present
1992 newrecipefile = os.path.join(self.workspacedir, 'recipes', recipe, '%s_%s.bb' % (recipe, version))
1993 self.assertExists(newrecipefile, 'Recipe file should exist after upgrade')
1994 # Check recipe got changed as expected
1995 with open(oldrecipefile + '.upgraded', 'r') as f:
1996 desiredlines = f.readlines()
1997 with open(newrecipefile, 'r') as f:
1998 newlines = f.readlines()
1999 self.assertEqual(desiredlines, newlines)
2000
1546 def test_devtool_layer_plugins(self): 2001 def test_devtool_layer_plugins(self):
1547 """Test that devtool can use plugins from other layers. 2002 """Test that devtool can use plugins from other layers.
1548 2003
@@ -1561,7 +2016,15 @@ class DevtoolUpgradeTests(DevtoolBase):
1561 for p in paths: 2016 for p in paths:
1562 dstdir = os.path.join(dstdir, p) 2017 dstdir = os.path.join(dstdir, p)
1563 if not os.path.exists(dstdir): 2018 if not os.path.exists(dstdir):
1564 os.makedirs(dstdir) 2019 try:
2020 os.makedirs(dstdir)
2021 except PermissionError:
2022 return False
2023 except OSError as e:
2024 if e.errno == errno.EROFS:
2025 return False
2026 else:
2027 raise e
1565 if p == "lib": 2028 if p == "lib":
1566 # Can race with other tests 2029 # Can race with other tests
1567 self.add_command_to_tearDown('rmdir --ignore-fail-on-non-empty %s' % dstdir) 2030 self.add_command_to_tearDown('rmdir --ignore-fail-on-non-empty %s' % dstdir)
@@ -1569,8 +2032,12 @@ class DevtoolUpgradeTests(DevtoolBase):
1569 self.track_for_cleanup(dstdir) 2032 self.track_for_cleanup(dstdir)
1570 dstfile = os.path.join(dstdir, os.path.basename(srcfile)) 2033 dstfile = os.path.join(dstdir, os.path.basename(srcfile))
1571 if srcfile != dstfile: 2034 if srcfile != dstfile:
1572 shutil.copy(srcfile, dstfile) 2035 try:
2036 shutil.copy(srcfile, dstfile)
2037 except PermissionError:
2038 return False
1573 self.track_for_cleanup(dstfile) 2039 self.track_for_cleanup(dstfile)
2040 return True
1574 2041
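The change above makes the helper report whether the copy actually happened, so search paths that live on read-only or permission-restricted locations can be skipped instead of aborting the test. The error-handling pattern in isolation, with placeholder directory and file names:

import errno
import os
import shutil

def try_copy_into(srcfile, dstdir):
    # Returns True on success, False if the destination cannot be written to
    try:
        os.makedirs(dstdir, exist_ok=True)
        shutil.copy(srcfile, os.path.join(dstdir, os.path.basename(srcfile)))
    except PermissionError:
        return False
    except OSError as e:
        if e.errno == errno.EROFS:  # read-only filesystem
            return False
        raise
    return True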
1575 def test_devtool_load_plugin(self): 2042 def test_devtool_load_plugin(self):
1576 """Test that devtool loads only the first found plugin in BBPATH.""" 2043 """Test that devtool loads only the first found plugin in BBPATH."""
@@ -1588,15 +2055,17 @@ class DevtoolUpgradeTests(DevtoolBase):
1588 plugincontent = fh.readlines() 2055 plugincontent = fh.readlines()
1589 try: 2056 try:
1590 self.assertIn('meta-selftest', srcfile, 'wrong bbpath plugin found') 2057 self.assertIn('meta-selftest', srcfile, 'wrong bbpath plugin found')
1591 for path in searchpath: 2058 searchpath = [
1592 self._copy_file_with_cleanup(srcfile, path, 'lib', 'devtool') 2059 path for path in searchpath
2060 if self._copy_file_with_cleanup(srcfile, path, 'lib', 'devtool')
2061 ]
1593 result = runCmd("devtool --quiet count") 2062 result = runCmd("devtool --quiet count")
1594 self.assertEqual(result.output, '1') 2063 self.assertEqual(result.output, '1')
1595 result = runCmd("devtool --quiet multiloaded") 2064 result = runCmd("devtool --quiet multiloaded")
1596 self.assertEqual(result.output, "no") 2065 self.assertEqual(result.output, "no")
1597 for path in searchpath: 2066 for path in searchpath:
1598 result = runCmd("devtool --quiet bbdir") 2067 result = runCmd("devtool --quiet bbdir")
1599 self.assertEqual(result.output, path) 2068 self.assertEqual(os.path.realpath(result.output), os.path.realpath(path))
1600 os.unlink(os.path.join(result.output, 'lib', 'devtool', 'bbpath.py')) 2069 os.unlink(os.path.join(result.output, 'lib', 'devtool', 'bbpath.py'))
1601 finally: 2070 finally:
1602 with open(srcfile, 'w') as fh: 2071 with open(srcfile, 'w') as fh:
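The loop above now keeps only the BBPATH entries the plugin could actually be copied into, and the bbdir check compares canonical paths so layers reached through symlinks still count as a match. Roughly, where copy_ok stands in for the _copy_file_with_cleanup return value:

import os

def usable_search_paths(searchpath, copy_ok):
    # Drop unwritable entries up front so later assertions only see usable ones
    return [path for path in searchpath if copy_ok(path)]

def same_directory(reported, expected):
    # Resolve symlinks on both sides before comparing
    return os.path.realpath(reported) == os.path.realpath(expected)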
@@ -1777,6 +2246,52 @@ class DevtoolUpgradeTests(DevtoolBase):
1777 if files: 2246 if files:
1778 self.fail('Unexpected file(s) copied next to bbappend: %s' % ', '.join(files)) 2247 self.fail('Unexpected file(s) copied next to bbappend: %s' % ', '.join(files))
1779 2248
2249 def test_devtool_finish_update_patch(self):
2250 # This test uses a modified version of the sysdig recipe from meta-oe.
2251 # - The patches have been renamed.
2252 # - The dependencies are commented out since the recipe is not being
2253 # built.
2254 #
2255 # The sysdig recipe is interesting in that it fetches two different Git
2256 # repositories, and there are patches for both. This means that
2257 # devtool will create commits to be ignored, as it uses Git submodules to keep
2258 # track of the second repository.
2259 #
2260 # This test will verify that the ignored commits actually are ignored
2261 # when a commit in between is modified. It will also verify that the
2262 # updated patch keeps its original name.
2263
2264 # Check preconditions
2265 self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
2266 # Try modifying a recipe
2267 self.track_for_cleanup(self.workspacedir)
2268 recipe = 'sysdig-selftest'
2269 recipefile = get_bb_var('FILE', recipe)
2270 recipedir = os.path.dirname(recipefile)
2271 result = runCmd('git status --porcelain .', cwd=recipedir)
2272 if result.output.strip():
2273 self.fail('Recipe directory for %s contains uncommitted changes' % recipe)
2274 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
2275 self.track_for_cleanup(tempdir)
2276 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
2277 result = runCmd('devtool modify %s %s' % (recipe, tempdir))
2278 self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (recipedir, recipe, recipe, os.path.basename(recipefile)))
2279 self.assertExists(os.path.join(tempdir, 'CMakeLists.txt'), 'Extracted source could not be found')
2280 # Make a change to one of the existing commits
2281 result = runCmd('echo "# A comment " >> CMakeLists.txt', cwd=tempdir)
2282 result = runCmd('git status --porcelain', cwd=tempdir)
2283 self.assertIn('M CMakeLists.txt', result.output)
2284 result = runCmd('git commit --fixup HEAD^ CMakeLists.txt', cwd=tempdir)
2285 result = runCmd('git show -s --format=%s', cwd=tempdir)
2286 self.assertIn('fixup! cmake: Pass PROBE_NAME via CFLAGS', result.output)
2287 result = runCmd('GIT_SEQUENCE_EDITOR=true git rebase -i --autosquash devtool-base', cwd=tempdir)
2288 result = runCmd('devtool finish %s meta-selftest' % recipe)
2289 result = runCmd('devtool status')
2290 self.assertNotIn(recipe, result.output, 'Recipe should have been reset by finish but wasn\'t')
2291 self.assertNotExists(os.path.join(self.workspacedir, 'recipes', recipe), 'Recipe directory should not exist after finish')
2292 expected_status = [(' M', '.*/0099-cmake-Pass-PROBE_NAME-via-CFLAGS.patch$')]
2293 self._check_repo_status(recipedir, expected_status)
2294
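The new test folds a change into an earlier commit non-interactively: git commit --fixup HEAD^ records the change as a fixup commit, and setting GIT_SEQUENCE_EDITOR=true lets git rebase -i --autosquash accept the generated todo list unmodified. The same sequence as a standalone helper, with a placeholder repository path and base ref:

import os
import subprocess

def fixup_previous_commit(repo, changed_file, base_ref='devtool-base'):
    # Record the change as a fixup of the commit before HEAD
    subprocess.run(['git', 'commit', '--fixup', 'HEAD^', changed_file],
                   cwd=repo, check=True)
    # Fold it in without opening an editor
    env = dict(os.environ, GIT_SEQUENCE_EDITOR='true')
    subprocess.run(['git', 'rebase', '-i', '--autosquash', base_ref],
                   cwd=repo, check=True, env=env)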
1780 def test_devtool_rename(self): 2295 def test_devtool_rename(self):
1781 # Check preconditions 2296 # Check preconditions
1782 self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory') 2297 self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory')
@@ -1813,7 +2328,6 @@ class DevtoolUpgradeTests(DevtoolBase):
1813 self._test_recipe_contents(newrecipefile, checkvars, []) 2328 self._test_recipe_contents(newrecipefile, checkvars, [])
1814 # Try again - change just name this time 2329 # Try again - change just name this time
1815 result = runCmd('devtool reset -n %s' % newrecipename) 2330 result = runCmd('devtool reset -n %s' % newrecipename)
1816 shutil.rmtree(newsrctree)
1817 add_recipe() 2331 add_recipe()
1818 newrecipefile = os.path.join(self.workspacedir, 'recipes', newrecipename, '%s_%s.bb' % (newrecipename, recipever)) 2332 newrecipefile = os.path.join(self.workspacedir, 'recipes', newrecipename, '%s_%s.bb' % (newrecipename, recipever))
1819 result = runCmd('devtool rename %s %s' % (recipename, newrecipename)) 2333 result = runCmd('devtool rename %s %s' % (recipename, newrecipename))
@@ -1826,7 +2340,6 @@ class DevtoolUpgradeTests(DevtoolBase):
1826 self._test_recipe_contents(newrecipefile, checkvars, []) 2340 self._test_recipe_contents(newrecipefile, checkvars, [])
1827 # Try again - change just version this time 2341 # Try again - change just version this time
1828 result = runCmd('devtool reset -n %s' % newrecipename) 2342 result = runCmd('devtool reset -n %s' % newrecipename)
1829 shutil.rmtree(newsrctree)
1830 add_recipe() 2343 add_recipe()
1831 newrecipefile = os.path.join(self.workspacedir, 'recipes', recipename, '%s_%s.bb' % (recipename, newrecipever)) 2344 newrecipefile = os.path.join(self.workspacedir, 'recipes', recipename, '%s_%s.bb' % (recipename, newrecipever))
1832 result = runCmd('devtool rename %s -V %s' % (recipename, newrecipever)) 2345 result = runCmd('devtool rename %s -V %s' % (recipename, newrecipever))
@@ -1858,8 +2371,9 @@ class DevtoolUpgradeTests(DevtoolBase):
1858 Expected: devtool modify is able to checkout the source of the kernel 2371 Expected: devtool modify is able to checkout the source of the kernel
1859 and modification to the source and configurations are reflected 2372 and modification to the source and configurations are reflected
1860 when building the kernel. 2373 when building the kernel.
1861 """ 2374 """
1862 kernel_provider = get_bb_var('PREFERRED_PROVIDER_virtual/kernel') 2375 kernel_provider = self.td['PREFERRED_PROVIDER_virtual/kernel']
2376
1863 # Clean up the environment 2377 # Clean up the environment
1864 bitbake('%s -c clean' % kernel_provider) 2378 bitbake('%s -c clean' % kernel_provider)
1865 tempdir = tempfile.mkdtemp(prefix='devtoolqa') 2379 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
@@ -1886,33 +2400,545 @@ class DevtoolUpgradeTests(DevtoolBase):
1886 self.assertExists(os.path.join(tempdir, 'Makefile'), 'Extracted source could not be found') 2400 self.assertExists(os.path.join(tempdir, 'Makefile'), 'Extracted source could not be found')
1887 #Step 4.2 2401 #Step 4.2
1888 configfile = os.path.join(tempdir,'.config') 2402 configfile = os.path.join(tempdir,'.config')
1889 diff = runCmd('diff %s %s' % (tmpconfig, configfile)) 2403 runCmd('diff %s %s' % (tmpconfig, configfile))
1890 self.assertEqual(0,diff.status,'Kernel .config file is not the same using bitbake and devtool') 2404
1891 #Step 4.3 2405 #Step 4.3
1892 #NOTE: virtual/kernel is mapped to kernel_provider 2406 #NOTE: virtual/kernel is mapped to kernel_provider
1893 result = runCmd('devtool build %s' % kernel_provider) 2407 runCmd('devtool build %s' % kernel_provider)
1894 self.assertEqual(0,result.status,'Cannot build kernel using `devtool build`')
1895 kernelfile = os.path.join(get_bb_var('KBUILD_OUTPUT', kernel_provider), 'vmlinux') 2408 kernelfile = os.path.join(get_bb_var('KBUILD_OUTPUT', kernel_provider), 'vmlinux')
1896 self.assertExists(kernelfile, 'Kernel was not build correctly') 2409 self.assertExists(kernelfile, 'Kernel was not build correctly')
1897 2410
1898 #Modify the kernel source 2411 #Modify the kernel source
1899 modfile = os.path.join(tempdir,'arch/x86/boot/header.S') 2412 modfile = os.path.join(tempdir, 'init/version.c')
1900 modstring = "Use a boot loader. Devtool testing." 2413 # Moved to uts.h in 6.1 onwards
1901 modapplied = runCmd("sed -i 's/Use a boot loader./%s/' %s" % (modstring, modfile)) 2414 modfile2 = os.path.join(tempdir, 'include/linux/uts.h')
1902 self.assertEqual(0,modapplied.status,'Modification to %s on kernel source failed' % modfile) 2415 runCmd("sed -i 's/Linux/LiNuX/g' %s %s" % (modfile, modfile2))
2416
1903 #Modify the configuration 2417 #Modify the configuration
1904 codeconfigfile = os.path.join(tempdir,'.config.new') 2418 codeconfigfile = os.path.join(tempdir, '.config.new')
1905 modconfopt = "CONFIG_SG_POOL=n" 2419 modconfopt = "CONFIG_SG_POOL=n"
1906 modconf = runCmd("sed -i 's/CONFIG_SG_POOL=y/%s/' %s" % (modconfopt, codeconfigfile)) 2420 runCmd("sed -i 's/CONFIG_SG_POOL=y/%s/' %s" % (modconfopt, codeconfigfile))
1907 self.assertEqual(0,modconf.status,'Modification to %s failed' % codeconfigfile) 2421
1908 #Build again kernel with devtool 2422 #Build again kernel with devtool
1909 rebuild = runCmd('devtool build %s' % kernel_provider) 2423 runCmd('devtool build %s' % kernel_provider)
1910 self.assertEqual(0,rebuild.status,'Fail to build kernel after modification of source and config') 2424
1911 #Step 4.4 2425 #Step 4.4
1912 bzimagename = 'bzImage-' + get_bb_var('KERNEL_VERSION_NAME', kernel_provider) 2426 runCmd("grep '%s' %s" % ('LiNuX', kernelfile))
1913 bzimagefile = os.path.join(get_bb_var('D', kernel_provider),'boot', bzimagename) 2427
1914 checkmodcode = runCmd("grep '%s' %s" % (modstring, bzimagefile))
1915 self.assertEqual(0,checkmodcode.status,'Modification on kernel source failed')
1916 #Step 4.5 2428 #Step 4.5
1917 checkmodconfg = runCmd("grep %s %s" % (modconfopt, codeconfigfile)) 2429 runCmd("grep %s %s" % (modconfopt, codeconfigfile))
1918 self.assertEqual(0,checkmodconfg.status,'Modification to configuration file failed') 2430
2431
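The final grep above only needs to confirm that the modified banner string ended up inside the built vmlinux. The same check can be done in pure Python; the vmlinux path below is hypothetical, the real test takes it from KBUILD_OUTPUT:

def binary_contains(path, needle):
    # Search the raw bytes of the build artifact for the expected string
    with open(path, 'rb') as f:
        return needle.encode('utf-8') in f.read()

# e.g. assert binary_contains('/path/to/vmlinux', 'LiNuX')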
2432class DevtoolIdeSdkTests(DevtoolBase):
2433 def _write_bb_config(self, recipe_names):
2434 """Helper to write the bitbake local.conf file"""
2435 conf_lines = [
2436 'IMAGE_CLASSES += "image-combined-dbg"',
2437 'IMAGE_GEN_DEBUGFS = "1"',
2438 'IMAGE_INSTALL:append = " gdbserver %s"' % ' '.join(
2439 [r + '-ptest' for r in recipe_names])
2440 ]
2441 self.write_config("\n".join(conf_lines))
2442
2443 def _check_workspace(self):
2444 """Check if a workspace directory is available and setup the cleanup"""
2445 self.assertTrue(not os.path.exists(self.workspacedir),
2446 'This test cannot be run with a workspace directory under the build directory')
2447 self.track_for_cleanup(self.workspacedir)
2448 self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
2449
2450 def _workspace_scripts_dir(self, recipe_name):
2451 return os.path.realpath(os.path.join(self.builddir, 'workspace', 'ide-sdk', recipe_name, 'scripts'))
2452
2453 def _sources_scripts_dir(self, src_dir):
2454 return os.path.realpath(os.path.join(src_dir, 'oe-scripts'))
2455
2456 def _workspace_gdbinit_dir(self, recipe_name):
2457 return os.path.realpath(os.path.join(self.builddir, 'workspace', 'ide-sdk', recipe_name, 'scripts', 'gdbinit'))
2458
2459 def _sources_gdbinit_dir(self, src_dir):
2460 return os.path.realpath(os.path.join(src_dir, 'oe-gdbinit'))
2461
2462 def _devtool_ide_sdk_recipe(self, recipe_name, build_file, testimage):
2463 """Setup a recipe for working with devtool ide-sdk
2464
2465 Basically devtool modify -x followed by some tests
2466 """
2467 tempdir = tempfile.mkdtemp(prefix='devtoolqa')
2468 self.track_for_cleanup(tempdir)
2469 self.add_command_to_tearDown('bitbake -c clean %s' % recipe_name)
2470
2471 result = runCmd('devtool modify %s -x %s' % (recipe_name, tempdir))
2472 self.assertExists(os.path.join(tempdir, build_file),
2473 'Extracted source could not be found')
2474 self.assertExists(os.path.join(self.workspacedir, 'conf',
2475 'layer.conf'), 'Workspace directory not created')
2476 matches = glob.glob(os.path.join(self.workspacedir,
2477 'appends', recipe_name + '.bbappend'))
2478 self.assertTrue(matches, 'bbappend not created %s' % result.output)
2479
2480 # Test devtool status
2481 result = runCmd('devtool status')
2482 self.assertIn(recipe_name, result.output)
2483 self.assertIn(tempdir, result.output)
2484 self._check_src_repo(tempdir)
2485
2486 # Usually devtool ide-sdk would initiate the build of the SDK.
2487 # But there is a circular dependency between starting Qemu and passing the IP of runqemu to devtool ide-sdk.
2488 if testimage:
2489 bitbake("%s qemu-native qemu-helper-native" % testimage)
2490 deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
2491 self.add_command_to_tearDown('bitbake -c clean %s' % testimage)
2492 self.add_command_to_tearDown(
2493 'rm -f %s/%s*' % (deploy_dir_image, testimage))
2494
2495 return tempdir
2496
2497 def _get_recipe_ids(self, recipe_name):
2498 """IDs needed to write recipe specific config entries into IDE config files"""
2499 package_arch = get_bb_var('PACKAGE_ARCH', recipe_name)
2500 recipe_id = recipe_name + "-" + package_arch
2501 recipe_id_pretty = recipe_name + ": " + package_arch
2502 return (recipe_id, recipe_id_pretty)
2503
2504 def _verify_install_script_code(self, tempdir, recipe_name):
2505 """Verify the scripts referred by the tasks.json file are fine.
2506
2507 This function does not depend on Qemu. Therefore it verifies the scripts
2508 exist and that the delete step works as expected. But it does not try to
2509 deploy to Qemu.
2510 """
2511 recipe_id, recipe_id_pretty = self._get_recipe_ids(recipe_name)
2512 with open(os.path.join(tempdir, '.vscode', 'tasks.json')) as tasks_j:
2513 tasks_d = json.load(tasks_j)
2514 tasks = tasks_d["tasks"]
2515 task_install = next(
2516 (task for task in tasks if task["label"] == "install && deploy-target %s" % recipe_id_pretty), None)
2517 self.assertIsNot(task_install, None)
2518 # execute only the bb_run_do_install script since the deploy would require e.g. Qemu running.
2519 i_and_d_script = "install_and_deploy_" + recipe_id
2520 i_and_d_script_path = os.path.join(
2521 self._workspace_scripts_dir(recipe_name), i_and_d_script)
2522 self.assertExists(i_and_d_script_path)
2523 del_script = "delete_package_dirs_" + recipe_id
2524 del_script_path = os.path.join(
2525 self._workspace_scripts_dir(recipe_name), del_script)
2526 self.assertExists(del_script_path)
2527 runCmd(del_script_path, cwd=tempdir)
2528
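The helper above locates the install-and-deploy task by its label inside the generated .vscode/tasks.json and then runs the matching delete script. The lookup pattern on its own, with an illustrative label:

import json
import os

def find_vscode_task(src_dir, label):
    # tasks.json holds a list of task objects, each identified by its "label"
    with open(os.path.join(src_dir, '.vscode', 'tasks.json')) as f:
        tasks = json.load(f)['tasks']
    return next((task for task in tasks if task['label'] == label), None)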
2529 def _devtool_ide_sdk_qemu(self, tempdir, qemu, recipe_name, example_exe):
2530 """Verify deployment and execution in Qemu system work for one recipe.
2531
2532 This function checks the entire SDK workflow: changing the code, recompiling
2533 it and deploying it back to Qemu, and checking that the changes have been
2534 incorporated into the provided binaries. It also runs the tests of the recipe.
2535 """
2536 recipe_id, _ = self._get_recipe_ids(recipe_name)
2537 i_and_d_script = "install_and_deploy_" + recipe_id
2538 install_deploy_cmd = os.path.join(
2539 self._workspace_scripts_dir(recipe_name), i_and_d_script)
2540 self.assertExists(install_deploy_cmd,
2541 '%s script not found' % install_deploy_cmd)
2542 runCmd(install_deploy_cmd)
2543
2544 MAGIC_STRING_ORIG = "Magic: 123456789"
2545 MAGIC_STRING_NEW = "Magic: 987654321"
2546 ptest_cmd = "ptest-runner " + recipe_name
2547
2548 # validate that SSH is working
2549 status, _ = qemu.run("uname")
2550 self.assertEqual(
2551 status, 0, msg="Failed to connect to the SSH server on Qemu")
2552
2553 # Verify the unmodified example prints the magic string
2554 status, output = qemu.run(example_exe)
2555 self.assertEqual(status, 0, msg="%s failed: %s" %
2556 (example_exe, output))
2557 self.assertIn(MAGIC_STRING_ORIG, output)
2558
2559 # Verify the unmodified ptests work
2560 status, output = qemu.run(ptest_cmd)
2561 self.assertEqual(status, 0, msg="%s failed: %s" % (ptest_cmd, output))
2562 self.assertIn("PASS: cpp-example-lib", output)
2563
2564 # Verify remote debugging works
2565 self._gdb_cross_debugging(
2566 qemu, recipe_name, example_exe, MAGIC_STRING_ORIG)
2567
2568 # Replace the Magic String in the code, compile and deploy to Qemu
2569 cpp_example_lib_hpp = os.path.join(tempdir, 'cpp-example-lib.hpp')
2570 with open(cpp_example_lib_hpp, 'r') as file:
2571 cpp_code = file.read()
2572 cpp_code = cpp_code.replace(MAGIC_STRING_ORIG, MAGIC_STRING_NEW)
2573 with open(cpp_example_lib_hpp, 'w') as file:
2574 file.write(cpp_code)
2575 runCmd(install_deploy_cmd, cwd=tempdir)
2576
2577 # Verify the modified example prints the modified magic string
2578 status, output = qemu.run(example_exe)
2579 self.assertEqual(status, 0, msg="%s failed: %s" %
2580 (example_exe, output))
2581 self.assertNotIn(MAGIC_STRING_ORIG, output)
2582 self.assertIn(MAGIC_STRING_NEW, output)
2583
2584 # Verify the modified example ptests work
2585 status, output = qemu.run(ptest_cmd)
2586 self.assertEqual(status, 0, msg="%s failed: %s" % (ptest_cmd, output))
2587 self.assertIn("PASS: cpp-example-lib", output)
2588
2589 # Verify remote debugging works with the modified magic string
2590 self._gdb_cross_debugging(
2591 qemu, recipe_name, example_exe, MAGIC_STRING_NEW)
2592
2593 def _gdb_cross(self):
2594 """Verify gdb-cross is provided by devtool ide-sdk"""
2595 target_arch = self.td["TARGET_ARCH"]
2596 target_sys = self.td["TARGET_SYS"]
2597 gdb_recipe = "gdb-cross-" + target_arch
2598 gdb_binary = target_sys + "-gdb"
2599
2600 native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", gdb_recipe)
2601 r = runCmd("%s --version" % gdb_binary,
2602 native_sysroot=native_sysroot, target_sys=target_sys)
2603 self.assertEqual(r.status, 0)
2604 self.assertIn("GNU gdb", r.output)
2605
2606 def _gdb_cross_debugging(self, qemu, recipe_name, example_exe, magic_string):
2607 """Verify gdb-cross is working
2608
2609 Test remote debugging:
2610 break main
2611 run
2612 continue
2613 break CppExample::print_json()
2614 continue
2615 print CppExample::test_string.compare("cpp-example-lib Magic: 123456789")
2616 $1 = 0
2617 print CppExample::test_string.compare("cpp-example-lib Magic: 123456789aaa")
2618 $2 = -3
2619 list cpp-example-lib.hpp:13,13
2620 13 inline static const std::string test_string = "cpp-example-lib Magic: 123456789";
2621 continue
2622 """
2623 sshargs = '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
2624 gdbserver_script = os.path.join(self._workspace_scripts_dir(
2625 recipe_name), 'gdbserver_1234_usr-bin-' + example_exe + '_m')
2626 gdb_script = os.path.join(self._workspace_scripts_dir(
2627 recipe_name), 'gdb_1234_usr-bin-' + example_exe)
2628
2629 # Start a gdbserver
2630 r = runCmd(gdbserver_script)
2631 self.assertEqual(r.status, 0)
2632
2633 # Check there is a gdbserver running
2634 r = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, 'ps'))
2635 self.assertEqual(r.status, 0)
2636 self.assertIn("gdbserver ", r.output)
2637
2638 # Check the pid file is correct
2639 test_cmd = "cat /proc/$(cat /tmp/gdbserver_1234_usr-bin-" + \
2640 example_exe + "/pid)/cmdline"
2641 r = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, test_cmd))
2642 self.assertEqual(r.status, 0)
2643 self.assertIn("gdbserver", r.output)
2644
2645 # Test remote debugging works
2646 gdb_batch_cmd = " --batch -ex 'break main' -ex 'run'"
2647 gdb_batch_cmd += " -ex 'break CppExample::print_json()' -ex 'continue'"
2648 gdb_batch_cmd += " -ex 'print CppExample::test_string.compare(\"cpp-example-lib %s\")'" % magic_string
2649 gdb_batch_cmd += " -ex 'print CppExample::test_string.compare(\"cpp-example-lib %saaa\")'" % magic_string
2650 gdb_batch_cmd += " -ex 'list cpp-example-lib.hpp:13,13'"
2651 gdb_batch_cmd += " -ex 'continue'"
2652 r = runCmd(gdb_script + gdb_batch_cmd)
2653 self.logger.debug("%s %s returned: %s", gdb_script,
2654 gdb_batch_cmd, r.output)
2655 self.assertEqual(r.status, 0)
2656 self.assertIn("Breakpoint 1, main", r.output)
2657 self.assertIn("$1 = 0", r.output) # test.string.compare equal
2658 self.assertIn("$2 = -3", r.output) # test.string.compare longer
2659 self.assertIn(
2660 'inline static const std::string test_string = "cpp-example-lib %s";' % magic_string, r.output)
2661 self.assertIn("exited normally", r.output)
2662
2663 # Stop the gdbserver
2664 r = runCmd(gdbserver_script + ' stop')
2665 self.assertEqual(r.status, 0)
2666
2667 # Check there is no gdbserver running
2668 r = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, 'ps'))
2669 self.assertEqual(r.status, 0)
2670 self.assertNotIn("gdbserver ", r.output)
2671
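The generated gdb script is driven entirely in batch mode: each -ex option runs one gdb command and --batch makes gdb exit once the list is exhausted. A small sketch of assembling such a command line, with placeholder script name and commands:

def gdb_batch_cmdline(gdb_script, commands):
    # Quote each command for the shell and chain it with -ex
    return gdb_script + ' --batch ' + ' '.join("-ex '%s'" % c for c in commands)

# gdb_batch_cmdline('./gdb_1234_usr-bin-cmake-example',
#                   ['break main', 'run', 'continue'])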
2672 def _verify_cmake_preset(self, tempdir):
2673 """Verify the generated cmake preset works as expected
2674
2675 Check if compiling works
2676 Check if unit tests can be executed in qemu (not qemu-system)
2677 """
2678 with open(os.path.join(tempdir, 'CMakeUserPresets.json')) as cmake_preset_j:
2679 cmake_preset_d = json.load(cmake_preset_j)
2680 config_presets = cmake_preset_d["configurePresets"]
2681 self.assertEqual(len(config_presets), 1)
2682 cmake_exe = config_presets[0]["cmakeExecutable"]
2683 preset_name = config_presets[0]["name"]
2684
2685 # Verify the wrapper for cmake native is available
2686 self.assertExists(cmake_exe)
2687
2688 # Verify the cmake preset generated by devtool ide-sdk is available
2689 result = runCmd('%s --list-presets' % cmake_exe, cwd=tempdir)
2690 self.assertIn(preset_name, result.output)
2691
2692 # Verify cmake re-uses the o files compiled by bitbake
2693 result = runCmd('%s --build --preset %s' %
2694 (cmake_exe, preset_name), cwd=tempdir)
2695 self.assertIn("ninja: no work to do.", result.output)
2696
2697 # Verify the unit tests work (in Qemu user mode)
2698 result = runCmd('%s --build --preset %s --target test' %
2699 (cmake_exe, preset_name), cwd=tempdir)
2700 self.assertIn("100% tests passed", result.output)
2701
2702 # Verify re-building and testing works again
2703 result = runCmd('%s --build --preset %s --target clean' %
2704 (cmake_exe, preset_name), cwd=tempdir)
2705 self.assertIn("Cleaning", result.output)
2706 result = runCmd('%s --build --preset %s' %
2707 (cmake_exe, preset_name), cwd=tempdir)
2708 self.assertIn("Building", result.output)
2709 self.assertIn("Linking", result.output)
2710 result = runCmd('%s --build --preset %s --target test' %
2711 (cmake_exe, preset_name), cwd=tempdir)
2712 self.assertIn("Running tests...", result.output)
2713 self.assertIn("100% tests passed", result.output)
2714
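The preset checks above rely on the single configure preset that devtool ide-sdk writes into CMakeUserPresets.json: its cmakeExecutable points at the cmake-native wrapper and its name is what gets passed to --preset. A sketch of reading those two fields, with a placeholder source directory:

import json
import os

def read_cmake_user_preset(src_dir):
    with open(os.path.join(src_dir, 'CMakeUserPresets.json')) as f:
        presets = json.load(f)['configurePresets']
    assert len(presets) == 1, 'expected exactly one generated preset'
    return presets[0]['cmakeExecutable'], presets[0]['name']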
2715 @OETestTag("runqemu")
2716 def test_devtool_ide_sdk_none_qemu(self):
2717 """Start qemu-system and run tests for multiple recipes. ide=none is used."""
2718 recipe_names = ["cmake-example", "meson-example"]
2719 testimage = "oe-selftest-image"
2720
2721 self._check_workspace()
2722 self._write_bb_config(recipe_names)
2723 self._check_runqemu_prerequisites()
2724
2725 # Verify deployment to Qemu (system mode) works
2726 bitbake(testimage)
2727 with runqemu(testimage, runqemuparams="nographic") as qemu:
2728 # cmake-example recipe
2729 recipe_name = "cmake-example"
2730 example_exe = "cmake-example"
2731 build_file = "CMakeLists.txt"
2732 tempdir = self._devtool_ide_sdk_recipe(
2733 recipe_name, build_file, testimage)
2734 bitbake_sdk_cmd = 'devtool ide-sdk %s %s -t root@%s -c --ide=none' % (
2735 recipe_name, testimage, qemu.ip)
2736 runCmd(bitbake_sdk_cmd)
2737 self._gdb_cross()
2738 self._verify_cmake_preset(tempdir)
2739 self._devtool_ide_sdk_qemu(tempdir, qemu, recipe_name, example_exe)
2740 # Verify the oe-scripts sym-link is valid
2741 self.assertEqual(self._workspace_scripts_dir(
2742 recipe_name), self._sources_scripts_dir(tempdir))
2743
2744 # meson-example recipe
2745 recipe_name = "meson-example"
2746 example_exe = "mesonex"
2747 build_file = "meson.build"
2748 tempdir = self._devtool_ide_sdk_recipe(
2749 recipe_name, build_file, testimage)
2750 bitbake_sdk_cmd = 'devtool ide-sdk %s %s -t root@%s -c --ide=none' % (
2751 recipe_name, testimage, qemu.ip)
2752 runCmd(bitbake_sdk_cmd)
2753 self._gdb_cross()
2754 self._devtool_ide_sdk_qemu(tempdir, qemu, recipe_name, example_exe)
2755 # Verify the oe-scripts sym-link is valid
2756 self.assertEqual(self._workspace_scripts_dir(
2757 recipe_name), self._sources_scripts_dir(tempdir))
2758
2759 def test_devtool_ide_sdk_code_cmake(self):
2760 """Verify a cmake recipe works with ide=code mode"""
2761 recipe_name = "cmake-example"
2762 build_file = "CMakeLists.txt"
2763 testimage = "oe-selftest-image"
2764
2765 self._check_workspace()
2766 self._write_bb_config([recipe_name])
2767 tempdir = self._devtool_ide_sdk_recipe(
2768 recipe_name, build_file, testimage)
2769 bitbake_sdk_cmd = 'devtool ide-sdk %s %s -t root@192.168.17.17 -c --ide=code' % (
2770 recipe_name, testimage)
2771 runCmd(bitbake_sdk_cmd)
2772 self._verify_cmake_preset(tempdir)
2773 self._verify_install_script_code(tempdir, recipe_name)
2774 self._gdb_cross()
2775
2776 def test_devtool_ide_sdk_code_meson(self):
2777 """Verify a meson recipe works with ide=code mode"""
2778 recipe_name = "meson-example"
2779 build_file = "meson.build"
2780 testimage = "oe-selftest-image"
2781
2782 self._check_workspace()
2783 self._write_bb_config([recipe_name])
2784 tempdir = self._devtool_ide_sdk_recipe(
2785 recipe_name, build_file, testimage)
2786 bitbake_sdk_cmd = 'devtool ide-sdk %s %s -t root@192.168.17.17 -c --ide=code' % (
2787 recipe_name, testimage)
2788 runCmd(bitbake_sdk_cmd)
2789
2790 with open(os.path.join(tempdir, '.vscode', 'settings.json')) as settings_j:
2791 settings_d = json.load(settings_j)
2792 meson_exe = settings_d["mesonbuild.mesonPath"]
2793 meson_build_folder = settings_d["mesonbuild.buildFolder"]
2794
2795 # Verify the wrapper for meson native is available
2796 self.assertExists(meson_exe)
2797
2798 # Verify meson re-uses the o files compiled by bitbake
2799 result = runCmd('%s compile -C %s' %
2800 (meson_exe, meson_build_folder), cwd=tempdir)
2801 self.assertIn("ninja: no work to do.", result.output)
2802
2803 # Verify the unit tests work (in Qemu)
2804 runCmd('%s test -C %s' % (meson_exe, meson_build_folder), cwd=tempdir)
2805
2806 # Verify re-building and testing works again
2807 result = runCmd('%s compile -C %s --clean' %
2808 (meson_exe, meson_build_folder), cwd=tempdir)
2809 self.assertIn("Cleaning...", result.output)
2810 result = runCmd('%s compile -C %s' %
2811 (meson_exe, meson_build_folder), cwd=tempdir)
2812 self.assertIn("Linking target", result.output)
2813 runCmd('%s test -C %s' % (meson_exe, meson_build_folder), cwd=tempdir)
2814
2815 self._verify_install_script_code(tempdir, recipe_name)
2816 self._gdb_cross()
2817
2818 def test_devtool_ide_sdk_shared_sysroots(self):
2819 """Verify the shared sysroot SDK"""
2820
2821 # Handle the workspace (which is not needed by this test case)
2822 self._check_workspace()
2823
2824 result_init = runCmd(
2825 'devtool ide-sdk -m shared oe-selftest-image cmake-example meson-example --ide=code')
2826 bb_vars = get_bb_vars(
2827 ['REAL_MULTIMACH_TARGET_SYS', 'DEPLOY_DIR_IMAGE', 'COREBASE'], "meta-ide-support")
2828 environment_script = 'environment-setup-%s' % bb_vars['REAL_MULTIMACH_TARGET_SYS']
2829 deploydir = bb_vars['DEPLOY_DIR_IMAGE']
2830 environment_script_path = os.path.join(deploydir, environment_script)
2831 cpp_example_src = os.path.join(
2832 bb_vars['COREBASE'], 'meta-selftest', 'recipes-test', 'cpp', 'files')
2833
2834 # Verify the cross environment script is available
2835 self.assertExists(environment_script_path)
2836
2837 def runCmdEnv(cmd, cwd):
2838 cmd = '/bin/sh -c ". %s > /dev/null && %s"' % (
2839 environment_script_path, cmd)
2840 return runCmd(cmd, cwd)
2841
2842 # Verify building the C++ example works with CMake
2843 tempdir_cmake = tempfile.mkdtemp(prefix='devtoolqa')
2844 self.track_for_cleanup(tempdir_cmake)
2845
2846 result_cmake = runCmdEnv("which cmake", cwd=tempdir_cmake)
2847 cmake_native = os.path.normpath(result_cmake.output.strip())
2848 self.assertExists(cmake_native)
2849
2850 runCmdEnv('cmake %s' % cpp_example_src, cwd=tempdir_cmake)
2851 runCmdEnv('cmake --build %s' % tempdir_cmake, cwd=tempdir_cmake)
2852
2853 # Verify the printed note really refers to a cmake executable
2854 cmake_native_code = ""
2855 for line in result_init.output.splitlines():
2856 m = re.search(r'"cmake.cmakePath": "(.*)"', line)
2857 if m:
2858 cmake_native_code = m.group(1)
2859 break
2860 self.assertExists(cmake_native_code)
2861 self.assertEqual(cmake_native, cmake_native_code)
2862
2863 # Verify building the C++ example works with Meson
2864 tempdir_meson = tempfile.mkdtemp(prefix='devtoolqa')
2865 self.track_for_cleanup(tempdir_meson)
2866
2867 result_cmake = runCmdEnv("which meson", cwd=tempdir_meson)
2868 meson_native = os.path.normpath(result_cmake.output.strip())
2869 self.assertExists(meson_native)
2870
2871 runCmdEnv('meson setup %s' % tempdir_meson, cwd=cpp_example_src)
2872 runCmdEnv('meson compile', cwd=tempdir_meson)
2873
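runCmdEnv above wraps every command so that the cross environment script is sourced first in the same shell, keeping the SDK environment out of the test process itself. A standalone equivalent using subprocess, with a placeholder environment script path:

import subprocess

def run_in_sdk_env(env_script, cmd, cwd):
    # Source the setup script, discard its output, then run the actual command
    wrapped = '. %s > /dev/null && %s' % (env_script, cmd)
    return subprocess.run(['/bin/sh', '-c', wrapped], cwd=cwd, check=True,
                          capture_output=True, text=True)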
2874 def test_devtool_ide_sdk_plugins(self):
2875 """Test that devtool ide-sdk can use plugins from other layers."""
2876
2877 # We need a workspace layer and a modified recipe (but no image)
2878 modified_recipe_name = "meson-example"
2879 modified_build_file = "meson.build"
2880 testimage = "oe-selftest-image"
2881 shared_recipe_name = "cmake-example"
2882
2883 self._check_workspace()
2884 self._write_bb_config([modified_recipe_name])
2885 tempdir = self._devtool_ide_sdk_recipe(
2886 modified_recipe_name, modified_build_file, None)
2887
2888 IDE_RE = re.compile(r'.*--ide \{(.*)\}.*')
2889
2890 def get_ides_from_help(help_str):
2891 m = IDE_RE.search(help_str)
2892 return m.group(1).split(',')
2893
2894 # verify the default plugins are available but the foo plugin is not
2895 result = runCmd('devtool ide-sdk -h')
2896 found_ides = get_ides_from_help(result.output)
2897 self.assertIn('code', found_ides)
2898 self.assertIn('none', found_ides)
2899 self.assertNotIn('foo', found_ides)
2900
2901 shared_config_file = os.path.join(tempdir, 'shared-config.txt')
2902 shared_config_str = 'Dummy shared IDE config'
2903 modified_config_file = os.path.join(tempdir, 'modified-config.txt')
2904 modified_config_str = 'Dummy modified IDE config'
2905
2906 # Generate a foo plugin in the workspace layer
2907 plugin_dir = os.path.join(
2908 self.workspacedir, 'lib', 'devtool', 'ide_plugins')
2909 os.makedirs(plugin_dir)
2910 plugin_code = 'from devtool.ide_plugins import IdeBase\n\n'
2911 plugin_code += 'class IdeFoo(IdeBase):\n'
2912 plugin_code += ' def setup_shared_sysroots(self, shared_env):\n'
2913 plugin_code += ' with open("%s", "w") as config_file:\n' % shared_config_file
2914 plugin_code += ' config_file.write("%s")\n\n' % shared_config_str
2915 plugin_code += ' def setup_modified_recipe(self, args, image_recipe, modified_recipe):\n'
2916 plugin_code += ' with open("%s", "w") as config_file:\n' % modified_config_file
2917 plugin_code += ' config_file.write("%s")\n\n' % modified_config_str
2918 plugin_code += 'def register_ide_plugin(ide_plugins):\n'
2919 plugin_code += ' ide_plugins["foo"] = IdeFoo\n'
2920
2921 plugin_py = os.path.join(plugin_dir, 'ide_foo.py')
2922 with open(plugin_py, 'w') as plugin_file:
2923 plugin_file.write(plugin_code)
2924
2925 # Verify the foo plugin is available as well
2926 result = runCmd('devtool ide-sdk -h')
2927 found_ides = get_ides_from_help(result.output)
2928 self.assertIn('code', found_ides)
2929 self.assertIn('none', found_ides)
2930 self.assertIn('foo', found_ides)
2931
2932 # Verify the foo plugin generates a shared config
2933 result = runCmd(
2934 'devtool ide-sdk -m shared --skip-bitbake --ide foo %s' % shared_recipe_name)
2935 with open(shared_config_file) as shared_config:
2936 shared_config_new = shared_config.read()
2937 self.assertEqual(shared_config_str, shared_config_new)
2938
2939 # Verify the foo plugin generates a modified config
2940 result = runCmd('devtool ide-sdk --skip-bitbake --ide foo %s %s' %
2941 (modified_recipe_name, testimage))
2942 with open(modified_config_file) as modified_config:
2943 modified_config_new = modified_config.read()
2944 self.assertEqual(modified_config_str, modified_config_new)
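Written out, the plugin file generated by the test above looks roughly like this; the two open() targets are placeholders that the test fills in with temporary paths at runtime, and the import uses the devtool.ide_plugins module that devtool ide-sdk provides:

from devtool.ide_plugins import IdeBase

class IdeFoo(IdeBase):
    def setup_shared_sysroots(self, shared_env):
        # Placeholder path; the test writes a tempdir path here
        with open("/tmp/shared-config.txt", "w") as config_file:
            config_file.write("Dummy shared IDE config")

    def setup_modified_recipe(self, args, image_recipe, modified_recipe):
        # Placeholder path; the test writes a tempdir path here
        with open("/tmp/modified-config.txt", "w") as config_file:
            config_file.write("Dummy modified IDE config")

def register_ide_plugin(ide_plugins):
    ide_plugins["foo"] = IdeFoo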
diff --git a/meta/lib/oeqa/selftest/cases/distrodata.py b/meta/lib/oeqa/selftest/cases/distrodata.py
index e1cfc3b621..ad952c004b 100644
--- a/meta/lib/oeqa/selftest/cases/distrodata.py
+++ b/meta/lib/oeqa/selftest/cases/distrodata.py
@@ -1,11 +1,10 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5from oeqa.selftest.case import OESelftestTestCase 7from oeqa.selftest.case import OESelftestTestCase
6from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars
7from oeqa.utils.decorators import testcase
8from oeqa.utils.ftools import write_file
9 8
10import oe.recipeutils 9import oe.recipeutils
11 10
@@ -18,7 +17,7 @@ class Distrodata(OESelftestTestCase):
18 Product: oe-core 17 Product: oe-core
19 Author: Alexander Kanavin <alex.kanavin@gmail.com> 18 Author: Alexander Kanavin <alex.kanavin@gmail.com>
20 """ 19 """
21 feature = 'LICENSE_FLAGS_WHITELIST += " commercial"\n' 20 feature = 'LICENSE_FLAGS_ACCEPTED += " commercial"\n'
22 self.write_config(feature) 21 self.write_config(feature)
23 22
24 pkgs = oe.recipeutils.get_recipe_upgrade_status() 23 pkgs = oe.recipeutils.get_recipe_upgrade_status()
@@ -49,21 +48,21 @@ but their recipes claim otherwise by setting UPSTREAM_VERSION_UNKNOWN. Please re
49 Author: Alexander Kanavin <alex.kanavin@gmail.com> 48 Author: Alexander Kanavin <alex.kanavin@gmail.com>
50 """ 49 """
51 def is_exception(pkg): 50 def is_exception(pkg):
52 exceptions = ["packagegroup-", "initramfs-", "systemd-machine-units", "target-sdk-provides-dummy"] 51 exceptions = ["packagegroup-",]
53 for i in exceptions: 52 for i in exceptions:
54 if i in pkg: 53 if i in pkg:
55 return True 54 return True
56 return False 55 return False
57 56
58 def is_maintainer_exception(entry): 57 def is_maintainer_exception(entry):
59 exceptions = ["musl", "newlib", "linux-yocto", "linux-dummy", "mesa-gl", "libgfortran", 58 exceptions = ["musl", "newlib", "linux-yocto", "linux-dummy", "mesa-gl", "libgfortran", "libx11-compose-data",
60 "cve-update-db-native"] 59 "cve-update-nvd2-native",]
61 for i in exceptions: 60 for i in exceptions:
62 if i in entry: 61 if i in entry:
63 return True 62 return True
64 return False 63 return False
65 64
66 feature = 'require conf/distro/include/maintainers.inc\nLICENSE_FLAGS_WHITELIST += " commercial"\nPARSE_ALL_RECIPES = "1"\n' 65 feature = 'require conf/distro/include/maintainers.inc\nLICENSE_FLAGS_ACCEPTED += " commercial"\nPARSE_ALL_RECIPES = "1"\nPACKAGE_CLASSES = "package_ipk package_deb package_rpm"\n'
67 self.write_config(feature) 66 self.write_config(feature)
68 67
69 with bb.tinfoil.Tinfoil() as tinfoil: 68 with bb.tinfoil.Tinfoil() as tinfoil:
@@ -74,7 +73,7 @@ but their recipes claim otherwise by setting UPSTREAM_VERSION_UNKNOWN. Please re
74 73
75 missing_recipes = [] 74 missing_recipes = []
76 recipes = [] 75 recipes = []
77 prefix = "RECIPE_MAINTAINER_pn-" 76 prefix = "RECIPE_MAINTAINER:pn-"
78 77
79 # We could have used all_recipes() here, but this method will find 78 # We could have used all_recipes() here, but this method will find
80 # every recipe if we ever move to setting RECIPE_MAINTAINER in recipe files 79 # every recipe if we ever move to setting RECIPE_MAINTAINER in recipe files
diff --git a/meta/lib/oeqa/selftest/cases/efibootpartition.py b/meta/lib/oeqa/selftest/cases/efibootpartition.py
index a61cf9bcb3..fa74103dec 100644
--- a/meta/lib/oeqa/selftest/cases/efibootpartition.py
+++ b/meta/lib/oeqa/selftest/cases/efibootpartition.py
@@ -5,42 +5,29 @@
5# SPDX-License-Identifier: MIT 5# SPDX-License-Identifier: MIT
6# 6#
7 7
8import re
9
10from oeqa.selftest.case import OESelftestTestCase 8from oeqa.selftest.case import OESelftestTestCase
11from oeqa.utils.commands import bitbake, runqemu, get_bb_var 9from oeqa.utils.commands import bitbake, runqemu
10from oeqa.core.decorator.data import skipIfNotMachine
11import oe.types
12 12
13class GenericEFITest(OESelftestTestCase): 13class GenericEFITest(OESelftestTestCase):
14 """EFI booting test class""" 14 """EFI booting test class"""
15 @skipIfNotMachine("qemux86-64", "test is qemux86-64 specific currently")
16 def test_boot_efi(self):
17 cmd = "runqemu nographic serial wic ovmf"
18 if oe.types.qemu_use_kvm(self.td.get('QEMU_USE_KVM', 0), self.td["TARGET_ARCH"]):
19 cmd += " kvm"
20 image = "core-image-minimal"
15 21
16 cmd_common = "runqemu nographic serial wic ovmf" 22 self.write_config("""
17 efi_provider = "systemd-boot" 23EFI_PROVIDER = "systemd-boot"
18 image = "core-image-minimal" 24IMAGE_FSTYPES:pn-%s:append = " wic"
19 machine = "qemux86-64" 25MACHINE_FEATURES:append = " efi"
20 recipes_built = False
21
22 @classmethod
23 def setUpLocal(self):
24 super(GenericEFITest, self).setUpLocal(self)
25
26 self.write_config(self,
27"""
28EFI_PROVIDER = "%s"
29IMAGE_FSTYPES_pn-%s_append = " wic"
30MACHINE = "%s"
31MACHINE_FEATURES_append = " efi"
32WKS_FILE = "efi-bootdisk.wks.in" 26WKS_FILE = "efi-bootdisk.wks.in"
33IMAGE_INSTALL_append = " grub-efi systemd-boot kernel-image-bzimage" 27IMAGE_INSTALL:append = " grub-efi systemd-boot kernel-image-bzimage"
34""" 28"""
35% (self.efi_provider, self.image, self.machine)) 29% (image))
36 if not self.recipes_built:
37 bitbake("ovmf")
38 bitbake(self.image)
39 self.recipes_built = True
40 30
41 @classmethod 31 bitbake(image + " ovmf")
42 def test_boot_efi(self): 32 with runqemu(image, ssh=False, launch_cmd=cmd) as qemu:
43 """Test generic boot partition with qemu"""
44 cmd = "%s %s" % (self.cmd_common, self.machine)
45 with runqemu(self.image, ssh=False, launch_cmd=cmd) as qemu:
46 self.assertTrue(qemu.runner.logged, "Failed: %s" % cmd) 33 self.assertTrue(qemu.runner.logged, "Failed: %s" % cmd)
diff --git a/meta/lib/oeqa/selftest/cases/eSDK.py b/meta/lib/oeqa/selftest/cases/esdk.py
index 862849af35..9f5de2cde7 100644
--- a/meta/lib/oeqa/selftest/cases/eSDK.py
+++ b/meta/lib/oeqa/selftest/cases/esdk.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -8,7 +10,7 @@ import os
8import glob 10import glob
9import time 11import time
10from oeqa.selftest.case import OESelftestTestCase 12from oeqa.selftest.case import OESelftestTestCase
11from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars 13from oeqa.utils.commands import runCmd, bitbake, get_bb_vars
12 14
13class oeSDKExtSelfTest(OESelftestTestCase): 15class oeSDKExtSelfTest(OESelftestTestCase):
14 """ 16 """
@@ -63,7 +65,7 @@ class oeSDKExtSelfTest(OESelftestTestCase):
63 cls.env_eSDK = oeSDKExtSelfTest.get_esdk_environment('', cls.tmpdir_eSDKQA) 65 cls.env_eSDK = oeSDKExtSelfTest.get_esdk_environment('', cls.tmpdir_eSDKQA)
64 66
65 sstate_config=""" 67 sstate_config="""
66SDK_LOCAL_CONF_WHITELIST = "SSTATE_MIRRORS" 68ESDK_LOCALCONF_ALLOW = "SSTATE_MIRRORS"
67SSTATE_MIRRORS = "file://.* file://%s/PATH" 69SSTATE_MIRRORS = "file://.* file://%s/PATH"
68CORE_IMAGE_EXTRA_INSTALL = "perl" 70CORE_IMAGE_EXTRA_INSTALL = "perl"
69 """ % sstate_dir 71 """ % sstate_dir
@@ -91,7 +93,7 @@ CORE_IMAGE_EXTRA_INSTALL = "perl"
91 93
92 # Configure eSDK to use sstate mirror from poky 94 # Configure eSDK to use sstate mirror from poky
93 sstate_config=""" 95 sstate_config="""
94SDK_LOCAL_CONF_WHITELIST = "SSTATE_MIRRORS" 96ESDK_LOCALCONF_ALLOW = "SSTATE_MIRRORS"
95SSTATE_MIRRORS = "file://.* file://%s/PATH" 97SSTATE_MIRRORS = "file://.* file://%s/PATH"
96 """ % bb_vars["SSTATE_DIR"] 98 """ % bb_vars["SSTATE_DIR"]
97 with open(os.path.join(cls.tmpdir_eSDKQA, 'conf', 'local.conf'), 'a+') as f: 99 with open(os.path.join(cls.tmpdir_eSDKQA, 'conf', 'local.conf'), 'a+') as f:
@@ -100,7 +102,7 @@ SSTATE_MIRRORS = "file://.* file://%s/PATH"
100 @classmethod 102 @classmethod
101 def tearDownClass(cls): 103 def tearDownClass(cls):
102 for i in range(0, 10): 104 for i in range(0, 10):
103 if os.path.exists(os.path.join(cls.tmpdir_eSDKQA, 'bitbake.lock')): 105 if os.path.exists(os.path.join(cls.tmpdir_eSDKQA, 'bitbake.lock')) or os.path.exists(os.path.join(cls.tmpdir_eSDKQA, 'cache/hashserv.db-wal')):
104 time.sleep(1) 106 time.sleep(1)
105 else: 107 else:
106 break 108 break
diff --git a/meta/lib/oeqa/selftest/cases/externalsrc.py b/meta/lib/oeqa/selftest/cases/externalsrc.py
new file mode 100644
index 0000000000..1d800dc82c
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/externalsrc.py
@@ -0,0 +1,44 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os
8import shutil
9import tempfile
10
11from oeqa.selftest.case import OESelftestTestCase
12from oeqa.utils.commands import get_bb_var, runCmd
13
14class ExternalSrc(OESelftestTestCase):
15 # test that srctree_hash_files does not crash
16 # we should be actually checking do_compile[file-checksums] but oeqa currently does not support it
17 # so we check only that a recipe with externalsrc can be parsed
18 def test_externalsrc_srctree_hash_files(self):
19 test_recipe = "git-submodule-test"
20 git_url = "git://git.yoctoproject.org/git-submodule-test"
21 externalsrc_dir = tempfile.TemporaryDirectory(prefix="externalsrc").name
22
23 self.write_config(
24 """
25INHERIT += "externalsrc"
26EXTERNALSRC:pn-%s = "%s"
27""" % (test_recipe, externalsrc_dir)
28 )
29
30 # test with git without submodules
31 runCmd('git clone %s %s' % (git_url, externalsrc_dir))
32 os.unlink(externalsrc_dir + "/.gitmodules")
33 open(".gitmodules", 'w').close() # local file .gitmodules in cwd should not affect externalsrc parsing
34 self.assertEqual(get_bb_var("S", test_recipe), externalsrc_dir, msg = "S does not equal EXTERNALSRC")
35 os.unlink(".gitmodules")
36
37 # test with git with submodules
38 runCmd('git checkout .gitmodules', cwd=externalsrc_dir)
39 runCmd('git submodule update --init --recursive', cwd=externalsrc_dir)
40 self.assertEqual(get_bb_var("S", test_recipe), externalsrc_dir, msg = "S does not equal EXTERNALSRC")
41
42 # test without git
43 shutil.rmtree(os.path.join(externalsrc_dir, ".git"))
44 self.assertEqual(get_bb_var("S", test_recipe), externalsrc_dir, msg = "S does not equal EXTERNALSRC")
diff --git a/meta/lib/oeqa/selftest/cases/fetch.py b/meta/lib/oeqa/selftest/cases/fetch.py
index 76cbadf2ff..44099176fc 100644
--- a/meta/lib/oeqa/selftest/cases/fetch.py
+++ b/meta/lib/oeqa/selftest/cases/fetch.py
@@ -1,7 +1,12 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
7import tempfile
8import textwrap
9import bb.tinfoil
5import oe.path 10import oe.path
6from oeqa.selftest.case import OESelftestTestCase 11from oeqa.selftest.case import OESelftestTestCase
7from oeqa.utils.commands import bitbake 12from oeqa.utils.commands import bitbake
@@ -21,8 +26,8 @@ class Fetch(OESelftestTestCase):
21 # No mirrors, should use git to fetch successfully 26 # No mirrors, should use git to fetch successfully
22 features = """ 27 features = """
23DL_DIR = "%s" 28DL_DIR = "%s"
24MIRRORS_forcevariable = "" 29MIRRORS:forcevariable = ""
25PREMIRRORS_forcevariable = "" 30PREMIRRORS:forcevariable = ""
26""" % dldir 31""" % dldir
27 self.write_config(features) 32 self.write_config(features)
28 oe.path.remove(dldir, recurse=True) 33 oe.path.remove(dldir, recurse=True)
@@ -31,9 +36,10 @@ PREMIRRORS_forcevariable = ""
31 # No mirrors and broken git, should fail 36 # No mirrors and broken git, should fail
32 features = """ 37 features = """
33DL_DIR = "%s" 38DL_DIR = "%s"
39SRC_URI:pn-dbus-wait = "git://git.yoctoproject.org/dbus-wait;branch=master;protocol=git"
34GIT_PROXY_COMMAND = "false" 40GIT_PROXY_COMMAND = "false"
35MIRRORS_forcevariable = "" 41MIRRORS:forcevariable = ""
36PREMIRRORS_forcevariable = "" 42PREMIRRORS:forcevariable = ""
37""" % dldir 43""" % dldir
38 self.write_config(features) 44 self.write_config(features)
39 oe.path.remove(dldir, recurse=True) 45 oe.path.remove(dldir, recurse=True)
@@ -43,9 +49,62 @@ PREMIRRORS_forcevariable = ""
43 # Broken git but a specific mirror 49 # Broken git but a specific mirror
44 features = """ 50 features = """
45DL_DIR = "%s" 51DL_DIR = "%s"
52SRC_URI:pn-dbus-wait = "git://git.yoctoproject.org/dbus-wait;branch=master;protocol=git"
46GIT_PROXY_COMMAND = "false" 53GIT_PROXY_COMMAND = "false"
47MIRRORS_forcevariable = "git://.*/.* http://downloads.yoctoproject.org/mirror/sources/" 54MIRRORS:forcevariable = "git://.*/.* http://downloads.yoctoproject.org/mirror/sources/"
48""" % dldir 55""" % dldir
49 self.write_config(features) 56 self.write_config(features)
50 oe.path.remove(dldir, recurse=True) 57 oe.path.remove(dldir, recurse=True)
51 bitbake("dbus-wait -c fetch -f") 58 bitbake("dbus-wait -c fetch -f")
59
60
61class Dependencies(OESelftestTestCase):
62 def write_recipe(self, content, tempdir):
63 f = os.path.join(tempdir, "test.bb")
64 with open(f, "w") as fd:
65 fd.write(content)
66 return f
67
68 def test_dependencies(self):
69 """
70 Verify that the correct dependencies are generated for specific SRC_URI entries.
71 """
72
73 with bb.tinfoil.Tinfoil() as tinfoil, tempfile.TemporaryDirectory(prefix="selftest-fetch") as tempdir:
74 tinfoil.prepare(config_only=False, quiet=2)
75
76 r = """
77 LICENSE="CLOSED"
78 SRC_URI="http://example.com/tarball.zip"
79 """
80 f = self.write_recipe(textwrap.dedent(r), tempdir)
81 d = tinfoil.parse_recipe_file(f)
82 self.assertIn("wget-native", d.getVarFlag("do_fetch", "depends"))
83 self.assertIn("unzip-native", d.getVarFlag("do_unpack", "depends"))
84
85 # Verify that the downloadfilename overrides the URI
86 r = """
87 LICENSE="CLOSED"
88 SRC_URI="https://example.com/tarball;downloadfilename=something.zip"
89 """
90 f = self.write_recipe(textwrap.dedent(r), tempdir)
91 d = tinfoil.parse_recipe_file(f)
92 self.assertIn("wget-native", d.getVarFlag("do_fetch", "depends"))
93 self.assertIn("unzip-native", d.getVarFlag("do_unpack", "depends") or "")
94
95 r = """
96 LICENSE="CLOSED"
97 SRC_URI="ftp://example.com/tarball.lz"
98 """
99 f = self.write_recipe(textwrap.dedent(r), tempdir)
100 d = tinfoil.parse_recipe_file(f)
101 self.assertIn("wget-native", d.getVarFlag("do_fetch", "depends"))
102 self.assertIn("lzip-native", d.getVarFlag("do_unpack", "depends"))
103
104 r = """
105 LICENSE="CLOSED"
106 SRC_URI="git://example.com/repo;branch=master;rev=ffffffffffffffffffffffffffffffffffffffff"
107 """
108 f = self.write_recipe(textwrap.dedent(r), tempdir)
109 d = tinfoil.parse_recipe_file(f)
110 self.assertIn("git-native", d.getVarFlag("do_fetch", "depends"))
diff --git a/meta/lib/oeqa/selftest/cases/fitimage.py b/meta/lib/oeqa/selftest/cases/fitimage.py
index 02692de822..347c065377 100644
--- a/meta/lib/oeqa/selftest/cases/fitimage.py
+++ b/meta/lib/oeqa/selftest/cases/fitimage.py
@@ -1,11 +1,12 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5from oeqa.selftest.case import OESelftestTestCase 7from oeqa.selftest.case import OESelftestTestCase
6from oeqa.utils.commands import runCmd, bitbake, get_bb_var, runqemu 8from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars
7import os 9import os
8import json
9import re 10import re
10 11
11class FitImageTests(OESelftestTestCase): 12class FitImageTests(OESelftestTestCase):
@@ -32,6 +33,8 @@ KERNEL_CLASSES = " kernel-fitimage "
32# RAM disk variables including load address and entrypoint for kernel and RAM disk 33# RAM disk variables including load address and entrypoint for kernel and RAM disk
33IMAGE_FSTYPES += "cpio.gz" 34IMAGE_FSTYPES += "cpio.gz"
34INITRAMFS_IMAGE = "core-image-minimal" 35INITRAMFS_IMAGE = "core-image-minimal"
36# core-image-minimal is used as initramfs here, drop the rootfs suffix
37IMAGE_NAME_SUFFIX:pn-core-image-minimal = ""
35UBOOT_RD_LOADADDRESS = "0x88000000" 38UBOOT_RD_LOADADDRESS = "0x88000000"
36UBOOT_RD_ENTRYPOINT = "0x88000000" 39UBOOT_RD_ENTRYPOINT = "0x88000000"
37UBOOT_LOADADDRESS = "0x80080000" 40UBOOT_LOADADDRESS = "0x80080000"
@@ -41,15 +44,14 @@ FIT_DESC = "A model description"
41 self.write_config(config) 44 self.write_config(config)
42 45
43 # fitImage is created as part of linux recipe 46 # fitImage is created as part of linux recipe
44 bitbake("virtual/kernel") 47 image = "virtual/kernel"
48 bitbake(image)
49 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'INITRAMFS_IMAGE_NAME', 'KERNEL_FIT_LINK_NAME'], image)
45 50
46 image_type = "core-image-minimal" 51 fitimage_its_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'],
47 deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') 52 "fitImage-its-%s-%s" % (bb_vars['INITRAMFS_IMAGE_NAME'], bb_vars['KERNEL_FIT_LINK_NAME']))
48 machine = get_bb_var('MACHINE') 53 fitimage_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'],
49 fitimage_its_path = os.path.join(deploy_dir_image, 54 "fitImage-%s-%s" % (bb_vars['INITRAMFS_IMAGE_NAME'], bb_vars['KERNEL_FIT_LINK_NAME']))
50 "fitImage-its-%s-%s-%s" % (image_type, machine, machine))
51 fitimage_path = os.path.join(deploy_dir_image,
52 "fitImage-%s-%s-%s" % (image_type, machine, machine))
53 55
54 self.assertTrue(os.path.exists(fitimage_its_path), 56 self.assertTrue(os.path.exists(fitimage_its_path),
55 "%s image tree source doesn't exist" % (fitimage_its_path)) 57 "%s image tree source doesn't exist" % (fitimage_its_path))
@@ -114,22 +116,22 @@ KERNEL_CLASSES = " kernel-fitimage test-mkimage-wrapper "
114UBOOT_SIGN_ENABLE = "1" 116UBOOT_SIGN_ENABLE = "1"
115FIT_GENERATE_KEYS = "1" 117FIT_GENERATE_KEYS = "1"
116UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys" 118UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
117UBOOT_SIGN_KEYNAME = "oe-selftest" 119UBOOT_SIGN_IMG_KEYNAME = "img-oe-selftest"
120UBOOT_SIGN_KEYNAME = "cfg-oe-selftest"
118FIT_SIGN_INDIVIDUAL = "1" 121FIT_SIGN_INDIVIDUAL = "1"
119UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'" 122UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'"
120""" 123"""
121 self.write_config(config) 124 self.write_config(config)
122 125
123 # fitImage is created as part of linux recipe 126 # fitImage is created as part of linux recipe
124 bitbake("virtual/kernel") 127 image = "virtual/kernel"
128 bitbake(image)
129 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'KERNEL_FIT_LINK_NAME'], image)
125 130
126 image_type = "core-image-minimal" 131 fitimage_its_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'],
127 deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') 132 "fitImage-its-%s" % (bb_vars['KERNEL_FIT_LINK_NAME']))
128 machine = get_bb_var('MACHINE') 133 fitimage_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'],
129 fitimage_its_path = os.path.join(deploy_dir_image, 134 "fitImage-%s.bin" % (bb_vars['KERNEL_FIT_LINK_NAME']))
130 "fitImage-its-%s" % (machine,))
131 fitimage_path = os.path.join(deploy_dir_image,
132 "fitImage-%s.bin" % (machine,))
133 135
134 self.assertTrue(os.path.exists(fitimage_its_path), 136 self.assertTrue(os.path.exists(fitimage_its_path),
135 "%s image tree source doesn't exist" % (fitimage_its_path)) 137 "%s image tree source doesn't exist" % (fitimage_its_path))
@@ -173,11 +175,11 @@ UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'"
173 175
174 reqsigvalues_image = { 176 reqsigvalues_image = {
175 'algo': '"sha256,rsa2048"', 177 'algo': '"sha256,rsa2048"',
176 'key-name-hint': '"oe-selftest"', 178 'key-name-hint': '"img-oe-selftest"',
177 } 179 }
178 reqsigvalues_config = { 180 reqsigvalues_config = {
179 'algo': '"sha256,rsa2048"', 181 'algo': '"sha256,rsa2048"',
180 'key-name-hint': '"oe-selftest"', 182 'key-name-hint': '"cfg-oe-selftest"',
181 'sign-images': '"kernel", "fdt"', 183 'sign-images': '"kernel", "fdt"',
182 } 184 }
183 185
@@ -202,7 +204,7 @@ UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'"
202 signed_sections = {} 204 signed_sections = {}
203 for line in result.output.splitlines(): 205 for line in result.output.splitlines():
204 if line.startswith((' Configuration', ' Image')): 206 if line.startswith((' Configuration', ' Image')):
205 in_signed = re.search('\((.*)\)', line).groups()[0] 207 in_signed = re.search(r'\((.*)\)', line).groups()[0]
206 elif re.match('^ *', line) in (' ', ''): 208 elif re.match('^ *', line) in (' ', ''):
207 in_signed = None 209 in_signed = None
208 elif in_signed: 210 elif in_signed:
@@ -215,7 +217,10 @@ UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'"
215 self.assertIn('conf-am335x-boneblack.dtb', signed_sections) 217 self.assertIn('conf-am335x-boneblack.dtb', signed_sections)
216 for signed_section, values in signed_sections.items(): 218 for signed_section, values in signed_sections.items():
217 value = values.get('Sign algo', None) 219 value = values.get('Sign algo', None)
218 self.assertEqual(value, 'sha256,rsa2048:oe-selftest', 'Signature algorithm for %s not expected value' % signed_section) 220 if signed_section.startswith("conf"):
221 self.assertEqual(value, 'sha256,rsa2048:cfg-oe-selftest', 'Signature algorithm for %s not expected value' % signed_section)
222 else:
223 self.assertEqual(value, 'sha256,rsa2048:img-oe-selftest', 'Signature algorithm for %s not expected value' % signed_section)
219 value = values.get('Sign value', None) 224 value = values.get('Sign value', None)
220 self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section) 225 self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section)
221 226
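With separate configuration and image keys, the expected key-name-hint now depends on the node being checked: configuration nodes are signed with the cfg- key, everything else with the img- key. The selection made in the loop above boils down to:

def expected_sign_algo(signed_section):
    # Configuration nodes use the UBOOT_SIGN_KEYNAME key,
    # image nodes the UBOOT_SIGN_IMG_KEYNAME key
    if signed_section.startswith("conf"):
        return 'sha256,rsa2048:cfg-oe-selftest'
    return 'sha256,rsa2048:img-oe-selftest'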
@@ -231,6 +236,480 @@ UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart comment'"
231 result = runCmd('grep "### uboot-mkimage signing wrapper message" %s/log.do_assemble_fitimage' % tempdir, ignore_status=True) 236 result = runCmd('grep "### uboot-mkimage signing wrapper message" %s/log.do_assemble_fitimage' % tempdir, ignore_status=True)
232 self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE_SIGN did not work') 237 self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE_SIGN did not work')
233 238
239 def test_uboot_fit_image(self):
240 """
241 Summary: Check if Uboot FIT image and Image Tree Source
242 (its) are built and the Image Tree Source has the
243 correct fields.
244 Expected: 1. u-boot-fitImage and u-boot-its can be built
245 2. The type, load address, entrypoint address and
246 default values of U-boot image are correct in the
247 Image Tree Source. Not all the fields are tested,
248 only the key fields that won't vary between
249 different architectures.
250 Product: oe-core
251 Author: Klaus Heinrich Kiwi <klaus@linux.vnet.ibm.com>
252 based on work by Usama Arif <usama.arif@arm.com>
253 """
254 config = """
255# We need at least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set
256MACHINE = "qemuarm"
257UBOOT_MACHINE = "am57xx_evm_defconfig"
258SPL_BINARY = "MLO"
259
260# Enable creation of the U-Boot fitImage
261UBOOT_FITIMAGE_ENABLE = "1"
262
263# (U-boot) fitImage properties
264UBOOT_LOADADDRESS = "0x80080000"
265UBOOT_ENTRYPOINT = "0x80080000"
266UBOOT_FIT_DESC = "A model description"
267
268# Enable creation of Kernel fitImage
269KERNEL_IMAGETYPES += " fitImage "
270KERNEL_CLASSES = " kernel-fitimage"
271UBOOT_SIGN_ENABLE = "1"
272FIT_GENERATE_KEYS = "1"
273UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
274UBOOT_SIGN_IMG_KEYNAME = "img-oe-selftest"
275UBOOT_SIGN_KEYNAME = "cfg-oe-selftest"
276FIT_SIGN_INDIVIDUAL = "1"
277"""
278 self.write_config(config)
279
280 # The U-Boot fitImage is created as part of the U-Boot recipe
281 bitbake("virtual/bootloader")
282
283 deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
284 machine = get_bb_var('MACHINE')
285 fitimage_its_path = os.path.join(deploy_dir_image,
286 "u-boot-its-%s" % (machine,))
287 fitimage_path = os.path.join(deploy_dir_image,
288 "u-boot-fitImage-%s" % (machine,))
289
290 self.assertTrue(os.path.exists(fitimage_its_path),
291 "%s image tree source doesn't exist" % (fitimage_its_path))
292 self.assertTrue(os.path.exists(fitimage_path),
293 "%s FIT image doesn't exist" % (fitimage_path))
294
295 # Check that the type, load address, entrypoint address and default
296 # values for kernel and ramdisk in Image Tree Source are as expected.
297 # The order of fields in the below array is important. Not all the
299 # fields are tested, only the key fields that won't vary between
299 # different architectures.
300 its_field_check = [
301 'description = "A model description";',
302 'type = "standalone";',
303 'load = <0x80080000>;',
304 'entry = <0x80080000>;',
305 'default = "conf";',
306 'loadables = "uboot";',
307 'fdt = "fdt";'
308 ]
309
310 with open(fitimage_its_path) as its_file:
311 field_index = 0
312 for line in its_file:
313 if field_index == len(its_field_check):
314 break
315 if its_field_check[field_index] in line:
316 field_index +=1
317
318 if field_index != len(its_field_check): # if it's equal, the test passed
319 self.assertTrue(field_index == len(its_field_check),
320 "Fields in Image Tree Source File %s did not match, error in finding %s"
321 % (fitimage_its_path, its_field_check[field_index]))
322
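The ITS check above (repeated in the signing variant that follows) uses a simple ordered scan: each expected field string must be found in the file, in the listed order, before the scan advances to the next one. A minimal standalone sketch of that technique, with illustrative arguments rather than the test's own values:

    def fields_in_order(path, expected):
        """Return the first expected entry not found in order, or None if all matched."""
        idx = 0
        with open(path) as f:
            for line in f:
                if idx == len(expected):
                    break
                if expected[idx] in line:
                    idx += 1
        return None if idx == len(expected) else expected[idx]

    # Illustrative call against a hypothetical deployed ITS file:
    # missing = fields_in_order("u-boot-its-qemuarm", ['type = "standalone";', 'load = <0x80080000>;'])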
323 def test_uboot_sign_fit_image(self):
324 """
325 Summary: Check if U-Boot FIT image and Image Tree Source
326 (its) are built and the Image Tree Source has the
327 correct fields, in the scenario where the Kernel
328 is also creating/signing its fitImage.
329 Expected: 1. u-boot-fitImage and u-boot-its can be built
330 2. The type, load address, entrypoint address and
331 default values of the U-Boot image are correct in the
332 Image Tree Source. Not all the fields are tested,
333 only the key fields that won't vary between
334 different architectures.
335 Product: oe-core
336 Author: Klaus Heinrich Kiwi <klaus@linux.vnet.ibm.com>
337 based on work by Usama Arif <usama.arif@arm.com>
338 """
339 config = """
340# We need at least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set
341MACHINE = "qemuarm"
342UBOOT_MACHINE = "am57xx_evm_defconfig"
343SPL_BINARY = "MLO"
344
345# Enable creation of the U-Boot fitImage
346UBOOT_FITIMAGE_ENABLE = "1"
347
349# (U-Boot) fitImage properties
349UBOOT_LOADADDRESS = "0x80080000"
350UBOOT_ENTRYPOINT = "0x80080000"
351UBOOT_FIT_DESC = "A model description"
352KERNEL_IMAGETYPES += " fitImage "
353KERNEL_CLASSES = " kernel-fitimage "
354INHERIT += "test-mkimage-wrapper"
355UBOOT_SIGN_ENABLE = "1"
356FIT_GENERATE_KEYS = "1"
357UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
358UBOOT_SIGN_IMG_KEYNAME = "img-oe-selftest"
359UBOOT_SIGN_KEYNAME = "cfg-oe-selftest"
360FIT_SIGN_INDIVIDUAL = "1"
361UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart U-Boot comment'"
362"""
363 self.write_config(config)
364
365 # The U-Boot fitImage is created as part of the U-Boot recipe
366 bitbake("virtual/bootloader")
367
368 deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
369 machine = get_bb_var('MACHINE')
370 fitimage_its_path = os.path.join(deploy_dir_image,
371 "u-boot-its-%s" % (machine,))
372 fitimage_path = os.path.join(deploy_dir_image,
373 "u-boot-fitImage-%s" % (machine,))
374
375 self.assertTrue(os.path.exists(fitimage_its_path),
376 "%s image tree source doesn't exist" % (fitimage_its_path))
377 self.assertTrue(os.path.exists(fitimage_path),
378 "%s FIT image doesn't exist" % (fitimage_path))
379
380 # Check that the type, load address, entrypoint address and default
381 # values for kernel and ramdisk in Image Tree Source are as expected.
382 # The order of fields in the below array is important. Not all the
383 # fields are tested, only the key fields that won't vary between
384 # different architectures.
385 its_field_check = [
386 'description = "A model description";',
387 'type = "standalone";',
388 'load = <0x80080000>;',
389 'entry = <0x80080000>;',
390 'default = "conf";',
391 'loadables = "uboot";',
392 'fdt = "fdt";'
393 ]
394
395 with open(fitimage_its_path) as its_file:
396 field_index = 0
397 for line in its_file:
398 if field_index == len(its_field_check):
399 break
400 if its_field_check[field_index] in line:
401 field_index +=1
402
403 if field_index != len(its_field_check): # if it's equal, the test passed
404 self.assertTrue(field_index == len(its_field_check),
405 "Fields in Image Tree Source File %s did not match, error in finding %s"
406 % (fitimage_its_path, its_field_check[field_index]))
407
408
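FIT_GENERATE_KEYS asks the build to create the signing keys under UBOOT_SIGN_KEYDIR before mkimage runs. Outside of bitbake, an equivalent key pair can be produced with openssl; the sketch below is only an approximation of that step (the key names follow the selftest configuration, the certificate parameters are assumptions):

    import pathlib
    import subprocess

    def generate_fit_keys(keydir, names=("cfg-oe-selftest", "img-oe-selftest"), bits=2048):
        """Create <name>.key/<name>.crt pairs roughly as FIT_GENERATE_KEYS would."""
        keydir = pathlib.Path(keydir)
        keydir.mkdir(parents=True, exist_ok=True)
        for name in names:
            key, crt = keydir / ("%s.key" % name), keydir / ("%s.crt" % name)
            subprocess.run(["openssl", "genrsa", "-out", str(key), str(bits)], check=True)
            subprocess.run(["openssl", "req", "-batch", "-new", "-x509",
                            "-key", str(key), "-out", str(crt)], check=True)

    # generate_fit_keys("signing-keys")  # i.e. ${TOPDIR}/signing-keys in the build directory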
409 def test_sign_standalone_uboot_fit_image(self):
410 """
411 Summary: Check if U-Boot FIT image and Image Tree Source (its) are
412 created and signed correctly for the scenario where only
413 the U-Boot proper fitImage is being created and signed.
414 Expected: 1) U-Boot its and FIT image are built successfully
415 2) Scanning the its file indicates signing is enabled
416 as requested by SPL_SIGN_ENABLE (using keys generated
417 via UBOOT_FIT_GENERATE_KEYS)
418 3) Dumping the FIT image indicates signature values
419 are present
420 4) Examination of the do_uboot_assemble_fitimage
421 runfile/logfile indicates that UBOOT_MKIMAGE, UBOOT_MKIMAGE_SIGN
422 and SPL_MKIMAGE_SIGN_ARGS are working as expected.
423 Product: oe-core
424 Author: Klaus Heinrich Kiwi <klaus@linux.vnet.ibm.com> based upon
425 work by Paul Eggleton <paul.eggleton@microsoft.com> and
426 Usama Arif <usama.arif@arm.com>
427 """
428 config = """
429# There's no U-Boot defconfig with CONFIG_FIT_SIGNATURE yet, so we need at
430# least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set
431MACHINE = "qemuarm"
432UBOOT_MACHINE = "am57xx_evm_defconfig"
433SPL_BINARY = "MLO"
434# The kernel-fitimage class is a dependency even if we're only
435# creating/signing the U-Boot fitImage
436KERNEL_CLASSES = " kernel-fitimage"
437INHERIT += "test-mkimage-wrapper"
438# Enable creation and signing of the U-Boot fitImage
439UBOOT_FITIMAGE_ENABLE = "1"
440SPL_SIGN_ENABLE = "1"
441SPL_SIGN_KEYNAME = "spl-oe-selftest"
442SPL_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
443UBOOT_DTB_BINARY = "u-boot.dtb"
444UBOOT_ENTRYPOINT = "0x80000000"
445UBOOT_LOADADDRESS = "0x80000000"
446UBOOT_DTB_LOADADDRESS = "0x82000000"
447UBOOT_ARCH = "arm"
448SPL_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
449SPL_MKIMAGE_SIGN_ARGS = "-c 'a smart U-Boot comment'"
450UBOOT_EXTLINUX = "0"
451UBOOT_FIT_GENERATE_KEYS = "1"
452UBOOT_FIT_HASH_ALG = "sha256"
453"""
454 self.write_config(config)
455
456 # The U-Boot fitImage is created as part of the U-Boot recipe
457 bitbake("virtual/bootloader")
458
459 image_type = "core-image-minimal"
460 deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
461 machine = get_bb_var('MACHINE')
462 fitimage_its_path = os.path.join(deploy_dir_image,
463 "u-boot-its-%s" % (machine,))
464 fitimage_path = os.path.join(deploy_dir_image,
465 "u-boot-fitImage-%s" % (machine,))
466
467 self.assertTrue(os.path.exists(fitimage_its_path),
468 "%s image tree source doesn't exist" % (fitimage_its_path))
469 self.assertTrue(os.path.exists(fitimage_path),
470 "%s FIT image doesn't exist" % (fitimage_path))
471
472 req_itspaths = [
473 ['/', 'images', 'uboot'],
474 ['/', 'images', 'uboot', 'signature'],
475 ['/', 'images', 'fdt'],
476 ['/', 'images', 'fdt', 'signature'],
477 ]
478
479 itspath = []
480 itspaths = []
481 linect = 0
482 sigs = {}
483 with open(fitimage_its_path) as its_file:
484 linect += 1
485 for line in its_file:
486 line = line.strip()
487 if line.endswith('};'):
488 itspath.pop()
489 elif line.endswith('{'):
490 itspath.append(line[:-1].strip())
491 itspaths.append(itspath[:])
492 elif itspath and itspath[-1] == 'signature':
493 itsdotpath = '.'.join(itspath)
494 if not itsdotpath in sigs:
495 sigs[itsdotpath] = {}
496 if not '=' in line or not line.endswith(';'):
497 self.fail('Unexpected formatting in %s sigs section line %d:%s' % (fitimage_its_path, linect, line))
498 key, value = line.split('=', 1)
499 sigs[itsdotpath][key.rstrip()] = value.lstrip().rstrip(';')
500
501 for reqpath in req_itspaths:
502 if not reqpath in itspaths:
503 self.fail('Missing section in its file: %s' % reqpath)
504
505 reqsigvalues_image = {
506 'algo': '"sha256,rsa2048"',
507 'key-name-hint': '"spl-oe-selftest"',
508 }
509
510 for itspath, values in sigs.items():
511 reqsigvalues = reqsigvalues_image
512 for reqkey, reqvalue in reqsigvalues.items():
513 value = values.get(reqkey, None)
514 if value is None:
515 self.fail('Missing key "%s" in its file signature section %s' % (reqkey, itspath))
516 self.assertEqual(value, reqvalue)
517
518 # Dump the image to see if it really got signed
519 bitbake("u-boot-tools-native -c addto_recipe_sysroot")
520 result = runCmd('bitbake -e u-boot-tools-native | grep ^RECIPE_SYSROOT_NATIVE=')
521 recipe_sysroot_native = result.output.split('=')[1].strip('"')
522 dumpimage_path = os.path.join(recipe_sysroot_native, 'usr', 'bin', 'dumpimage')
523 result = runCmd('%s -l %s' % (dumpimage_path, fitimage_path))
524 in_signed = None
525 signed_sections = {}
526 for line in result.output.splitlines():
527 if line.startswith((' Image')):
528 in_signed = re.search(r'\((.*)\)', line).groups()[0]
529 elif re.match(' \w', line):
530 in_signed = None
531 elif in_signed:
532 if not in_signed in signed_sections:
533 signed_sections[in_signed] = {}
534 key, value = line.split(':', 1)
535 signed_sections[in_signed][key.strip()] = value.strip()
536 self.assertIn('uboot', signed_sections)
537 self.assertIn('fdt', signed_sections)
538 for signed_section, values in signed_sections.items():
539 value = values.get('Sign algo', None)
540 self.assertEqual(value, 'sha256,rsa2048:spl-oe-selftest', 'Signature algorithm for %s not expected value' % signed_section)
541 value = values.get('Sign value', None)
542 self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section)
543
544 # Check for SPL_MKIMAGE_SIGN_ARGS
545 result = runCmd('bitbake -e virtual/bootloader | grep ^T=')
546 tempdir = result.output.split('=', 1)[1].strip().strip('"')
547 result = runCmd('grep "a smart U-Boot comment" %s/run.do_uboot_assemble_fitimage' % tempdir, ignore_status=True)
548 self.assertEqual(result.status, 0, 'SPL_MKIMAGE_SIGN_ARGS value did not get used')
549
550 # Check for evidence of test-mkimage-wrapper class
551 result = runCmd('grep "### uboot-mkimage wrapper message" %s/log.do_uboot_assemble_fitimage' % tempdir, ignore_status=True)
552 self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE did not work')
553 result = runCmd('grep "### uboot-mkimage signing wrapper message" %s/log.do_uboot_assemble_fitimage' % tempdir, ignore_status=True)
554 self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE_SIGN did not work')
555
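Both signing tests walk the ITS file with a small stack: a line ending in '{' pushes a node name, '};' pops one, and property lines seen while the top of the stack is 'signature' are recorded under the dotted node path. A compact standalone version of that parser (the function name is illustrative):

    def collect_signature_props(its_path):
        """Map dotted ITS node paths ending in 'signature' to their property dicts."""
        path, sigs = [], {}
        with open(its_path) as f:
            for line in f:
                line = line.strip()
                if line.endswith('};'):
                    path.pop()
                elif line.endswith('{'):
                    path.append(line[:-1].strip())
                elif path and path[-1] == 'signature' and '=' in line:
                    key, value = line.split('=', 1)
                    sigs.setdefault('.'.join(path), {})[key.strip()] = value.strip().rstrip(';')
        return sigs

    # e.g. collect_signature_props(fitimage_its_path)['/.images.uboot.signature']['key-name-hint']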
556 def test_sign_cascaded_uboot_fit_image(self):
557 """
558 Summary: Check if U-Boot FIT image and Image Tree Source (its) are
559 created and signed correctly for the scenario where both
560 U-Boot proper and Kernel fitImages are being created and
561 signed.
562 Expected: 1) U-Boot its and FIT image are built successfully
563 2) Scanning the its file indicates signing is enabled
564 as requested by SPL_SIGN_ENABLE (using keys generated
565 via UBOOT_FIT_GENERATE_KEYS)
566 3) Dumping the FIT image indicates signature values
567 are present
568 4) Examination of the do_uboot_assemble_fitimage
569 runfile/logfile indicates that UBOOT_MKIMAGE, UBOOT_MKIMAGE_SIGN
570 and SPL_MKIMAGE_SIGN_ARGS are working as expected.
571 Product: oe-core
572 Author: Klaus Heinrich Kiwi <klaus@linux.vnet.ibm.com> based upon
573 work by Paul Eggleton <paul.eggleton@microsoft.com> and
574 Usama Arif <usama.arif@arm.com>
575 """
576 config = """
577# There's no U-Boot defconfig with CONFIG_FIT_SIGNATURE yet, so we need at
578# least CONFIG_SPL_LOAD_FIT and CONFIG_SPL_OF_CONTROL set
579MACHINE = "qemuarm"
580UBOOT_MACHINE = "am57xx_evm_defconfig"
581SPL_BINARY = "MLO"
582# Enable creation and signing of the U-Boot fitImage
583UBOOT_FITIMAGE_ENABLE = "1"
584SPL_SIGN_ENABLE = "1"
585SPL_SIGN_KEYNAME = "spl-cascaded-oe-selftest"
586SPL_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
587UBOOT_DTB_BINARY = "u-boot.dtb"
588UBOOT_ENTRYPOINT = "0x80000000"
589UBOOT_LOADADDRESS = "0x80000000"
590UBOOT_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
591UBOOT_MKIMAGE_SIGN_ARGS = "-c 'a smart cascaded Kernel comment'"
592UBOOT_DTB_LOADADDRESS = "0x82000000"
593UBOOT_ARCH = "arm"
594SPL_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
595SPL_MKIMAGE_SIGN_ARGS = "-c 'a smart cascaded U-Boot comment'"
596UBOOT_EXTLINUX = "0"
597UBOOT_FIT_GENERATE_KEYS = "1"
598UBOOT_FIT_HASH_ALG = "sha256"
599KERNEL_IMAGETYPES += " fitImage "
600KERNEL_CLASSES = " kernel-fitimage "
601INHERIT += "test-mkimage-wrapper"
602UBOOT_SIGN_ENABLE = "1"
603FIT_GENERATE_KEYS = "1"
604UBOOT_SIGN_KEYDIR = "${TOPDIR}/signing-keys"
605UBOOT_SIGN_IMG_KEYNAME = "img-oe-selftest"
606UBOOT_SIGN_KEYNAME = "cfg-oe-selftest"
607FIT_SIGN_INDIVIDUAL = "1"
608"""
609 self.write_config(config)
610
611 # The U-Boot fitImage is created as part of the U-Boot recipe
612 bitbake("virtual/bootloader")
613
614 image_type = "core-image-minimal"
615 deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
616 machine = get_bb_var('MACHINE')
617 fitimage_its_path = os.path.join(deploy_dir_image,
618 "u-boot-its-%s" % (machine,))
619 fitimage_path = os.path.join(deploy_dir_image,
620 "u-boot-fitImage-%s" % (machine,))
621
622 self.assertTrue(os.path.exists(fitimage_its_path),
623 "%s image tree source doesn't exist" % (fitimage_its_path))
624 self.assertTrue(os.path.exists(fitimage_path),
625 "%s FIT image doesn't exist" % (fitimage_path))
626
627 req_itspaths = [
628 ['/', 'images', 'uboot'],
629 ['/', 'images', 'uboot', 'signature'],
630 ['/', 'images', 'fdt'],
631 ['/', 'images', 'fdt', 'signature'],
632 ]
633
634 itspath = []
635 itspaths = []
636 linect = 0
637 sigs = {}
638 with open(fitimage_its_path) as its_file:
639 linect += 1
640 for line in its_file:
641 line = line.strip()
642 if line.endswith('};'):
643 itspath.pop()
644 elif line.endswith('{'):
645 itspath.append(line[:-1].strip())
646 itspaths.append(itspath[:])
647 elif itspath and itspath[-1] == 'signature':
648 itsdotpath = '.'.join(itspath)
649 if not itsdotpath in sigs:
650 sigs[itsdotpath] = {}
651 if not '=' in line or not line.endswith(';'):
652 self.fail('Unexpected formatting in %s sigs section line %d:%s' % (fitimage_its_path, linect, line))
653 key, value = line.split('=', 1)
654 sigs[itsdotpath][key.rstrip()] = value.lstrip().rstrip(';')
655
656 for reqpath in req_itspaths:
657 if not reqpath in itspaths:
658 self.fail('Missing section in its file: %s' % reqpath)
659
660 reqsigvalues_image = {
661 'algo': '"sha256,rsa2048"',
662 'key-name-hint': '"spl-cascaded-oe-selftest"',
663 }
664
665 for itspath, values in sigs.items():
666 reqsigvalues = reqsigvalues_image
667 for reqkey, reqvalue in reqsigvalues.items():
668 value = values.get(reqkey, None)
669 if value is None:
670 self.fail('Missing key "%s" in its file signature section %s' % (reqkey, itspath))
671 self.assertEqual(value, reqvalue)
672
673 # Dump the image to see if it really got signed
674 bitbake("u-boot-tools-native -c addto_recipe_sysroot")
675 result = runCmd('bitbake -e u-boot-tools-native | grep ^RECIPE_SYSROOT_NATIVE=')
676 recipe_sysroot_native = result.output.split('=')[1].strip('"')
677 dumpimage_path = os.path.join(recipe_sysroot_native, 'usr', 'bin', 'dumpimage')
678 result = runCmd('%s -l %s' % (dumpimage_path, fitimage_path))
679 in_signed = None
680 signed_sections = {}
681 for line in result.output.splitlines():
682 if line.startswith((' Image')):
683 in_signed = re.search(r'\((.*)\)', line).groups()[0]
684 elif re.match(' \w', line):
685 in_signed = None
686 elif in_signed:
687 if not in_signed in signed_sections:
688 signed_sections[in_signed] = {}
689 key, value = line.split(':', 1)
690 signed_sections[in_signed][key.strip()] = value.strip()
691 self.assertIn('uboot', signed_sections)
692 self.assertIn('fdt', signed_sections)
693 for signed_section, values in signed_sections.items():
694 value = values.get('Sign algo', None)
695 self.assertEqual(value, 'sha256,rsa2048:spl-cascaded-oe-selftest', 'Signature algorithm for %s not expected value' % signed_section)
696 value = values.get('Sign value', None)
697 self.assertEqual(len(value), 512, 'Signature value for section %s not expected length' % signed_section)
698
699 # Check for SPL_MKIMAGE_SIGN_ARGS
700 result = runCmd('bitbake -e virtual/bootloader | grep ^T=')
701 tempdir = result.output.split('=', 1)[1].strip().strip('"')
702 result = runCmd('grep "a smart cascaded U-Boot comment" %s/run.do_uboot_assemble_fitimage' % tempdir, ignore_status=True)
703 self.assertEqual(result.status, 0, 'SPL_MKIMAGE_SIGN_ARGS value did not get used')
704
705 # Check for evidence of test-mkimage-wrapper class
706 result = runCmd('grep "### uboot-mkimage wrapper message" %s/log.do_uboot_assemble_fitimage' % tempdir, ignore_status=True)
707 self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE did not work')
708 result = runCmd('grep "### uboot-mkimage signing wrapper message" %s/log.do_uboot_assemble_fitimage' % tempdir, ignore_status=True)
709 self.assertEqual(result.status, 0, 'UBOOT_MKIMAGE_SIGN did not work')
710
711
712
234 def test_initramfs_bundle(self): 713 def test_initramfs_bundle(self):
235 """ 714 """
236 Summary: Verifies the content of the initramfs bundle node in the FIT Image Tree Source (its) 715 Summary: Verifies the content of the initramfs bundle node in the FIT Image Tree Source (its)
@@ -264,9 +743,11 @@ UBOOT_LOADADDRESS = "0x80000000"
264UBOOT_DTB_LOADADDRESS = "0x82000000" 743UBOOT_DTB_LOADADDRESS = "0x82000000"
265UBOOT_ARCH = "arm" 744UBOOT_ARCH = "arm"
266UBOOT_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000" 745UBOOT_MKIMAGE_DTCOPTS = "-I dts -O dtb -p 2000"
746UBOOT_MKIMAGE_KERNEL_TYPE = "kernel"
267UBOOT_EXTLINUX = "0" 747UBOOT_EXTLINUX = "0"
268FIT_GENERATE_KEYS = "1" 748FIT_GENERATE_KEYS = "1"
269KERNEL_IMAGETYPE_REPLACEMENT = "zImage" 749KERNEL_IMAGETYPE_REPLACEMENT = "zImage"
750FIT_KERNEL_COMP_ALG = "none"
270FIT_HASH_ALG = "sha256" 751FIT_HASH_ALG = "sha256"
271""" 752"""
272 self.write_config(config) 753 self.write_config(config)
@@ -288,9 +769,9 @@ FIT_HASH_ALG = "sha256"
288 769
289 kernel_load = str(get_bb_var('UBOOT_LOADADDRESS')) 770 kernel_load = str(get_bb_var('UBOOT_LOADADDRESS'))
290 kernel_entry = str(get_bb_var('UBOOT_ENTRYPOINT')) 771 kernel_entry = str(get_bb_var('UBOOT_ENTRYPOINT'))
291 initramfs_bundle_format = str(get_bb_var('KERNEL_IMAGETYPE_REPLACEMENT')) 772 kernel_type = str(get_bb_var('UBOOT_MKIMAGE_KERNEL_TYPE'))
773 kernel_compression = str(get_bb_var('FIT_KERNEL_COMP_ALG'))
292 uboot_arch = str(get_bb_var('UBOOT_ARCH')) 774 uboot_arch = str(get_bb_var('UBOOT_ARCH'))
293 initramfs_bundle = "arch/" + uboot_arch + "/boot/" + initramfs_bundle_format + ".initramfs"
294 fit_hash_alg = str(get_bb_var('FIT_HASH_ALG')) 775 fit_hash_alg = str(get_bb_var('FIT_HASH_ALG'))
295 776
296 its_file = open(fitimage_its_path) 777 its_file = open(fitimage_its_path)
@@ -300,11 +781,11 @@ FIT_HASH_ALG = "sha256"
300 exp_node_lines = [ 781 exp_node_lines = [
301 'kernel-1 {', 782 'kernel-1 {',
302 'description = "Linux kernel";', 783 'description = "Linux kernel";',
303 'data = /incbin/("' + initramfs_bundle + '");', 784 'data = /incbin/("linux.bin");',
304 'type = "kernel";', 785 'type = "' + kernel_type + '";',
305 'arch = "' + uboot_arch + '";', 786 'arch = "' + uboot_arch + '";',
306 'os = "linux";', 787 'os = "linux";',
307 'compression = "none";', 788 'compression = "' + kernel_compression + '";',
308 'load = <' + kernel_load + '>;', 789 'load = <' + kernel_load + '>;',
309 'entry = <' + kernel_entry + '>;', 790 'entry = <' + kernel_entry + '>;',
310 'hash-1 {', 791 'hash-1 {',
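Several of these tests also confirm that the final FIT binary is signed by parsing `dumpimage -l` output: every 'Image (name)' or 'Configuration (name)' header opens a section, and the indented 'key: value' lines under it are collected until the next unindented line. A standalone sketch of that step with a simplified section-boundary test and an invented sample string:

    import re

    def parse_dumpimage_sections(output):
        """Group the key/value lines of dumpimage -l output under their section name."""
        current, sections = None, {}
        for line in output.splitlines():
            if line.startswith((' Image', ' Configuration')):
                current = re.search(r'\((.*)\)', line).group(1)
                sections[current] = {}
            elif not line.startswith('  '):
                current = None
            elif current and ':' in line:
                key, value = line.split(':', 1)
                sections[current][key.strip()] = value.strip()
        return sections

    sample = " Image 0 (uboot)\n  Sign algo:    sha256,rsa2048:spl-oe-selftest\n"
    print(parse_dumpimage_sections(sample)["uboot"]["Sign algo"])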
diff --git a/meta/lib/oeqa/selftest/cases/gcc.py b/meta/lib/oeqa/selftest/cases/gcc.py
index 3efe15228f..89360178fe 100644
--- a/meta/lib/oeqa/selftest/cases/gcc.py
+++ b/meta/lib/oeqa/selftest/cases/gcc.py
@@ -1,9 +1,14 @@
1#
2# Copyright OpenEmbedded Contributors
3#
1# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
5#
2import os 6import os
7import time
3from oeqa.core.decorator import OETestTag 8from oeqa.core.decorator import OETestTag
4from oeqa.core.case import OEPTestResultTestCase 9from oeqa.core.case import OEPTestResultTestCase
5from oeqa.selftest.case import OESelftestTestCase 10from oeqa.selftest.case import OESelftestTestCase
6from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runqemu, Command 11from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runqemu
7 12
8def parse_values(content): 13def parse_values(content):
9 for i in content: 14 for i in content:
@@ -39,8 +44,13 @@ class GccSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
39 self.write_config("\n".join(features)) 44 self.write_config("\n".join(features))
40 45
41 recipe = "gcc-runtime" 46 recipe = "gcc-runtime"
47
48 start_time = time.time()
49
42 bitbake("{} -c check".format(recipe)) 50 bitbake("{} -c check".format(recipe))
43 51
52 end_time = time.time()
53
44 bb_vars = get_bb_vars(["B", "TARGET_SYS"], recipe) 54 bb_vars = get_bb_vars(["B", "TARGET_SYS"], recipe)
45 builddir, target_sys = bb_vars["B"], bb_vars["TARGET_SYS"] 55 builddir, target_sys = bb_vars["B"], bb_vars["TARGET_SYS"]
46 56
@@ -54,7 +64,7 @@ class GccSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
54 64
55 ptestsuite = "gcc-{}".format(suite) if suite != "gcc" else suite 65 ptestsuite = "gcc-{}".format(suite) if suite != "gcc" else suite
56 ptestsuite = ptestsuite + "-user" if ssh is None else ptestsuite 66 ptestsuite = ptestsuite + "-user" if ssh is None else ptestsuite
57 self.ptest_section(ptestsuite, logfile = logpath) 67 self.ptest_section(ptestsuite, duration = int(end_time - start_time), logfile = logpath)
58 with open(sumspath, "r") as f: 68 with open(sumspath, "r") as f:
59 for test, result in parse_values(f): 69 for test, result in parse_values(f):
60 self.ptest_result(ptestsuite, test, result) 70 self.ptest_result(ptestsuite, test, result)
@@ -114,37 +124,44 @@ class GccLibItmSelfTest(GccSelfTestBase):
114 self.run_check("libitm") 124 self.run_check("libitm")
115 125
116@OETestTag("toolchain-system") 126@OETestTag("toolchain-system")
127@OETestTag("runqemu")
117class GccCrossSelfTestSystemEmulated(GccSelfTestBase): 128class GccCrossSelfTestSystemEmulated(GccSelfTestBase):
118 def test_cross_gcc(self): 129 def test_cross_gcc(self):
119 self.run_check_emulated("gcc") 130 self.run_check_emulated("gcc")
120 131
121@OETestTag("toolchain-system") 132@OETestTag("toolchain-system")
133@OETestTag("runqemu")
122class GxxCrossSelfTestSystemEmulated(GccSelfTestBase): 134class GxxCrossSelfTestSystemEmulated(GccSelfTestBase):
123 def test_cross_gxx(self): 135 def test_cross_gxx(self):
124 self.run_check_emulated("g++") 136 self.run_check_emulated("g++")
125 137
126@OETestTag("toolchain-system") 138@OETestTag("toolchain-system")
139@OETestTag("runqemu")
127class GccLibAtomicSelfTestSystemEmulated(GccSelfTestBase): 140class GccLibAtomicSelfTestSystemEmulated(GccSelfTestBase):
128 def test_libatomic(self): 141 def test_libatomic(self):
129 self.run_check_emulated("libatomic") 142 self.run_check_emulated("libatomic")
130 143
131@OETestTag("toolchain-system") 144@OETestTag("toolchain-system")
145@OETestTag("runqemu")
132class GccLibGompSelfTestSystemEmulated(GccSelfTestBase): 146class GccLibGompSelfTestSystemEmulated(GccSelfTestBase):
133 def test_libgomp(self): 147 def test_libgomp(self):
134 self.run_check_emulated("libgomp") 148 self.run_check_emulated("libgomp")
135 149
136@OETestTag("toolchain-system") 150@OETestTag("toolchain-system")
151@OETestTag("runqemu")
137class GccLibStdCxxSelfTestSystemEmulated(GccSelfTestBase): 152class GccLibStdCxxSelfTestSystemEmulated(GccSelfTestBase):
138 def test_libstdcxx(self): 153 def test_libstdcxx(self):
139 self.run_check_emulated("libstdc++-v3") 154 self.run_check_emulated("libstdc++-v3")
140 155
141@OETestTag("toolchain-system") 156@OETestTag("toolchain-system")
157@OETestTag("runqemu")
142class GccLibSspSelfTestSystemEmulated(GccSelfTestBase): 158class GccLibSspSelfTestSystemEmulated(GccSelfTestBase):
143 def test_libssp(self): 159 def test_libssp(self):
144 self.check_skip("libssp") 160 self.check_skip("libssp")
145 self.run_check_emulated("libssp") 161 self.run_check_emulated("libssp")
146 162
147@OETestTag("toolchain-system") 163@OETestTag("toolchain-system")
164@OETestTag("runqemu")
148class GccLibItmSelfTestSystemEmulated(GccSelfTestBase): 165class GccLibItmSelfTestSystemEmulated(GccSelfTestBase):
149 def test_libitm(self): 166 def test_libitm(self):
150 self.check_skip("libitm") 167 self.check_skip("libitm")
diff --git a/meta/lib/oeqa/selftest/cases/gdbserver.py b/meta/lib/oeqa/selftest/cases/gdbserver.py
new file mode 100644
index 0000000000..9da97ae780
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/gdbserver.py
@@ -0,0 +1,67 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6import os
7import time
8import tempfile
9import shutil
10import concurrent.futures
11
12from oeqa.selftest.case import OESelftestTestCase
13from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runqemu, runCmd
14
15class GdbServerTest(OESelftestTestCase):
16 def test_gdb_server(self):
17 target_arch = self.td["TARGET_ARCH"]
18 target_sys = self.td["TARGET_SYS"]
19
20 features = """
21IMAGE_GEN_DEBUGFS = "1"
22IMAGE_FSTYPES_DEBUGFS = "tar.bz2"
23CORE_IMAGE_EXTRA_INSTALL = "gdbserver"
24 """
25 self.write_config(features)
26
27 gdb_recipe = "gdb-cross-" + target_arch
28 gdb_binary = target_sys + "-gdb"
29
30 bitbake("core-image-minimal %s:do_addto_recipe_sysroot" % gdb_recipe)
31
32 native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", gdb_recipe)
33 r = runCmd("%s --version" % gdb_binary, native_sysroot=native_sysroot, target_sys=target_sys)
34 self.assertEqual(r.status, 0)
35 self.assertIn("GNU gdb", r.output)
36 image = 'core-image-minimal'
37 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
38
39 with tempfile.TemporaryDirectory(prefix="debugfs-") as debugfs:
40 filename = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s-dbg.tar.bz2" % bb_vars['IMAGE_LINK_NAME'])
41 shutil.unpack_archive(filename, debugfs)
42 filename = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.tar.bz2" % bb_vars['IMAGE_LINK_NAME'])
43 shutil.unpack_archive(filename, debugfs)
44
45 with runqemu("core-image-minimal", runqemuparams="nographic") as qemu:
46 status, output = qemu.run_serial("kmod --help")
47 self.assertIn("modprobe", output)
48
49 with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
50 def run_gdb():
51 for _ in range(5):
52 time.sleep(2)
53 cmd = "%s --batch -ex 'set sysroot %s' -ex \"target extended-remote %s:9999\" -ex \"info line kmod_help\"" % (gdb_binary, debugfs, qemu.ip)
54 self.logger.warning("starting gdb %s" % cmd)
55 r = runCmd(cmd, native_sysroot=native_sysroot, target_sys=target_sys)
56 self.assertEqual(0, r.status)
57 line_re = r"Line \d+ of \"/usr/src/debug/kmod/.*/tools/kmod.c\" starts at address 0x[0-9A-Fa-f]+ <kmod_help>"
58 self.assertRegex(r.output, line_re)
59 break
60 else:
61 self.fail("Timed out connecting to gdb")
62 future = executor.submit(run_gdb)
63
64 status, output = qemu.run_serial("gdbserver --once :9999 kmod --help")
65 self.assertEqual(status, 1)
66 # The future either returns None, or raises an exception
67 future.result()
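The new gdbserver test starts the blocking `gdbserver --once :9999 ...` command over the serial console while a worker thread retries the gdb client connection; the final future.result() call re-raises any assertion failure from the worker. A self-contained sketch of that retry-in-a-worker pattern, with a stand-in for the connect step:

    import concurrent.futures
    import time

    def retry_in_background(attempt, tries=5, delay=2):
        """Run attempt() up to 'tries' times in a worker; exceptions surface via result()."""
        def worker():
            for _ in range(tries):
                time.sleep(delay)
                if attempt():
                    return
            raise TimeoutError("no attempt succeeded")
        with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
            future = executor.submit(worker)
            # ... the blocking server side (gdbserver on the target) would run here ...
            future.result()  # re-raises anything the worker raised

    retry_in_background(lambda: True, tries=1, delay=0)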
diff --git a/meta/lib/oeqa/selftest/cases/gitarchivetests.py b/meta/lib/oeqa/selftest/cases/gitarchivetests.py
new file mode 100644
index 0000000000..71382089c1
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/gitarchivetests.py
@@ -0,0 +1,136 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os
8import sys
9basepath = os.path.abspath(os.path.dirname(__file__) + '/../../../../../')
10lib_path = basepath + '/scripts/lib'
11sys.path = sys.path + [lib_path]
12import oeqa.utils.gitarchive as ga
13from oeqa.utils.git import GitError
14import tempfile
15import shutil
16import scriptutils
17import logging
18from oeqa.selftest.case import OESelftestTestCase
19
20logger = scriptutils.logger_create('resulttool')
21
22def create_fake_repository(commit, tag_list=[], add_remote=True):
23 """ Create a testing git directory
24
25 Initialize a simple git repository with one initial commit, and as many
26 tags on this commit as listed in tag_list
27 Returns both git directory path and gitarchive git object
28 If commit is true, fake data will be committed, otherwise it will stay in the staging area
29 If commit is true and tag_list is non-empty, all tags in tag_list will be
30 created on the initial commit
31 Fake remote will also be added to make git ls-remote work
32 """
33 fake_data_file = "fake_data.txt"
34 tempdir = tempfile.mkdtemp(prefix='fake_results.')
35 repo = ga.init_git_repo(tempdir, False, False, logger)
36 if add_remote:
37 repo.run_cmd(["remote", "add", "origin", "."])
38 with open(os.path.join(tempdir, fake_data_file), "w") as fake_data:
39 fake_data.write("Fake data")
40 if commit:
41 repo.run_cmd(["add", fake_data_file])
42 repo.run_cmd(["commit", "-m", "\"Add fake data\""])
43 for tag in tag_list:
44 repo.run_cmd(["tag", tag])
45
46 return tempdir, repo
47
48def delete_fake_repository(path):
49 shutil.rmtree(path)
50
51def tag_exists(git_obj, target_tag):
52 for tag in git_obj.run_cmd(["tag"]).splitlines():
53 if target_tag == tag:
54 return True
55 return False
56
57class GitArchiveTests(OESelftestTestCase):
58 TEST_BRANCH="main"
59 TEST_COMMIT="0f7d5df"
60 TEST_COMMIT_COUNT="42"
61
62 @classmethod
63 def setUpClass(cls):
64 super().setUpClass()
65 cls.log = logging.getLogger('gitarchivetests')
66 cls.log.setLevel(logging.DEBUG)
67
68 def test_create_first_test_tag(self):
69 path, git_obj = create_fake_repository(False)
70 keywords = {'commit': self.TEST_COMMIT, 'branch': self.TEST_BRANCH, "commit_count": self.TEST_COMMIT_COUNT}
71 target_tag = f"{self.TEST_BRANCH}/{self.TEST_COMMIT_COUNT}-g{self.TEST_COMMIT}/0"
72
73 ga.gitarchive(path, path, True, False,
74 "Results of {branch}:{commit}", "branch: {branch}\ncommit: {commit}", "{branch}",
75 False, "{branch}/{commit_count}-g{commit}/{tag_number}",
76 'Test run #{tag_number} of {branch}:{commit}', '',
77 [], [], False, keywords, logger)
78 self.assertTrue(tag_exists(git_obj, target_tag), msg=f"Tag {target_tag} has not been created")
79 delete_fake_repository(path)
80
81 def test_create_second_test_tag(self):
82 first_tag = f"{self.TEST_BRANCH}/{self.TEST_COMMIT_COUNT}-g{self.TEST_COMMIT}/0"
83 second_tag = f"{self.TEST_BRANCH}/{self.TEST_COMMIT_COUNT}-g{self.TEST_COMMIT}/1"
84 keywords = {'commit': self.TEST_COMMIT, 'branch': self.TEST_BRANCH, "commit_count": self.TEST_COMMIT_COUNT}
85
86 path, git_obj = create_fake_repository(True, [first_tag])
87 ga.gitarchive(path, path, True, False,
88 "Results of {branch}:{commit}", "branch: {branch}\ncommit: {commit}", "{branch}",
89 False, "{branch}/{commit_count}-g{commit}/{tag_number}",
90 'Test run #{tag_number} of {branch}:{commit}', '',
91 [], [], False, keywords, logger)
92 self.assertTrue(tag_exists(git_obj, second_tag), msg=f"Second tag {second_tag} has not been created")
93 delete_fake_repository(path)
94
95 def test_get_revs_on_branch(self):
96 fake_tags_list=["main/10-g0f7d5df/0", "main/10-g0f7d5df/1", "foo/20-g2468f5d/0"]
97 tag_name = "{branch}/{commit_number}-g{commit}/{tag_number}"
98
99 path, git_obj = create_fake_repository(True, fake_tags_list)
100 revs = ga.get_test_revs(logger, git_obj, tag_name, branch="main")
101 self.assertEqual(len(revs), 1)
102 self.assertEqual(revs[0].commit, "0f7d5df")
103 self.assertEqual(len(revs[0].tags), 2)
104 self.assertEqual(revs[0].tags, ['main/10-g0f7d5df/0', 'main/10-g0f7d5df/1'])
105 delete_fake_repository(path)
106
107 def test_get_tags_without_valid_remote(self):
108 url = 'git://git.yoctoproject.org/poky'
109 path, git_obj = create_fake_repository(False, None, False)
110
111 tags = ga.get_tags(git_obj, self.log, pattern="yocto-*", url=url)
112 """Test for some well established tags (released tags)"""
113 self.assertIn("yocto-4.0", tags)
114 self.assertIn("yocto-4.1", tags)
115 self.assertIn("yocto-4.2", tags)
116 delete_fake_repository(path)
117
118 def test_get_tags_with_only_local_tag(self):
119 fake_tags_list=["main/10-g0f7d5df/0", "main/10-g0f7d5df/1", "foo/20-g2468f5d/0"]
120 path, git_obj = create_fake_repository(True, fake_tags_list, False)
121
122 """No remote is configured and no url is passed: get_tags must fall
123 back to local tags
124 """
125 tags = ga.get_tags(git_obj, self.log)
126 self.assertCountEqual(tags, fake_tags_list)
127 delete_fake_repository(path)
128
129 def test_get_tags_without_valid_remote_and_wrong_url(self):
130 url = 'git://git.foo.org/bar'
131 path, git_obj = create_fake_repository(False, None, False)
132
133 """Test for some well established tags (released tags)"""
134 with self.assertRaises(GitError):
135 tags = ga.get_tags(git_obj, self.log, pattern="yocto-*", url=url)
136 delete_fake_repository(path)
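The gitarchive tests revolve around the tag template {branch}/{commit_count}-g{commit}/{tag_number}: the first archive of a commit creates .../0 and each later run for the same commit only bumps the trailing number. The increment itself happens inside oeqa.utils.gitarchive; a small illustrative helper (name invented here) that derives the next tag from the existing ones:

    def next_archive_tag(existing_tags, branch, commit_count, commit):
        """Compute the next '{branch}/{commit_count}-g{commit}/{n}' tag for this commit."""
        prefix = "%s/%s-g%s/" % (branch, commit_count, commit)
        taken = [int(t[len(prefix):]) for t in existing_tags
                 if t.startswith(prefix) and t[len(prefix):].isdigit()]
        return prefix + str(max(taken) + 1 if taken else 0)

    print(next_archive_tag(["main/42-g0f7d5df/0"], "main", "42", "0f7d5df"))
    # -> main/42-g0f7d5df/1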
diff --git a/meta/lib/oeqa/selftest/cases/glibc.py b/meta/lib/oeqa/selftest/cases/glibc.py
index c687f6ef93..bd56b2f6e7 100644
--- a/meta/lib/oeqa/selftest/cases/glibc.py
+++ b/meta/lib/oeqa/selftest/cases/glibc.py
@@ -1,10 +1,15 @@
1#
2# Copyright OpenEmbedded Contributors
3#
1# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
5#
2import os 6import os
7import time
3import contextlib 8import contextlib
4from oeqa.core.decorator import OETestTag 9from oeqa.core.decorator import OETestTag
5from oeqa.core.case import OEPTestResultTestCase 10from oeqa.core.case import OEPTestResultTestCase
6from oeqa.selftest.case import OESelftestTestCase 11from oeqa.selftest.case import OESelftestTestCase
7from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runqemu, Command 12from oeqa.utils.commands import bitbake, get_bb_var, runqemu
8from oeqa.utils.nfs import unfs_server 13from oeqa.utils.nfs import unfs_server
9 14
10def parse_values(content): 15def parse_values(content):
@@ -24,16 +29,20 @@ class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
24 features.append('TOOLCHAIN_TEST_HOST_USER = "root"') 29 features.append('TOOLCHAIN_TEST_HOST_USER = "root"')
25 features.append('TOOLCHAIN_TEST_HOST_PORT = "22"') 30 features.append('TOOLCHAIN_TEST_HOST_PORT = "22"')
26 # force single threaded test execution 31 # force single threaded test execution
27 features.append('EGLIBCPARALLELISM_task-check_pn-glibc-testsuite = "PARALLELMFLAGS="-j1""') 32 features.append('EGLIBCPARALLELISM:task-check:pn-glibc-testsuite = "PARALLELMFLAGS="-j1""')
28 self.write_config("\n".join(features)) 33 self.write_config("\n".join(features))
29 34
35 start_time = time.time()
36
30 bitbake("glibc-testsuite -c check") 37 bitbake("glibc-testsuite -c check")
31 38
39 end_time = time.time()
40
32 builddir = get_bb_var("B", "glibc-testsuite") 41 builddir = get_bb_var("B", "glibc-testsuite")
33 42
34 ptestsuite = "glibc-user" if ssh is None else "glibc" 43 ptestsuite = "glibc-user" if ssh is None else "glibc"
35 self.ptest_section(ptestsuite) 44 self.ptest_section(ptestsuite, duration = int(end_time - start_time))
36 with open(os.path.join(builddir, "tests.sum"), "r") as f: 45 with open(os.path.join(builddir, "tests.sum"), "r", errors='replace') as f:
37 for test, result in parse_values(f): 46 for test, result in parse_values(f):
38 self.ptest_result(ptestsuite, test, result) 47 self.ptest_result(ptestsuite, test, result)
39 48
@@ -41,7 +50,7 @@ class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
41 with contextlib.ExitStack() as s: 50 with contextlib.ExitStack() as s:
42 # use the base work dir, as the nfs mount, since the recipe directory may not exist 51 # use the base work dir, as the nfs mount, since the recipe directory may not exist
43 tmpdir = get_bb_var("BASE_WORKDIR") 52 tmpdir = get_bb_var("BASE_WORKDIR")
44 nfsport, mountport = s.enter_context(unfs_server(tmpdir)) 53 nfsport, mountport = s.enter_context(unfs_server(tmpdir, udp = False))
45 54
46 # build core-image-minimal with required packages 55 # build core-image-minimal with required packages
47 default_installed_packages = [ 56 default_installed_packages = [
@@ -61,7 +70,7 @@ class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
61 bitbake("core-image-minimal") 70 bitbake("core-image-minimal")
62 71
63 # start runqemu 72 # start runqemu
64 qemu = s.enter_context(runqemu("core-image-minimal", runqemuparams = "nographic")) 73 qemu = s.enter_context(runqemu("core-image-minimal", runqemuparams = "nographic", qemuparams = "-m 1024"))
65 74
66 # validate that SSH is working 75 # validate that SSH is working
67 status, _ = qemu.run("uname") 76 status, _ = qemu.run("uname")
@@ -70,7 +79,7 @@ class GlibcSelfTestBase(OESelftestTestCase, OEPTestResultTestCase):
70 # setup nfs mount 79 # setup nfs mount
71 if qemu.run("mkdir -p \"{0}\"".format(tmpdir))[0] != 0: 80 if qemu.run("mkdir -p \"{0}\"".format(tmpdir))[0] != 0:
72 raise Exception("Failed to setup NFS mount directory on target") 81 raise Exception("Failed to setup NFS mount directory on target")
73 mountcmd = "mount -o noac,nfsvers=3,port={0},udp,mountport={1} \"{2}:{3}\" \"{3}\"".format(nfsport, mountport, qemu.server_ip, tmpdir) 82 mountcmd = "mount -o noac,nfsvers=3,port={0},mountport={1} \"{2}:{3}\" \"{3}\"".format(nfsport, mountport, qemu.server_ip, tmpdir)
74 status, output = qemu.run(mountcmd) 83 status, output = qemu.run(mountcmd)
75 if status != 0: 84 if status != 0:
76 raise Exception("Failed to setup NFS mount on target ({})".format(repr(output))) 85 raise Exception("Failed to setup NFS mount on target ({})".format(repr(output)))
@@ -83,6 +92,7 @@ class GlibcSelfTest(GlibcSelfTestBase):
83 self.run_check() 92 self.run_check()
84 93
85@OETestTag("toolchain-system") 94@OETestTag("toolchain-system")
95@OETestTag("runqemu")
86class GlibcSelfTestSystemEmulated(GlibcSelfTestBase): 96class GlibcSelfTestSystemEmulated(GlibcSelfTestBase):
87 def test_glibc(self): 97 def test_glibc(self):
88 self.run_check_emulated() 98 self.run_check_emulated()
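With unfs_server() now started without UDP, the in-guest mount options drop 'udp' but still pin the NFS and mount ports returned by the server. A sketch of assembling that mount command (all values are placeholders):

    def nfs_mount_cmd(nfsport, mountport, server_ip, path, udp=False):
        """Build the mount command used to attach the host export inside the guest."""
        opts = "noac,nfsvers=3,port=%d,mountport=%d" % (nfsport, mountport)
        if udp:
            opts += ",udp"
        return 'mount -o %s "%s:%s" "%s"' % (opts, server_ip, path, path)

    print(nfs_mount_cmd(3049, 3048, "192.168.7.1", "/tmp/build"))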
diff --git a/meta/lib/oeqa/selftest/cases/gotoolchain.py b/meta/lib/oeqa/selftest/cases/gotoolchain.py
index 4fc3605f42..ee2cf4b09a 100644
--- a/meta/lib/oeqa/selftest/cases/gotoolchain.py
+++ b/meta/lib/oeqa/selftest/cases/gotoolchain.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -50,6 +52,9 @@ class oeGoToolchainSelfTest(OESelftestTestCase):
50 cmd = "cd %s/src/%s/%s; " % (self.go_path, proj, name) 52 cmd = "cd %s/src/%s/%s; " % (self.go_path, proj, name)
51 cmd = cmd + ". %s; " % self.env_SDK 53 cmd = cmd + ". %s; " % self.env_SDK
52 cmd = cmd + "export GOPATH=%s; " % self.go_path 54 cmd = cmd + "export GOPATH=%s; " % self.go_path
55 cmd = cmd + "export GOFLAGS=-modcacherw; "
56 cmd = cmd + "export CGO_ENABLED=1; "
57 cmd = cmd + "export GOPROXY=https://proxy.golang.org,direct; "
53 cmd = cmd + "${CROSS_COMPILE}go %s" % gocmd 58 cmd = cmd + "${CROSS_COMPILE}go %s" % gocmd
54 return runCmd(cmd).status 59 return runCmd(cmd).status
55 60
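The SDK go invocation now exports GOFLAGS, CGO_ENABLED and GOPROXY before calling ${CROSS_COMPILE}go. A tidier equivalent of that string concatenation, shown only as a sketch and not as the test's code:

    def build_go_cmd(project_dir, env_script, go_path, gocmd):
        """Compose the shell line: cd, source the SDK env, export Go settings, run go."""
        exports = {
            "GOPATH": go_path,
            "GOFLAGS": "-modcacherw",
            "CGO_ENABLED": "1",
            "GOPROXY": "https://proxy.golang.org,direct",
        }
        parts = ["cd %s" % project_dir, ". %s" % env_script]
        parts += ["export %s=%s" % (k, v) for k, v in exports.items()]
        parts.append("${CROSS_COMPILE}go %s" % gocmd)
        return "; ".join(parts)

    print(build_go_cmd("go/src/golang.org/x/example", "environment-setup.sh", "/tmp/go", "build"))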
diff --git a/meta/lib/oeqa/selftest/cases/image_typedep.py b/meta/lib/oeqa/selftest/cases/image_typedep.py
index 52e1080f13..17c98baf14 100644
--- a/meta/lib/oeqa/selftest/cases/image_typedep.py
+++ b/meta/lib/oeqa/selftest/cases/image_typedep.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -9,7 +11,7 @@ from oeqa.utils.commands import bitbake
9 11
10class ImageTypeDepTests(OESelftestTestCase): 12class ImageTypeDepTests(OESelftestTestCase):
11 13
12 # Verify that when specifying a IMAGE_TYPEDEP_ of the form "foo.bar" that 14 # Verify that when specifying a IMAGE_TYPEDEP: of the form "foo.bar" that
13 # the conversion type bar gets added as a dep as well 15 # the conversion type bar gets added as a dep as well
14 def test_conversion_typedep_added(self): 16 def test_conversion_typedep_added(self):
15 17
@@ -22,7 +24,7 @@ LICENSE = "MIT"
22IMAGE_FSTYPES = "testfstype" 24IMAGE_FSTYPES = "testfstype"
23 25
24IMAGE_TYPES_MASKED += "testfstype" 26IMAGE_TYPES_MASKED += "testfstype"
25IMAGE_TYPEDEP_testfstype = "tar.bz2" 27IMAGE_TYPEDEP:testfstype = "tar.bz2"
26 28
27inherit image 29inherit image
28 30
diff --git a/meta/lib/oeqa/selftest/cases/imagefeatures.py b/meta/lib/oeqa/selftest/cases/imagefeatures.py
index 6723a8198f..dc88c222bd 100644
--- a/meta/lib/oeqa/selftest/cases/imagefeatures.py
+++ b/meta/lib/oeqa/selftest/cases/imagefeatures.py
@@ -1,9 +1,12 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5from oeqa.selftest.case import OESelftestTestCase 7from oeqa.selftest.case import OESelftestTestCase
6from oeqa.utils.commands import runCmd, bitbake, get_bb_var, runqemu 8from oeqa.core.decorator import OETestTag
9from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu
7from oeqa.utils.sshcontrol import SSHControl 10from oeqa.utils.sshcontrol import SSHControl
8import glob 11import glob
9import os 12import os
@@ -14,6 +17,7 @@ class ImageFeatures(OESelftestTestCase):
14 test_user = 'tester' 17 test_user = 'tester'
15 root_user = 'root' 18 root_user = 'root'
16 19
20 @OETestTag("runqemu")
17 def test_non_root_user_can_connect_via_ssh_without_password(self): 21 def test_non_root_user_can_connect_via_ssh_without_password(self):
18 """ 22 """
19 Summary: Check if non root user can connect via ssh without password 23 Summary: Check if non root user can connect via ssh without password
@@ -39,6 +43,7 @@ class ImageFeatures(OESelftestTestCase):
39 status, output = ssh.run("true") 43 status, output = ssh.run("true")
40 self.assertEqual(status, 0, 'ssh to user %s failed with %s' % (user, output)) 44 self.assertEqual(status, 0, 'ssh to user %s failed with %s' % (user, output))
41 45
46 @OETestTag("runqemu")
42 def test_all_users_can_connect_via_ssh_without_password(self): 47 def test_all_users_can_connect_via_ssh_without_password(self):
43 """ 48 """
44 Summary: Check if all users can connect via ssh without password 49 Summary: Check if all users can connect via ssh without password
@@ -68,18 +73,6 @@ class ImageFeatures(OESelftestTestCase):
68 self.assertEqual(status, 0, 'ssh to user tester failed with %s' % output) 73 self.assertEqual(status, 0, 'ssh to user tester failed with %s' % output)
69 74
70 75
71 def test_clutter_image_can_be_built(self):
72 """
73 Summary: Check if clutter image can be built
74 Expected: 1. core-image-clutter can be built
75 Product: oe-core
76 Author: Ionut Chisanovici <ionutx.chisanovici@intel.com>
77 AutomatedBy: Daniel Istrate <daniel.alexandrux.istrate@intel.com>
78 """
79
80 # Build a core-image-clutter
81 bitbake('core-image-clutter')
82
83 def test_wayland_support_in_image(self): 76 def test_wayland_support_in_image(self):
84 """ 77 """
85 Summary: Check Wayland support in image 78 Summary: Check Wayland support in image
@@ -109,12 +102,11 @@ class ImageFeatures(OESelftestTestCase):
109 features = 'IMAGE_FSTYPES += " ext4 ext4.bmap ext4.bmap.gz"' 102 features = 'IMAGE_FSTYPES += " ext4 ext4.bmap ext4.bmap.gz"'
110 self.write_config(features) 103 self.write_config(features)
111 104
112 image_name = 'core-image-minimal' 105 image = 'core-image-minimal'
113 bitbake(image_name) 106 bitbake(image)
107 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
114 108
115 deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') 109 image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.ext4" % bb_vars['IMAGE_LINK_NAME'])
116 link_name = get_bb_var('IMAGE_LINK_NAME', image_name)
117 image_path = os.path.join(deploy_dir_image, "%s.ext4" % link_name)
118 bmap_path = "%s.bmap" % image_path 110 bmap_path = "%s.bmap" % image_path
119 gzip_path = "%s.gz" % bmap_path 111 gzip_path = "%s.gz" % bmap_path
120 112
@@ -127,8 +119,8 @@ class ImageFeatures(OESelftestTestCase):
127 image_stat = os.stat(image_path) 119 image_stat = os.stat(image_path)
128 self.assertGreater(image_stat.st_size, image_stat.st_blocks * 512) 120 self.assertGreater(image_stat.st_size, image_stat.st_blocks * 512)
129 121
130 # check if the resulting gzip is valid 122 # check if the resulting gzip is valid, --force is needed in case gzip_path is a symlink
131 self.assertTrue(runCmd('gzip -t %s' % gzip_path)) 123 self.assertTrue(runCmd('gzip --test --force %s' % gzip_path))
132 124
133 def test_hypervisor_fmts(self): 125 def test_hypervisor_fmts(self):
134 """ 126 """
@@ -143,17 +135,16 @@ class ImageFeatures(OESelftestTestCase):
143 img_types = [ 'vmdk', 'vdi', 'qcow2' ] 135 img_types = [ 'vmdk', 'vdi', 'qcow2' ]
144 features = "" 136 features = ""
145 for itype in img_types: 137 for itype in img_types:
146 features += 'IMAGE_FSTYPES += "wic.%s"\n' % itype 138 features += 'IMAGE_FSTYPES += "ext4.%s"\n' % itype
147 self.write_config(features) 139 self.write_config(features)
148 140
149 image_name = 'core-image-minimal' 141 image = 'core-image-minimal'
150 bitbake(image_name) 142 bitbake(image)
143 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
151 144
152 deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
153 link_name = get_bb_var('IMAGE_LINK_NAME', image_name)
154 for itype in img_types: 145 for itype in img_types:
155 image_path = os.path.join(deploy_dir_image, "%s.wic.%s" % 146 image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.ext4.%s" %
156 (link_name, itype)) 147 (bb_vars['IMAGE_LINK_NAME'], itype))
157 148
158 # check if result image file is in deploy directory 149 # check if result image file is in deploy directory
159 self.assertTrue(os.path.exists(image_path)) 150 self.assertTrue(os.path.exists(image_path))
@@ -173,24 +164,22 @@ class ImageFeatures(OESelftestTestCase):
173 """ 164 """
174 Summary: Check for chaining many CONVERSION_CMDs together 165 Summary: Check for chaining many CONVERSION_CMDs together
175 Expected: 1. core-image-minimal can be built with 166 Expected: 1. core-image-minimal can be built with
176 ext4.bmap.gz.bz2.lzo.xz.u-boot and also create a 167 ext4.bmap.gz.bz2.zst.xz.u-boot and also create a
177 sha256sum 168 sha256sum
178 2. The above image has a valid sha256sum 169 2. The above image has a valid sha256sum
179 Product: oe-core 170 Product: oe-core
180 Author: Tom Rini <trini@konsulko.com> 171 Author: Tom Rini <trini@konsulko.com>
181 """ 172 """
182 173
183 conv = "ext4.bmap.gz.bz2.lzo.xz.u-boot" 174 conv = "ext4.bmap.gz.bz2.zst.xz.u-boot"
184 features = 'IMAGE_FSTYPES += "%s %s.sha256sum"' % (conv, conv) 175 features = 'IMAGE_FSTYPES += "%s %s.sha256sum"' % (conv, conv)
185 self.write_config(features) 176 self.write_config(features)
186 177
187 image_name = 'core-image-minimal' 178 image = 'core-image-minimal'
188 bitbake(image_name) 179 bitbake(image)
189 180 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
190 deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') 181 image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.%s" %
191 link_name = get_bb_var('IMAGE_LINK_NAME', image_name) 182 (bb_vars['IMAGE_LINK_NAME'], conv))
192 image_path = os.path.join(deploy_dir_image, "%s.%s" %
193 (link_name, conv))
194 183
195 # check if resulting image is in the deploy directory 184 # check if resulting image is in the deploy directory
196 self.assertTrue(os.path.exists(image_path)) 185 self.assertTrue(os.path.exists(image_path))
@@ -198,7 +187,7 @@ class ImageFeatures(OESelftestTestCase):
198 187
199 # check if the resulting sha256sum agrees 188 # check if the resulting sha256sum agrees
200 self.assertTrue(runCmd('cd %s;sha256sum -c %s.%s.sha256sum' % 189 self.assertTrue(runCmd('cd %s;sha256sum -c %s.%s.sha256sum' %
201 (deploy_dir_image, link_name, conv))) 190 (bb_vars['DEPLOY_DIR_IMAGE'], bb_vars['IMAGE_LINK_NAME'], conv)))
202 191
203 def test_image_fstypes(self): 192 def test_image_fstypes(self):
204 """ 193 """
@@ -207,26 +196,43 @@ class ImageFeatures(OESelftestTestCase):
207 Product: oe-core 196 Product: oe-core
208 Author: Ed Bartosh <ed.bartosh@linux.intel.com> 197 Author: Ed Bartosh <ed.bartosh@linux.intel.com>
209 """ 198 """
210 image_name = 'core-image-minimal' 199 image = 'core-image-minimal'
211 200
212 all_image_types = set(get_bb_var("IMAGE_TYPES", image_name).split()) 201 all_image_types = set(get_bb_var("IMAGE_TYPES", image).split())
213 blacklist = set(('container', 'elf', 'f2fs', 'multiubi', 'tar.zst', 'wic.zst')) 202 skip_image_types = set(('container', 'elf', 'f2fs', 'tar.zst', 'wic.zst', 'squashfs-lzo', 'vfat'))
214 img_types = all_image_types - blacklist 203 img_types = all_image_types - skip_image_types
215 204
216 config = 'IMAGE_FSTYPES += "%s"\n'\ 205 config = """
217 'MKUBIFS_ARGS ?= "-m 2048 -e 129024 -c 2047"\n'\ 206IMAGE_FSTYPES += "%s"
218 'UBINIZE_ARGS ?= "-m 2048 -p 128KiB -s 512"' % ' '.join(img_types) 207WKS_FILE = "wictestdisk.wks"
208MKUBIFS_ARGS ?= "-m 2048 -e 129024 -c 2047"
209UBINIZE_ARGS ?= "-m 2048 -p 128KiB -s 512"
210MULTIUBI_BUILD += "mtd_2_128"
211MKUBIFS_ARGS_mtd_2_128 ?= "-m 2048 -e 129024 -c 2047"
212UBINIZE_ARGS_mtd_2_128 ?= "-m 2048 -p 128KiB -s 512"
213MULTIUBI_BUILD += "mtd_4_256"
214MKUBIFS_ARGS_mtd_4_256 ?= "-m 4096 -e 253952 -c 4096"
215UBINIZE_ARGS_mtd_4_256 ?= "-m 4096 -p 256KiB"
216""" % ' '.join(img_types)
219 self.write_config(config) 217 self.write_config(config)
220 218
221 bitbake(image_name) 219 bitbake(image)
220 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME', 'MULTIUBI_BUILD'], image)
222 221
223 deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
224 link_name = get_bb_var('IMAGE_LINK_NAME', image_name)
225 for itype in img_types: 222 for itype in img_types:
226 image_path = os.path.join(deploy_dir_image, "%s.%s" % (link_name, itype)) 223 if itype == 'multiubi':
227 # check if result image is in deploy directory 224 # For multiubi build we need to manage MULTIUBI_BUILD entry to append
228 self.assertTrue(os.path.exists(image_path), 225 # specific name to IMAGE_LINK_NAME
229 "%s image %s doesn't exist" % (itype, image_path)) 226 for vname in bb_vars['MULTIUBI_BUILD'].split():
227 image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s_%s.ubifs" % (bb_vars['IMAGE_LINK_NAME'], vname))
228 # check if result image is in deploy directory
229 self.assertTrue(os.path.exists(image_path),
230 "%s image %s doesn't exist" % (itype, image_path))
231 else:
232 image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.%s" % (bb_vars['IMAGE_LINK_NAME'], itype))
233 # check if result image is in deploy directory
234 self.assertTrue(os.path.exists(image_path),
235 "%s image %s doesn't exist" % (itype, image_path))
230 236
231 def test_useradd_static(self): 237 def test_useradd_static(self):
232 config = """ 238 config = """
@@ -240,11 +246,11 @@ USERADD_GID_TABLES += "files/static-group"
240 246
241 def test_no_busybox_base_utils(self): 247 def test_no_busybox_base_utils(self):
242 config = """ 248 config = """
243# Enable x11 249# Enable wayland
244DISTRO_FEATURES_append += "x11" 250DISTRO_FEATURES:append = " pam opengl wayland"
245 251
246# Switch to systemd 252# Switch to systemd
247DISTRO_FEATURES += "systemd" 253DISTRO_FEATURES:append = " systemd usrmerge"
248VIRTUAL-RUNTIME_init_manager = "systemd" 254VIRTUAL-RUNTIME_init_manager = "systemd"
249VIRTUAL-RUNTIME_initscripts = "" 255VIRTUAL-RUNTIME_initscripts = ""
250VIRTUAL-RUNTIME_syslog = "" 256VIRTUAL-RUNTIME_syslog = ""
@@ -257,12 +263,12 @@ VIRTUAL-RUNTIME_base-utils = "packagegroup-core-base-utils"
257VIRTUAL-RUNTIME_base-utils-hwclock = "util-linux-hwclock" 263VIRTUAL-RUNTIME_base-utils-hwclock = "util-linux-hwclock"
258VIRTUAL-RUNTIME_base-utils-syslog = "" 264VIRTUAL-RUNTIME_base-utils-syslog = ""
259 265
260# Blacklist busybox 266# Skip busybox
261PNBLACKLIST[busybox] = "Don't build this" 267SKIP_RECIPE[busybox] = "Don't build this"
262""" 268"""
263 self.write_config(config) 269 self.write_config(config)
264 270
265 bitbake("--graphviz core-image-sato") 271 bitbake("--graphviz core-image-weston")
266 272
267 def test_image_gen_debugfs(self): 273 def test_image_gen_debugfs(self):
268 """ 274 """
@@ -275,20 +281,20 @@ PNBLACKLIST[busybox] = "Don't build this"
275 Yeoh Ee Peng <ee.peng.yeoh@intel.com> 281 Yeoh Ee Peng <ee.peng.yeoh@intel.com>
276 """ 282 """
277 283
278 image_name = 'core-image-minimal' 284 image = 'core-image-minimal'
285 image_fstypes_debugfs = 'tar.bz2'
279 features = 'IMAGE_GEN_DEBUGFS = "1"\n' 286 features = 'IMAGE_GEN_DEBUGFS = "1"\n'
280 features += 'IMAGE_FSTYPES_DEBUGFS = "tar.bz2"\n' 287 features += 'IMAGE_FSTYPES_DEBUGFS = "%s"\n' % image_fstypes_debugfs
281 features += 'MACHINE = "genericx86-64"\n'
282 self.write_config(features) 288 self.write_config(features)
283 289
284 bitbake(image_name) 290 bitbake(image)
285 deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') 291 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
286 dbg_tar_file = os.path.join(deploy_dir_image, "*-dbg.rootfs.tar.bz2") 292
287 debug_files = glob.glob(dbg_tar_file) 293 dbg_tar_file = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s-dbg.%s" % (bb_vars['IMAGE_LINK_NAME'], image_fstypes_debugfs))
288 self.assertNotEqual(len(debug_files), 0, 'debug filesystem not generated at %s' % dbg_tar_file) 294 self.assertTrue(os.path.exists(dbg_tar_file), 'debug filesystem not generated at %s' % dbg_tar_file)
289 result = runCmd('cd %s; tar xvf %s' % (deploy_dir_image, dbg_tar_file)) 295 result = runCmd('cd %s; tar xvf %s' % (bb_vars['DEPLOY_DIR_IMAGE'], dbg_tar_file))
290 self.assertEqual(result.status, 0, msg='Failed to extract %s: %s' % (dbg_tar_file, result.output)) 296 self.assertEqual(result.status, 0, msg='Failed to extract %s: %s' % (dbg_tar_file, result.output))
291 result = runCmd('find %s -name %s' % (deploy_dir_image, "udevadm")) 297 result = runCmd('find %s -name %s' % (bb_vars['DEPLOY_DIR_IMAGE'], "udevadm"))
292 self.assertTrue("udevadm" in result.output, msg='Failed to find udevadm: %s' % result.output) 298 self.assertTrue("udevadm" in result.output, msg='Failed to find udevadm: %s' % result.output)
293 dbg_symbols_targets = result.output.splitlines() 299 dbg_symbols_targets = result.output.splitlines()
294 self.assertTrue(dbg_symbols_targets, msg='Failed to split udevadm: %s' % dbg_symbols_targets) 300 self.assertTrue(dbg_symbols_targets, msg='Failed to split udevadm: %s' % dbg_symbols_targets)
@@ -298,9 +304,33 @@ PNBLACKLIST[busybox] = "Don't build this"
298 304
299 def test_empty_image(self): 305 def test_empty_image(self):
300 """Test creation of image with no packages""" 306 """Test creation of image with no packages"""
301 bitbake('test-empty-image') 307 image = 'test-empty-image'
302 res_dir = get_bb_var('DEPLOY_DIR_IMAGE') 308 bitbake(image)
303 images = os.path.join(res_dir, "test-empty-image-*.manifest") 309 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
304 result = glob.glob(images) 310 manifest = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "%s.manifest" % bb_vars['IMAGE_LINK_NAME'])
305 with open(result[1],"r") as f: 311 self.assertTrue(os.path.exists(manifest))
312
313 with open(manifest, "r") as f:
306 self.assertEqual(len(f.read().strip()),0) 314 self.assertEqual(len(f.read().strip()),0)
315
316 def test_mandb(self):
317 """
318 Test that an image containing manpages has working man and apropos commands.
319 """
320 config = """
321DISTRO_FEATURES:append = " api-documentation"
322CORE_IMAGE_EXTRA_INSTALL = "man-pages kmod-doc"
323"""
324 self.write_config(config)
325 bitbake("core-image-minimal")
326
327 with runqemu('core-image-minimal', ssh=False, runqemuparams='nographic') as qemu:
328 # This manpage is provided by man-pages
329 status, output = qemu.run_serial("apropos 8859")
330 self.assertEqual(status, 1, 'Failed to run apropos: %s' % (output))
331 self.assertIn("iso_8859_15", output)
332
333 # This manpage is provided by kmod
334 status, output = qemu.run_serial("man --pager=cat modprobe")
335 self.assertEqual(status, 1, 'Failed to run man: %s' % (output))
336 self.assertIn("force-modversion", output)
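Most of the reworked imagefeatures checks resolve build artifacts from a single get_bb_vars() call, joining DEPLOY_DIR_IMAGE, IMAGE_LINK_NAME and the fstype suffix; for multiubi each MULTIUBI_BUILD entry is appended to the link name instead. A sketch of that path construction with made-up variable values:

    import os

    def deployed_image_paths(bb_vars, fstypes):
        """Yield the deploy-directory paths the tests expect for each image fstype."""
        base = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], bb_vars['IMAGE_LINK_NAME'])
        for itype in fstypes:
            if itype == 'multiubi':
                for vname in bb_vars.get('MULTIUBI_BUILD', '').split():
                    yield "%s_%s.ubifs" % (base, vname)
            else:
                yield "%s.%s" % (base, itype)

    fake_vars = {'DEPLOY_DIR_IMAGE': '/deploy', 'IMAGE_LINK_NAME': 'core-image-minimal-qemux86-64',
                 'MULTIUBI_BUILD': 'mtd_2_128 mtd_4_256'}
    print(list(deployed_image_paths(fake_vars, ['ext4', 'multiubi'])))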
diff --git a/meta/lib/oeqa/selftest/cases/incompatible_lic.py b/meta/lib/oeqa/selftest/cases/incompatible_lic.py
index 152da6332a..f4af67a239 100644
--- a/meta/lib/oeqa/selftest/cases/incompatible_lic.py
+++ b/meta/lib/oeqa/selftest/cases/incompatible_lic.py
@@ -1,10 +1,16 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
1from oeqa.selftest.case import OESelftestTestCase 6from oeqa.selftest.case import OESelftestTestCase
2from oeqa.utils.commands import bitbake 7from oeqa.utils.commands import bitbake
3 8
4class IncompatibleLicenseTests(OESelftestTestCase): 9class IncompatibleLicenseTestObsolete(OESelftestTestCase):
5 10
6 def lic_test(self, pn, pn_lic, lic): 11 def lic_test(self, pn, pn_lic, lic, error_msg=None):
7 error_msg = 'ERROR: Nothing PROVIDES \'%s\'\n%s was skipped: it has incompatible license(s): %s' % (pn, pn, pn_lic) 12 if not error_msg:
13 error_msg = 'ERROR: Nothing PROVIDES \'%s\'\n%s was skipped: it has incompatible license(s): %s' % (pn, pn, pn_lic)
8 14
9 self.write_config("INCOMPATIBLE_LICENSE += \"%s\"" % (lic)) 15 self.write_config("INCOMPATIBLE_LICENSE += \"%s\"" % (lic))
10 16
@@ -12,72 +18,81 @@ class IncompatibleLicenseTests(OESelftestTestCase):
12 if error_msg not in result.output: 18 if error_msg not in result.output:
13 raise AssertionError(result.output) 19 raise AssertionError(result.output)
14 20
15 # Verify that a package with an SPDX license (from AVAILABLE_LICENSES) 21 # Verify that a package with an SPDX license cannot be built when
16 # cannot be built when INCOMPATIBLE_LICENSE contains this SPDX license 22 # INCOMPATIBLE_LICENSE contains an alias (in SPDXLICENSEMAP) of this SPDX
17 def test_incompatible_spdx_license(self): 23 # license
18 self.lic_test('incompatible-license', 'GPL-3.0-only', 'GPL-3.0-only')
19
20 # Verify that a package with an SPDX license (from AVAILABLE_LICENSES)
21 # cannot be built when INCOMPATIBLE_LICENSE contains an alias (in
22 # SPDXLICENSEMAP) of this SPDX license
23 def test_incompatible_alias_spdx_license(self): 24 def test_incompatible_alias_spdx_license(self):
24 self.lic_test('incompatible-license', 'GPL-3.0-only', 'GPLv3') 25 self.lic_test('incompatible-license', 'GPL-3.0-only', 'GPLv3', "is an obsolete license, please use an SPDX reference in INCOMPATIBLE_LICENSE")
25
26 # Verify that a package with an SPDX license (from AVAILABLE_LICENSES)
27 # cannot be built when INCOMPATIBLE_LICENSE contains a wildcarded license
28 # matching this SPDX license
29 def test_incompatible_spdx_license_wildcard(self):
30 self.lic_test('incompatible-license', 'GPL-3.0-only', '*GPL-3.0-only')
31 26
32 # Verify that a package with an SPDX license (from AVAILABLE_LICENSES) 27 # Verify that a package with an SPDX license cannot be built when
33 # cannot be built when INCOMPATIBLE_LICENSE contains a wildcarded alias 28 # INCOMPATIBLE_LICENSE contains a wildcarded alias license matching this
34 # license matching this SPDX license 29 # SPDX license
35 def test_incompatible_alias_spdx_license_wildcard(self): 30 def test_incompatible_alias_spdx_license_wildcard(self):
36 self.lic_test('incompatible-license', 'GPL-3.0-only', '*GPLv3') 31 self.lic_test('incompatible-license', 'GPL-3.0-only', '*GPLv3', "*GPLv3 is an invalid license wildcard entry")
37
38 # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX
39 # license cannot be built when INCOMPATIBLE_LICENSE contains this SPDX
40 # license
41 def test_incompatible_spdx_license_alias(self):
42 self.lic_test('incompatible-license-alias', 'GPL-3.0-only', 'GPL-3.0-only')
43 32
44 # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX 33 # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX
45 # license cannot be built when INCOMPATIBLE_LICENSE contains this alias 34 # license cannot be built when INCOMPATIBLE_LICENSE contains this alias
46 def test_incompatible_alias_spdx_license_alias(self): 35 def test_incompatible_alias_spdx_license_alias(self):
47 self.lic_test('incompatible-license-alias', 'GPL-3.0-only', 'GPLv3') 36 self.lic_test('incompatible-license-alias', 'GPL-3.0-only', 'GPLv3', "is an obsolete license, please use an SPDX reference in INCOMPATIBLE_LICENSE")
48 37
49 # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX 38 # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX
50 # license cannot be built when INCOMPATIBLE_LICENSE contains a wildcarded 39 # license cannot be built when INCOMPATIBLE_LICENSE contains a wildcarded
51 # license matching this SPDX license 40 # license matching this SPDX license
52 def test_incompatible_spdx_license_alias_wildcard(self): 41 def test_incompatible_spdx_license_alias_wildcard(self):
53 self.lic_test('incompatible-license-alias', 'GPL-3.0-only', '*GPL-3.0') 42 self.lic_test('incompatible-license-alias', 'GPL-3.0-only', '*GPL-3.0', "*GPL-3.0 is an invalid license wildcard entry")
54 43
55 # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX 44 # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX
56 # license cannot be built when INCOMPATIBLE_LICENSE contains a wildcarded 45 # license cannot be built when INCOMPATIBLE_LICENSE contains a wildcarded
57 # alias license matching the SPDX license 46 # alias license matching the SPDX license
58 def test_incompatible_alias_spdx_license_alias_wildcard(self): 47 def test_incompatible_alias_spdx_license_alias_wildcard(self):
59 self.lic_test('incompatible-license-alias', 'GPL-3.0-only', '*GPLv3') 48 self.lic_test('incompatible-license-alias', 'GPL-3.0-only', '*GPLv3', "*GPLv3 is an invalid license wildcard entry")
60 49
61 # Verify that a package with multiple SPDX licenses (from
62 # AVAILABLE_LICENSES) cannot be built when INCOMPATIBLE_LICENSE contains
63 # some of them
64 def test_incompatible_spdx_licenses(self):
65 self.lic_test('incompatible-licenses', 'GPL-3.0-only LGPL-3.0-only', 'GPL-3.0-only LGPL-3.0-only')
66 50
67 # Verify that a package with multiple SPDX licenses (from 51 # Verify that a package with multiple SPDX licenses cannot be built when
68 # AVAILABLE_LICENSES) cannot be built when INCOMPATIBLE_LICENSE contains a 52 # INCOMPATIBLE_LICENSE contains a wildcard to some of them
69 # wildcard to some of them
70 def test_incompatible_spdx_licenses_wildcard(self): 53 def test_incompatible_spdx_licenses_wildcard(self):
71 self.lic_test('incompatible-licenses', 'GPL-3.0-only LGPL-3.0-only', '*GPL-3.0-only') 54 self.lic_test('incompatible-licenses', 'GPL-3.0-only LGPL-3.0-only', '*GPL-3.0-only', "*GPL-3.0-only is an invalid license wildcard entry")
72 55
73 # Verify that a package with multiple SPDX licenses (from 56
74 # AVAILABLE_LICENSES) cannot be built when INCOMPATIBLE_LICENSE contains a 57 # Verify that a package with multiple SPDX licenses cannot be built when
75 # wildcard matching all licenses 58 # INCOMPATIBLE_LICENSE contains a wildcard matching all licenses
76 def test_incompatible_all_licenses_wildcard(self): 59 def test_incompatible_all_licenses_wildcard(self):
77 self.lic_test('incompatible-licenses', 'GPL-2.0-only GPL-3.0-only LGPL-3.0-only', '*') 60 self.lic_test('incompatible-licenses', 'GPL-2.0-only GPL-3.0-only LGPL-3.0-only', '*', "* is an invalid license wildcard entry")
61
62class IncompatibleLicenseTests(OESelftestTestCase):
63
64 def lic_test(self, pn, pn_lic, lic):
65 error_msg = 'ERROR: Nothing PROVIDES \'%s\'\n%s was skipped: it has incompatible license(s): %s' % (pn, pn, pn_lic)
66
67 self.write_config("INCOMPATIBLE_LICENSE += \"%s\"" % (lic))
68
69 result = bitbake('%s --dry-run' % (pn), ignore_status=True)
70 if error_msg not in result.output:
71 raise AssertionError(result.output)
72
73 # Verify that a package with an SPDX license cannot be built when
74 # INCOMPATIBLE_LICENSE contains this SPDX license
75 def test_incompatible_spdx_license(self):
76 self.lic_test('incompatible-license', 'GPL-3.0-only', 'GPL-3.0-only')
78 77
79 # Verify that a package with a non-SPDX license (neither in 78 # Verify that a package with an SPDX license cannot be built when
80 # AVAILABLE_LICENSES nor in SPDXLICENSEMAP) cannot be built when 79 # INCOMPATIBLE_LICENSE contains a wildcarded license matching this SPDX
80 # license
81 def test_incompatible_spdx_license_wildcard(self):
82 self.lic_test('incompatible-license', 'GPL-3.0-only', 'GPL-3.0*')
83
84 # Verify that a package with an alias (from SPDXLICENSEMAP) to an SPDX
85 # license cannot be built when INCOMPATIBLE_LICENSE contains this SPDX
86 # license
87 def test_incompatible_spdx_license_alias(self):
88 self.lic_test('incompatible-license-alias', 'GPL-3.0-only', 'GPL-3.0-only')
89
90 # Verify that a package with multiple SPDX licenses cannot be built when
91 # INCOMPATIBLE_LICENSE contains some of them
92 def test_incompatible_spdx_licenses(self):
93 self.lic_test('incompatible-licenses', 'GPL-3.0-only LGPL-3.0-only', 'GPL-3.0-only LGPL-3.0-only')
94
95 # Verify that a package with a non-SPDX license cannot be built when
81 # INCOMPATIBLE_LICENSE contains this license 96 # INCOMPATIBLE_LICENSE contains this license
82 def test_incompatible_nonspdx_license(self): 97 def test_incompatible_nonspdx_license(self):
83 self.lic_test('incompatible-nonspdx-license', 'FooLicense', 'FooLicense') 98 self.lic_test('incompatible-nonspdx-license', 'FooLicense', 'FooLicense')
@@ -85,8 +100,8 @@ class IncompatibleLicenseTests(OESelftestTestCase):
85class IncompatibleLicensePerImageTests(OESelftestTestCase): 100class IncompatibleLicensePerImageTests(OESelftestTestCase):
86 def default_config(self): 101 def default_config(self):
87 return """ 102 return """
88IMAGE_INSTALL_append = " bash" 103IMAGE_INSTALL:append = " bash"
89INCOMPATIBLE_LICENSE_pn-core-image-minimal = "GPL-3.0 LGPL-3.0" 104INCOMPATIBLE_LICENSE:pn-core-image-minimal = "GPL-3.0* LGPL-3.0*"
90""" 105"""
91 106
92 def test_bash_default(self): 107 def test_bash_default(self):
@@ -98,7 +113,8 @@ INCOMPATIBLE_LICENSE_pn-core-image-minimal = "GPL-3.0 LGPL-3.0"
98 raise AssertionError(result.output) 113 raise AssertionError(result.output)
99 114
100 def test_bash_and_license(self): 115 def test_bash_and_license(self):
101 self.write_config(self.default_config() + '\nLICENSE_append_pn-bash = " & SomeLicense"') 116 self.disable_class("create-spdx")
117 self.write_config(self.default_config() + '\nLICENSE:append:pn-bash = " & SomeLicense"')
102 error_msg = "ERROR: core-image-minimal-1.0-r0 do_rootfs: Package bash cannot be installed into the image because it has incompatible license(s): GPL-3.0-or-later" 118 error_msg = "ERROR: core-image-minimal-1.0-r0 do_rootfs: Package bash cannot be installed into the image because it has incompatible license(s): GPL-3.0-or-later"
103 119
104 result = bitbake('core-image-minimal', ignore_status=True) 120 result = bitbake('core-image-minimal', ignore_status=True)
@@ -106,30 +122,31 @@ INCOMPATIBLE_LICENSE_pn-core-image-minimal = "GPL-3.0 LGPL-3.0"
106 raise AssertionError(result.output) 122 raise AssertionError(result.output)
107 123
108 def test_bash_or_license(self): 124 def test_bash_or_license(self):
109 self.write_config(self.default_config() + '\nLICENSE_append_pn-bash = " | SomeLicense"') 125 self.disable_class("create-spdx")
126 self.write_config(self.default_config() + '\nLICENSE:append:pn-bash = " | SomeLicense"')
110 127
111 bitbake('core-image-minimal') 128 bitbake('core-image-minimal')
112 129
113 def test_bash_whitelist(self): 130 def test_bash_license_exceptions(self):
114 self.write_config(self.default_config() + '\nWHITELIST_GPL-3.0_pn-core-image-minimal = "bash"') 131 self.write_config(self.default_config() + '\nINCOMPATIBLE_LICENSE_EXCEPTIONS:pn-core-image-minimal = "bash:GPL-3.0-or-later"')
115 132
116 bitbake('core-image-minimal') 133 bitbake('core-image-minimal')
117 134
118class NoGPL3InImagesTests(OESelftestTestCase): 135class NoGPL3InImagesTests(OESelftestTestCase):
119 def test_core_image_minimal(self): 136 def test_core_image_minimal(self):
120 self.write_config(""" 137 self.write_config("""
121INCOMPATIBLE_LICENSE_pn-core-image-minimal = "GPL-3.0 LGPL-3.0" 138INCOMPATIBLE_LICENSE:pn-core-image-minimal = "GPL-3.0* LGPL-3.0*"
122""") 139""")
123 bitbake('core-image-minimal') 140 bitbake('core-image-minimal')
124 141
125 def test_core_image_full_cmdline(self): 142 def test_core_image_full_cmdline_weston(self):
126 self.write_config(""" 143 self.write_config("""
127INHERIT += "testimage"\n 144IMAGE_CLASSES += "testimage"
128INCOMPATIBLE_LICENSE_pn-core-image-full-cmdline = "GPL-3.0 LGPL-3.0"\n 145INCOMPATIBLE_LICENSE:pn-core-image-full-cmdline = "GPL-3.0* LGPL-3.0*"
129RDEPENDS_packagegroup-core-full-cmdline-utils_remove = "bash bc coreutils cpio ed findutils gawk grep mc mc-fish mc-helpers mc-helpers-perl sed tar time"\n 146INCOMPATIBLE_LICENSE:pn-core-image-weston = "GPL-3.0* LGPL-3.0*"
130RDEPENDS_packagegroup-core-full-cmdline-dev-utils_remove = "diffutils m4 make patch"\n 147
131RDEPENDS_packagegroup-core-full-cmdline-multiuser_remove = "gzip"\n 148require conf/distro/include/no-gplv3.inc
132""") 149""")
133 bitbake('core-image-full-cmdline') 150 bitbake('core-image-full-cmdline core-image-weston')
134 bitbake('-c testimage core-image-full-cmdline') 151 bitbake('-c testimage core-image-full-cmdline core-image-weston')
135 152
diff --git a/meta/lib/oeqa/selftest/cases/intercept.py b/meta/lib/oeqa/selftest/cases/intercept.py
new file mode 100644
index 0000000000..12583c3099
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/intercept.py
@@ -0,0 +1,21 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7from oeqa.selftest.case import OESelftestTestCase
8from oeqa.utils.commands import bitbake
9
10class GitCheck(OESelftestTestCase):
11 def test_git_intercept(self):
12 """
13 Git binaries with CVE-2022-24765 fixed will refuse to operate on a
14 repository which is owned by a different user. This breaks our
15 do_install task as that runs inside pseudo, so the git repository is
16 owned by the build user but git is running as (fake)root.
17
18 We have an intercept which disables pseudo, so verify that it works.
19 """
20 bitbake("git-submodule-test -c test_git_as_user")
21 bitbake("git-submodule-test -c test_git_as_root")
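For context on the behaviour the docstring above describes: git releases carrying the CVE-2022-24765 fix refuse to operate on a repository owned by a different user unless the path is explicitly trusted. The sketch below only illustrates that check and the usual safe.directory opt-out; it is not the pseudo-disabling intercept the test actually exercises, and the path handling is hypothetical.

import subprocess

def git_status_with_safe_directory(repo_path):
    # When the effective uid differs from the repository owner, recent git
    # aborts with a "dubious ownership" error; marking the path as a
    # safe.directory is the standard opt-out.
    result = subprocess.run(["git", "-C", repo_path, "status"],
                            capture_output=True, text=True)
    if result.returncode != 0 and "dubious ownership" in result.stderr:
        subprocess.run(["git", "config", "--global", "--add",
                        "safe.directory", repo_path], check=True)
        result = subprocess.run(["git", "-C", repo_path, "status"],
                                capture_output=True, text=True)
    return result.returncode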
diff --git a/meta/lib/oeqa/selftest/cases/kerneldevelopment.py b/meta/lib/oeqa/selftest/cases/kerneldevelopment.py
index a61876ee61..b1f78a0cd1 100644
--- a/meta/lib/oeqa/selftest/cases/kerneldevelopment.py
+++ b/meta/lib/oeqa/selftest/cases/kerneldevelopment.py
@@ -1,3 +1,9 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
1import os 7import os
2from oeqa.selftest.case import OESelftestTestCase 8from oeqa.selftest.case import OESelftestTestCase
3from oeqa.utils.commands import runCmd, get_bb_var 9from oeqa.utils.commands import runCmd, get_bb_var
@@ -58,7 +64,8 @@ class KernelDev(OESelftestTestCase):
58 recipe_append = os.path.join(self.recipeskernel_dir, 'linux-yocto_%.bbappend') 64 recipe_append = os.path.join(self.recipeskernel_dir, 'linux-yocto_%.bbappend')
59 with open(recipe_append, 'w+') as fh: 65 with open(recipe_append, 'w+') as fh:
60 fh.write('SRC_URI += "file://%s"\n' % patch_name) 66 fh.write('SRC_URI += "file://%s"\n' % patch_name)
61 fh.write('FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"') 67 fh.write('ERROR_QA:remove:pn-linux-yocto = "patch-status"\n')
68 fh.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"')
62 69
63 runCmd('bitbake virtual/kernel -c clean') 70 runCmd('bitbake virtual/kernel -c clean')
64 runCmd('bitbake virtual/kernel -c patch') 71 runCmd('bitbake virtual/kernel -c patch')
diff --git a/meta/lib/oeqa/selftest/cases/layerappend.py b/meta/lib/oeqa/selftest/cases/layerappend.py
index 05e9426fc6..379ed589ad 100644
--- a/meta/lib/oeqa/selftest/cases/layerappend.py
+++ b/meta/lib/oeqa/selftest/cases/layerappend.py
@@ -1,11 +1,13 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import os 7import os
6 8
7from oeqa.selftest.case import OESelftestTestCase 9from oeqa.selftest.case import OESelftestTestCase
8from oeqa.utils.commands import runCmd, bitbake, get_bb_var 10from oeqa.utils.commands import bitbake, get_bb_var
9import oeqa.utils.ftools as ftools 11import oeqa.utils.ftools as ftools
10 12
11class LayerAppendTests(OESelftestTestCase): 13class LayerAppendTests(OESelftestTestCase):
@@ -30,20 +32,20 @@ python do_build() {
30addtask build 32addtask build
31""" 33"""
32 append = """ 34 append = """
33FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:" 35FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"
34 36
35SRC_URI_append = " file://appendtest.txt" 37SRC_URI:append = " file://appendtest.txt"
36 38
37sysroot_stage_all_append() { 39sysroot_stage_all:append() {
38 install -m 644 ${WORKDIR}/appendtest.txt ${SYSROOT_DESTDIR}/ 40 install -m 644 ${WORKDIR}/appendtest.txt ${SYSROOT_DESTDIR}/
39} 41}
40 42
41""" 43"""
42 44
43 append2 = """ 45 append2 = """
44FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:" 46FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"
45 47
46SRC_URI_append = " file://appendtest.txt" 48SRC_URI:append = " file://appendtest.txt"
47""" 49"""
48 layerappend = '' 50 layerappend = ''
49 51
diff --git a/meta/lib/oeqa/selftest/cases/liboe.py b/meta/lib/oeqa/selftest/cases/liboe.py
index afe8f8809f..d5ffffdcb4 100644
--- a/meta/lib/oeqa/selftest/cases/liboe.py
+++ b/meta/lib/oeqa/selftest/cases/liboe.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -97,6 +99,6 @@ class LibOE(OESelftestTestCase):
97 99
98 dstcnt = len(os.listdir(dst)) 100 dstcnt = len(os.listdir(dst))
99 srccnt = len(os.listdir(src)) 101 srccnt = len(os.listdir(src))
100 self.assertEquals(dstcnt, len(testfiles), "Number of files in dst (%s) differs from number of files in src(%s)." % (dstcnt, srccnt)) 102 self.assertEqual(dstcnt, len(testfiles), "Number of files in dst (%s) differs from number of files in src(%s)." % (dstcnt, srccnt))
101 103
102 oe.path.remove(testloc) 104 oe.path.remove(testloc)
diff --git a/meta/lib/oeqa/selftest/cases/lic_checksum.py b/meta/lib/oeqa/selftest/cases/lic_checksum.py
index bae935d697..2d0b805b90 100644
--- a/meta/lib/oeqa/selftest/cases/lic_checksum.py
+++ b/meta/lib/oeqa/selftest/cases/lic_checksum.py
@@ -1,16 +1,36 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import os 7import os
6import tempfile 8import tempfile
9import urllib
7 10
8from oeqa.selftest.case import OESelftestTestCase 11from oeqa.selftest.case import OESelftestTestCase
9from oeqa.utils.commands import bitbake 12from oeqa.utils.commands import bitbake
10from oeqa.utils import CommandError
11 13
12class LicenseTests(OESelftestTestCase): 14class LicenseTests(OESelftestTestCase):
13 15
16 def test_checksum_with_space(self):
17 bitbake_cmd = '-c populate_lic emptytest'
18
19 lic_file, lic_path = tempfile.mkstemp(" -afterspace")
20 os.close(lic_file)
21 #self.track_for_cleanup(lic_path)
22
23 self.write_config("INHERIT:remove = \"report-error\"")
24
25 self.write_recipeinc('emptytest', """
26INHIBIT_DEFAULT_DEPS = "1"
27LIC_FILES_CHKSUM = "file://%s;md5=d41d8cd98f00b204e9800998ecf8427e"
28SRC_URI = "file://%s;md5=d41d8cd98f00b204e9800998ecf8427e"
29""" % (urllib.parse.quote(lic_path), urllib.parse.quote(lic_path)))
30 result = bitbake(bitbake_cmd)
31 self.delete_recipeinc('emptytest')
32
33
14 # Verify that changing a license file that has an absolute path causes 34 # Verify that changing a license file that has an absolute path causes
15 # the license qa to fail due to a mismatched md5sum. 35 # the license qa to fail due to a mismatched md5sum.
16 def test_nonmatching_checksum(self): 36 def test_nonmatching_checksum(self):
@@ -21,7 +41,7 @@ class LicenseTests(OESelftestTestCase):
21 os.close(lic_file) 41 os.close(lic_file)
22 self.track_for_cleanup(lic_path) 42 self.track_for_cleanup(lic_path)
23 43
24 self.write_config("INHERIT_remove = \"report-error\"") 44 self.write_config("INHERIT:remove = \"report-error\"")
25 45
26 self.write_recipeinc('emptytest', """ 46 self.write_recipeinc('emptytest', """
27INHIBIT_DEFAULT_DEPS = "1" 47INHIBIT_DEFAULT_DEPS = "1"
@@ -34,5 +54,6 @@ SRC_URI = "file://%s;md5=d41d8cd98f00b204e9800998ecf8427e"
34 f.write("data") 54 f.write("data")
35 55
36 result = bitbake(bitbake_cmd, ignore_status=True) 56 result = bitbake(bitbake_cmd, ignore_status=True)
57 self.delete_recipeinc('emptytest')
37 if error_msg not in result.output: 58 if error_msg not in result.output:
38 raise AssertionError(result.output) 59 raise AssertionError(result.output)
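The checksum-with-space test added above works because the license path is percent-encoded before being placed in LIC_FILES_CHKSUM and SRC_URI. A quick illustration of that encoding step (the path value is hypothetical, standing in for the mkstemp() result):

import urllib.parse

lic_path = "/tmp/tmpabc123 -afterspace"   # hypothetical mkstemp() result
print(urllib.parse.quote(lic_path))       # -> /tmp/tmpabc123%20-afterspace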
diff --git a/meta/lib/oeqa/selftest/cases/locales.py b/meta/lib/oeqa/selftest/cases/locales.py
new file mode 100644
index 0000000000..4ca8ffb7aa
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/locales.py
@@ -0,0 +1,54 @@
1#
2# SPDX-License-Identifier: MIT
3#
4
5from oeqa.selftest.case import OESelftestTestCase
6from oeqa.core.decorator import OETestTag
7from oeqa.utils.commands import bitbake, runqemu
8
9class LocalesTest(OESelftestTestCase):
10
11 @OETestTag("runqemu")
12
13 def run_locales_test(self, binary_enabled):
14 features = []
15 features.append('EXTRA_IMAGE_FEATURES = "empty-root-password allow-empty-password allow-root-login"')
16 features.append('IMAGE_INSTALL:append = " glibc-utils localedef"')
17 features.append('GLIBC_GENERATE_LOCALES = "en_US.UTF-8 fr_FR.UTF-8"')
18 features.append('IMAGE_LINGUAS:append = " en-us fr-fr"')
19 if binary_enabled:
20 features.append('ENABLE_BINARY_LOCALE_GENERATION = "1"')
21 else:
22 features.append('ENABLE_BINARY_LOCALE_GENERATION = "0"')
23 self.write_config("\n".join(features))
24
25 # Build a core-image-minimal
26 bitbake('core-image-minimal')
27
28 with runqemu("core-image-minimal", ssh=False, runqemuparams='nographic') as qemu:
29 cmd = "locale -a"
30 status, output = qemu.run_serial(cmd)
31 # output must include fr_FR or fr_FR.UTF-8
32 self.assertEqual(status, 1, msg='locale test command failed: output: %s' % output)
33 self.assertIn("fr_FR", output, msg='locale -a test failed: output: %s' % output)
34
35 cmd = "localedef --list-archive -v"
36 status, output = qemu.run_serial(cmd)
37 # output must include fr_FR.utf8
38 self.assertEqual(status, 1, msg='localedef test command failed: output: %s' % output)
39 self.assertIn("fr_FR.utf8", output, msg='localedef test failed: output: %s' % output)
40
41 def test_locales_on(self):
42 """
43 Summary: Test the locales are generated
44 Expected: 1. Check the locale exist in the locale-archive
45 2. Check the locale exist for the glibc
46 3. Check the locale can be generated
47 Product: oe-core
48 Author: Louis Rannou <lrannou@baylibre.com>
49 AutomatedBy: Louis Rannou <lrannou@baylibre.com>
50 """
51 self.run_locales_test(True)
52
53 def test_locales_off(self):
54 self.run_locales_test(False)
diff --git a/meta/lib/oeqa/selftest/cases/manifest.py b/meta/lib/oeqa/selftest/cases/manifest.py
index 5d13f35468..07a6c80489 100644
--- a/meta/lib/oeqa/selftest/cases/manifest.py
+++ b/meta/lib/oeqa/selftest/cases/manifest.py
@@ -1,11 +1,13 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import os 7import os
6 8
7from oeqa.selftest.case import OESelftestTestCase 9from oeqa.selftest.case import OESelftestTestCase
8from oeqa.utils.commands import get_bb_var, get_bb_vars, bitbake 10from oeqa.utils.commands import get_bb_var, bitbake
9 11
10class ManifestEntry: 12class ManifestEntry:
11 '''A manifest item of a collection able to list missing packages''' 13 '''A manifest item of a collection able to list missing packages'''
diff --git a/meta/lib/oeqa/selftest/cases/meta_ide.py b/meta/lib/oeqa/selftest/cases/meta_ide.py
index 6f10d30dc9..ffe0d2604d 100644
--- a/meta/lib/oeqa/selftest/cases/meta_ide.py
+++ b/meta/lib/oeqa/selftest/cases/meta_ide.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -16,13 +18,15 @@ class MetaIDE(OESelftestTestCase):
16 def setUpClass(cls): 18 def setUpClass(cls):
17 super(MetaIDE, cls).setUpClass() 19 super(MetaIDE, cls).setUpClass()
18 bitbake('meta-ide-support') 20 bitbake('meta-ide-support')
19 bb_vars = get_bb_vars(['MULTIMACH_TARGET_SYS', 'TMPDIR', 'COREBASE']) 21 bitbake('build-sysroots -c build_native_sysroot')
22 bitbake('build-sysroots -c build_target_sysroot')
23 bb_vars = get_bb_vars(['MULTIMACH_TARGET_SYS', 'DEPLOY_DIR_IMAGE', 'COREBASE'])
20 cls.environment_script = 'environment-setup-%s' % bb_vars['MULTIMACH_TARGET_SYS'] 24 cls.environment_script = 'environment-setup-%s' % bb_vars['MULTIMACH_TARGET_SYS']
21 cls.tmpdir = bb_vars['TMPDIR'] 25 cls.deploydir = bb_vars['DEPLOY_DIR_IMAGE']
22 cls.environment_script_path = '%s/%s' % (cls.tmpdir, cls.environment_script) 26 cls.environment_script_path = '%s/%s' % (cls.deploydir, cls.environment_script)
23 cls.corebasedir = bb_vars['COREBASE'] 27 cls.corebasedir = bb_vars['COREBASE']
24 cls.tmpdir_metaideQA = tempfile.mkdtemp(prefix='metaide') 28 cls.tmpdir_metaideQA = tempfile.mkdtemp(prefix='metaide')
25 29
26 @classmethod 30 @classmethod
27 def tearDownClass(cls): 31 def tearDownClass(cls):
28 shutil.rmtree(cls.tmpdir_metaideQA, ignore_errors=True) 32 shutil.rmtree(cls.tmpdir_metaideQA, ignore_errors=True)
@@ -40,12 +44,17 @@ class MetaIDE(OESelftestTestCase):
40 def test_meta_ide_can_build_cpio_project(self): 44 def test_meta_ide_can_build_cpio_project(self):
41 dl_dir = self.td.get('DL_DIR', None) 45 dl_dir = self.td.get('DL_DIR', None)
42 self.project = SDKBuildProject(self.tmpdir_metaideQA + "/cpio/", self.environment_script_path, 46 self.project = SDKBuildProject(self.tmpdir_metaideQA + "/cpio/", self.environment_script_path,
43 "https://ftp.gnu.org/gnu/cpio/cpio-2.13.tar.gz", 47 "https://ftp.gnu.org/gnu/cpio/cpio-2.15.tar.gz",
44 self.tmpdir_metaideQA, self.td['DATETIME'], dl_dir=dl_dir) 48 self.tmpdir_metaideQA, self.td['DATETIME'], dl_dir=dl_dir)
45 self.project.download_archive() 49 self.project.download_archive()
46 self.assertEqual(self.project.run_configure('$CONFIGURE_FLAGS --disable-maintainer-mode','sed -i -e "/char \*program_name/d" src/global.c;'), 0, 50 self.assertEqual(self.project.run_configure('$CONFIGURE_FLAGS'), 0,
47 msg="Running configure failed") 51 msg="Running configure failed")
48 self.assertEqual(self.project.run_make(), 0, 52 self.assertEqual(self.project.run_make(), 0,
49 msg="Running make failed") 53 msg="Running make failed")
50 self.assertEqual(self.project.run_install(), 0, 54 self.assertEqual(self.project.run_install(), 0,
51 msg="Running make install failed") 55 msg="Running make install failed")
56
57 def test_meta_ide_can_run_sdk_tests(self):
58 bitbake('-c populate_sysroot gtk+3')
59 bitbake('build-sysroots -c build_target_sysroot')
60 bitbake('-c testsdk meta-ide-support')
diff --git a/meta/lib/oeqa/selftest/cases/minidebuginfo.py b/meta/lib/oeqa/selftest/cases/minidebuginfo.py
new file mode 100644
index 0000000000..2919f07939
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/minidebuginfo.py
@@ -0,0 +1,44 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6import os
7import subprocess
8import tempfile
9import shutil
10
11from oeqa.selftest.case import OESelftestTestCase
12from oeqa.utils.commands import bitbake, get_bb_var, get_bb_vars, runCmd
13
14
15class Minidebuginfo(OESelftestTestCase):
16 def test_minidebuginfo(self):
17 target_sys = get_bb_var("TARGET_SYS")
18 binutils = "binutils-cross-{}".format(get_bb_var("TARGET_ARCH"))
19
20 image = 'core-image-minimal'
21 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME', 'READELF'], image)
22
23 self.write_config("""
24DISTRO_FEATURES:append = " minidebuginfo"
25IMAGE_FSTYPES = "tar.bz2"
26""")
27 bitbake("{} {}:do_addto_recipe_sysroot".format(image, binutils))
28
29 native_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", binutils)
30
31 # confirm that executables and shared libraries contain an ELF section
32 # ".gnu_debugdata" which stores minidebuginfo.
33 with tempfile.TemporaryDirectory(prefix = "unpackfs-") as unpackedfs:
34 filename = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], "{}.tar.bz2".format(bb_vars['IMAGE_LINK_NAME']))
35 shutil.unpack_archive(filename, unpackedfs)
36
37 r = runCmd([bb_vars['READELF'], "-W", "-S", os.path.join(unpackedfs, "bin", "busybox")],
38 native_sysroot = native_sysroot, target_sys = target_sys)
39 self.assertIn(".gnu_debugdata", r.output)
40
41 r = runCmd([bb_vars['READELF'], "-W", "-S", os.path.join(unpackedfs, "lib", "libc.so.6")],
42 native_sysroot = native_sysroot, target_sys = target_sys)
43 self.assertIn(".gnu_debugdata", r.output)
44
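The minidebuginfo test shells out to the cross readelf to look for the .gnu_debugdata section. Purely as an illustrative alternative (not what the test does), the same check could be written with pyelftools, assuming that module is available on the host:

from elftools.elf.elffile import ELFFile

def has_minidebuginfo(path):
    # True if the ELF file carries a .gnu_debugdata section, i.e. the
    # xz-compressed minidebuginfo embedded by the packaging step.
    with open(path, "rb") as f:
        return ELFFile(f).get_section_by_name(".gnu_debugdata") is not None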
diff --git a/meta/lib/oeqa/selftest/cases/multiconfig.py b/meta/lib/oeqa/selftest/cases/multiconfig.py
index 39b92f2439..f509cbf607 100644
--- a/meta/lib/oeqa/selftest/cases/multiconfig.py
+++ b/meta/lib/oeqa/selftest/cases/multiconfig.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -17,7 +19,7 @@ class MultiConfig(OESelftestTestCase):
17 """ 19 """
18 20
19 config = """ 21 config = """
20IMAGE_INSTALL_append_pn-core-image-full-cmdline = " multiconfig-image-packager-tiny multiconfig-image-packager-musl" 22IMAGE_INSTALL:append:pn-core-image-full-cmdline = " multiconfig-image-packager-tiny multiconfig-image-packager-musl"
21BBMULTICONFIG = "tiny musl" 23BBMULTICONFIG = "tiny musl"
22""" 24"""
23 self.write_config(config) 25 self.write_config(config)
@@ -52,7 +54,7 @@ TMPDIR = "${TOPDIR}/tmp-mc-tiny"
52 self.write_config(config) 54 self.write_config(config)
53 55
54 testconfig = textwrap.dedent('''\ 56 testconfig = textwrap.dedent('''\
55 MCTESTVAR_append = "1" 57 MCTESTVAR:append = "1"
56 ''') 58 ''')
57 self.write_config(testconfig, 'test') 59 self.write_config(testconfig, 'test')
58 60
@@ -64,9 +66,22 @@ TMPDIR = "${TOPDIR}/tmp-mc-tiny"
64 self.assertIn('MCTESTVAR=test1', result.output.splitlines()) 66 self.assertIn('MCTESTVAR=test1', result.output.splitlines())
65 67
66 testconfig = textwrap.dedent('''\ 68 testconfig = textwrap.dedent('''\
67 MCTESTVAR_append = "2" 69 MCTESTVAR:append = "2"
68 ''') 70 ''')
69 self.write_config(testconfig, 'test') 71 self.write_config(testconfig, 'test')
70 72
71 result = bitbake('mc:test:multiconfig-test-parse -c showvar') 73 result = bitbake('mc:test:multiconfig-test-parse -c showvar')
72 self.assertIn('MCTESTVAR=test2', result.output.splitlines()) 74 self.assertIn('MCTESTVAR=test2', result.output.splitlines())
75
76 def test_multiconfig_inlayer(self):
77 """
78 Test that a multiconfig from meta-selftest works.
79 """
80
81 config = """
82BBMULTICONFIG = "muslmc"
83"""
84 self.write_config(config)
85
86 # Only a dry run of mc:muslmc:bash is needed to check that the multiconfig is present
87 bitbake('mc:muslmc:bash -n')
diff --git a/meta/lib/oeqa/selftest/cases/newlib.py b/meta/lib/oeqa/selftest/cases/newlib.py
new file mode 100644
index 0000000000..fe57aa51f2
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/newlib.py
@@ -0,0 +1,13 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7from oeqa.selftest.case import OESelftestTestCase
8from oeqa.utils.commands import bitbake
9
10class NewlibTest(OESelftestTestCase):
11 def test_newlib(self):
12 self.write_config('TCLIBC = "newlib"')
13 bitbake("newlib libgloss")
diff --git a/meta/lib/oeqa/selftest/cases/oelib/buildhistory.py b/meta/lib/oeqa/selftest/cases/oelib/buildhistory.py
index 802a91a488..042ccdd2b4 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/buildhistory.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/buildhistory.py
@@ -1,8 +1,11 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import os 7import os
8import sys
6from oeqa.selftest.case import OESelftestTestCase 9from oeqa.selftest.case import OESelftestTestCase
7import tempfile 10import tempfile
8import operator 11import operator
@@ -11,15 +14,14 @@ from oeqa.utils.commands import get_bb_var
11class TestBlobParsing(OESelftestTestCase): 14class TestBlobParsing(OESelftestTestCase):
12 15
13 def setUp(self): 16 def setUp(self):
14 import time
15 self.repo_path = tempfile.mkdtemp(prefix='selftest-buildhistory', 17 self.repo_path = tempfile.mkdtemp(prefix='selftest-buildhistory',
16 dir=get_bb_var('TOPDIR')) 18 dir=get_bb_var('TOPDIR'))
17 19
18 try: 20 try:
19 from git import Repo 21 from git import Repo
20 self.repo = Repo.init(self.repo_path) 22 self.repo = Repo.init(self.repo_path)
21 except ImportError: 23 except ImportError as e:
22 self.skipTest('Python module GitPython is not present') 24 self.skipTest('Python module GitPython is not present (%s) (%s)' % (e, sys.path))
23 25
24 self.test_file = "test" 26 self.test_file = "test"
25 self.var_map = {} 27 self.var_map = {}
@@ -28,6 +30,16 @@ class TestBlobParsing(OESelftestTestCase):
28 import shutil 30 import shutil
29 shutil.rmtree(self.repo_path) 31 shutil.rmtree(self.repo_path)
30 32
33 @property
34 def heads_default(self):
35 """
36 Support repos defaulting to master or to main branch
37 """
38 try:
39 return self.repo.heads.main
40 except AttributeError:
41 return self.repo.heads.master
42
31 def commit_vars(self, to_add={}, to_remove = [], msg="A commit message"): 43 def commit_vars(self, to_add={}, to_remove = [], msg="A commit message"):
32 if len(to_add) == 0 and len(to_remove) == 0: 44 if len(to_add) == 0 and len(to_remove) == 0:
33 return 45 return
@@ -65,10 +77,10 @@ class TestBlobParsing(OESelftestTestCase):
65 changesmap = { "foo-2" : ("2", "8"), "bar" : ("","4"), "bar-2" : ("","5")} 77 changesmap = { "foo-2" : ("2", "8"), "bar" : ("","4"), "bar-2" : ("","5")}
66 78
67 self.commit_vars(to_add = { "foo" : "1", "foo-2" : "2", "foo-3" : "3" }) 79 self.commit_vars(to_add = { "foo" : "1", "foo-2" : "2", "foo-3" : "3" })
68 blob1 = self.repo.heads.master.commit.tree.blobs[0] 80 blob1 = self.heads_default.commit.tree.blobs[0]
69 81
70 self.commit_vars(to_add = { "foo-2" : "8", "bar" : "4", "bar-2" : "5" }) 82 self.commit_vars(to_add = { "foo-2" : "8", "bar" : "4", "bar-2" : "5" })
71 blob2 = self.repo.heads.master.commit.tree.blobs[0] 83 blob2 = self.heads_default.commit.tree.blobs[0]
72 84
73 change_records = compare_dict_blobs(os.path.join(self.repo_path, self.test_file), 85 change_records = compare_dict_blobs(os.path.join(self.repo_path, self.test_file),
74 blob1, blob2, False, False) 86 blob1, blob2, False, False)
@@ -84,10 +96,10 @@ class TestBlobParsing(OESelftestTestCase):
84 defaultmap = { x : ("default", "1") for x in ["PKG", "PKGE", "PKGV", "PKGR"]} 96 defaultmap = { x : ("default", "1") for x in ["PKG", "PKGE", "PKGV", "PKGR"]}
85 97
86 self.commit_vars(to_add = { "foo" : "1" }) 98 self.commit_vars(to_add = { "foo" : "1" })
87 blob1 = self.repo.heads.master.commit.tree.blobs[0] 99 blob1 = self.heads_default.commit.tree.blobs[0]
88 100
89 self.commit_vars(to_add = { "PKG" : "1", "PKGE" : "1", "PKGV" : "1", "PKGR" : "1" }) 101 self.commit_vars(to_add = { "PKG" : "1", "PKGE" : "1", "PKGV" : "1", "PKGR" : "1" })
90 blob2 = self.repo.heads.master.commit.tree.blobs[0] 102 blob2 = self.heads_default.commit.tree.blobs[0]
91 103
92 change_records = compare_dict_blobs(os.path.join(self.repo_path, self.test_file), 104 change_records = compare_dict_blobs(os.path.join(self.repo_path, self.test_file),
93 blob1, blob2, False, False) 105 blob1, blob2, False, False)
diff --git a/meta/lib/oeqa/selftest/cases/oelib/elf.py b/meta/lib/oeqa/selftest/cases/oelib/elf.py
index 5a5f9b4fdf..7bf550b6fd 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/elf.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/elf.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/selftest/cases/oelib/license.py b/meta/lib/oeqa/selftest/cases/oelib/license.py
index 6ebbee589f..5eea12e761 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/license.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/license.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -15,11 +17,11 @@ class SeenVisitor(oe.license.LicenseVisitor):
15 17
16class TestSingleLicense(TestCase): 18class TestSingleLicense(TestCase):
17 licenses = [ 19 licenses = [
18 "GPLv2", 20 "GPL-2.0-only",
19 "LGPL-2.0", 21 "LGPL-2.0-only",
20 "Artistic", 22 "Artistic-1.0",
21 "MIT", 23 "MIT",
22 "GPLv3+", 24 "GPL-3.0-or-later",
23 "FOO_BAR", 25 "FOO_BAR",
24 ] 26 ]
25 invalid_licenses = ["GPL/BSD"] 27 invalid_licenses = ["GPL/BSD"]
@@ -67,9 +69,9 @@ class TestComplexCombinations(TestSimpleCombinations):
67 "FOO & (BAR | BAZ)&MOO": ["FOO", "BAR", "MOO"], 69 "FOO & (BAR | BAZ)&MOO": ["FOO", "BAR", "MOO"],
68 "(ALPHA|(BETA&THETA)|OMEGA)&DELTA": ["OMEGA", "DELTA"], 70 "(ALPHA|(BETA&THETA)|OMEGA)&DELTA": ["OMEGA", "DELTA"],
69 "((ALPHA|BETA)&FOO)|BAZ": ["BETA", "FOO"], 71 "((ALPHA|BETA)&FOO)|BAZ": ["BETA", "FOO"],
70 "(GPL-2.0|Proprietary)&BSD-4-clause&MIT": ["GPL-2.0", "BSD-4-clause", "MIT"], 72 "(GPL-2.0-only|Proprietary)&BSD-4-clause&MIT": ["GPL-2.0-only", "BSD-4-clause", "MIT"],
71 } 73 }
72 preferred = ["BAR", "OMEGA", "BETA", "GPL-2.0"] 74 preferred = ["BAR", "OMEGA", "BETA", "GPL-2.0-only"]
73 75
74class TestIsIncluded(TestCase): 76class TestIsIncluded(TestCase):
75 tests = { 77 tests = {
@@ -87,12 +89,12 @@ class TestIsIncluded(TestCase):
87 [True, ["BAR", "FOOBAR"]], 89 [True, ["BAR", "FOOBAR"]],
88 ("(FOO | BAR) & FOOBAR | BAZ & MOO & BARFOO", None, "FOO"): 90 ("(FOO | BAR) & FOOBAR | BAZ & MOO & BARFOO", None, "FOO"):
89 [True, ["BAZ", "MOO", "BARFOO"]], 91 [True, ["BAZ", "MOO", "BARFOO"]],
90 ("GPL-3.0 & GPL-2.0 & LGPL-2.1 | Proprietary", None, None): 92 ("GPL-3.0-or-later & GPL-2.0-only & LGPL-2.1-only | Proprietary", None, None):
91 [True, ["GPL-3.0", "GPL-2.0", "LGPL-2.1"]], 93 [True, ["GPL-3.0-or-later", "GPL-2.0-only", "LGPL-2.1-only"]],
92 ("GPL-3.0 & GPL-2.0 & LGPL-2.1 | Proprietary", None, "GPL-3.0"): 94 ("GPL-3.0-or-later & GPL-2.0-only & LGPL-2.1-only | Proprietary", None, "GPL-3.0-or-later"):
93 [True, ["Proprietary"]], 95 [True, ["Proprietary"]],
94 ("GPL-3.0 & GPL-2.0 & LGPL-2.1 | Proprietary", None, "GPL-3.0 Proprietary"): 96 ("GPL-3.0-or-later & GPL-2.0-only & LGPL-2.1-only | Proprietary", None, "GPL-3.0-or-later Proprietary"):
95 [False, ["GPL-3.0"]] 97 [False, ["GPL-3.0-or-later"]]
96 } 98 }
97 99
98 def test_tests(self): 100 def test_tests(self):
diff --git a/meta/lib/oeqa/selftest/cases/oelib/path.py b/meta/lib/oeqa/selftest/cases/oelib/path.py
index a1cfa08c09..b963e447e3 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/path.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/path.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/selftest/cases/oelib/types.py b/meta/lib/oeqa/selftest/cases/oelib/types.py
index 7eb49e6f95..58318b18b2 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/types.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/types.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/selftest/cases/oelib/utils.py b/meta/lib/oeqa/selftest/cases/oelib/utils.py
index a7214beb4c..0cb46425a0 100644
--- a/meta/lib/oeqa/selftest/cases/oelib/utils.py
+++ b/meta/lib/oeqa/selftest/cases/oelib/utils.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -64,7 +66,7 @@ class TestMultiprocessLaunch(TestCase):
64 import bb 66 import bb
65 67
66 def testfunction(item, d): 68 def testfunction(item, d):
67 if item == "2" or item == "1": 69 if item == "2":
68 raise KeyError("Invalid number %s" % item) 70 raise KeyError("Invalid number %s" % item)
69 return "Found %s" % item 71 return "Found %s" % item
70 72
@@ -99,5 +101,4 @@ class TestMultiprocessLaunch(TestCase):
99 # Assert the function prints exceptions 101 # Assert the function prints exceptions
100 with captured_output() as (out, err): 102 with captured_output() as (out, err):
101 self.assertRaises(bb.BBHandledException, multiprocess_launch, testfunction, ["1", "2", "3", "4", "5", "6"], d, extraargs=(d,)) 103 self.assertRaises(bb.BBHandledException, multiprocess_launch, testfunction, ["1", "2", "3", "4", "5", "6"], d, extraargs=(d,))
102 self.assertIn("KeyError: 'Invalid number 1'", out.getvalue())
103 self.assertIn("KeyError: 'Invalid number 2'", out.getvalue()) 104 self.assertIn("KeyError: 'Invalid number 2'", out.getvalue())
diff --git a/meta/lib/oeqa/selftest/cases/oescripts.py b/meta/lib/oeqa/selftest/cases/oescripts.py
index 8a10ff357b..f69efccfee 100644
--- a/meta/lib/oeqa/selftest/cases/oescripts.py
+++ b/meta/lib/oeqa/selftest/cases/oescripts.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -8,7 +10,7 @@ import importlib
8import unittest 10import unittest
9from oeqa.selftest.case import OESelftestTestCase 11from oeqa.selftest.case import OESelftestTestCase
10from oeqa.selftest.cases.buildhistory import BuildhistoryBase 12from oeqa.selftest.cases.buildhistory import BuildhistoryBase
11from oeqa.utils.commands import Command, runCmd, bitbake, get_bb_var, get_test_layer 13from oeqa.utils.commands import runCmd, bitbake, get_bb_var
12from oeqa.utils import CommandError 14from oeqa.utils import CommandError
13 15
14class BuildhistoryDiffTests(BuildhistoryBase): 16class BuildhistoryDiffTests(BuildhistoryBase):
@@ -21,7 +23,7 @@ class BuildhistoryDiffTests(BuildhistoryBase):
21 pkgv = result.output.rstrip() 23 pkgv = result.output.rstrip()
22 result = runCmd("buildhistory-diff -p %s" % get_bb_var('BUILDHISTORY_DIR')) 24 result = runCmd("buildhistory-diff -p %s" % get_bb_var('BUILDHISTORY_DIR'))
23 expected_endlines = [ 25 expected_endlines = [
24 "xcursor-transparent-theme-dev: RDEPENDS: removed \"xcursor-transparent-theme (['= %s-r1'])\", added \"xcursor-transparent-theme (['= %s-r0'])\"" % (pkgv, pkgv), 26 "xcursor-transparent-theme-dev: RRECOMMENDS: removed \"xcursor-transparent-theme (['= %s-r1'])\", added \"xcursor-transparent-theme (['= %s-r0'])\"" % (pkgv, pkgv),
25 "xcursor-transparent-theme-staticdev: RDEPENDS: removed \"xcursor-transparent-theme-dev (['= %s-r1'])\", added \"xcursor-transparent-theme-dev (['= %s-r0'])\"" % (pkgv, pkgv) 27 "xcursor-transparent-theme-staticdev: RDEPENDS: removed \"xcursor-transparent-theme-dev (['= %s-r1'])\", added \"xcursor-transparent-theme-dev (['= %s-r0'])\"" % (pkgv, pkgv)
26 ] 28 ]
27 for line in result.output.splitlines(): 29 for line in result.output.splitlines():
@@ -35,19 +37,15 @@ class BuildhistoryDiffTests(BuildhistoryBase):
35 self.fail('Missing expected line endings:\n %s' % '\n '.join(expected_endlines)) 37 self.fail('Missing expected line endings:\n %s' % '\n '.join(expected_endlines))
36 38
37@unittest.skipUnless(importlib.util.find_spec("cairo"), "Python cairo module is not present") 39@unittest.skipUnless(importlib.util.find_spec("cairo"), "Python cairo module is not present")
38class OEScriptTests(OESelftestTestCase): 40class OEPybootchartguyTests(OESelftestTestCase):
39 41
40 @classmethod 42 @classmethod
41 def setUpClass(cls): 43 def setUpClass(cls):
42 super(OEScriptTests, cls).setUpClass() 44 super().setUpClass()
43 import cairo
44 bitbake("core-image-minimal -c rootfs -f") 45 bitbake("core-image-minimal -c rootfs -f")
45 cls.tmpdir = get_bb_var('TMPDIR') 46 cls.tmpdir = get_bb_var('TMPDIR')
46 cls.buildstats = cls.tmpdir + "/buildstats/" + sorted(os.listdir(cls.tmpdir + "/buildstats"))[-1] 47 cls.buildstats = cls.tmpdir + "/buildstats/" + sorted(os.listdir(cls.tmpdir + "/buildstats"))[-1]
47 48 cls.scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts')
48 scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts')
49
50class OEPybootchartguyTests(OEScriptTests):
51 49
52 def test_pybootchartguy_help(self): 50 def test_pybootchartguy_help(self):
53 runCmd('%s/pybootchartgui/pybootchartgui.py --help' % self.scripts_dir) 51 runCmd('%s/pybootchartgui/pybootchartgui.py --help' % self.scripts_dir)
@@ -67,7 +65,10 @@ class OEPybootchartguyTests(OEScriptTests):
67 65
68class OEGitproxyTests(OESelftestTestCase): 66class OEGitproxyTests(OESelftestTestCase):
69 67
70 scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts') 68 @classmethod
69 def setUpClass(cls):
70 super().setUpClass()
71 cls.scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts')
71 72
72 def test_oegitproxy_help(self): 73 def test_oegitproxy_help(self):
73 try: 74 try:
@@ -125,15 +126,22 @@ class OEGitproxyTests(OESelftestTestCase):
125class OeRunNativeTest(OESelftestTestCase): 126class OeRunNativeTest(OESelftestTestCase):
126 def test_oe_run_native(self): 127 def test_oe_run_native(self):
127 bitbake("qemu-helper-native -c addto_recipe_sysroot") 128 bitbake("qemu-helper-native -c addto_recipe_sysroot")
128 result = runCmd("oe-run-native qemu-helper-native tunctl -h") 129 result = runCmd("oe-run-native qemu-helper-native qemu-oe-bridge-helper --help")
129 self.assertIn("Delete: tunctl -d device-name [-f tun-clone-device]", result.output) 130 self.assertIn("Helper function to find and exec qemu-bridge-helper", result.output)
131
132class OEListPackageconfigTests(OESelftestTestCase):
133
134 @classmethod
135 def setUpClass(cls):
136 super().setUpClass()
137 cls.scripts_dir = os.path.join(get_bb_var('COREBASE'), 'scripts')
130 138
131class OEListPackageconfigTests(OEScriptTests):
132 #oe-core.scripts.List_all_the_PACKAGECONFIG's_flags 139 #oe-core.scripts.List_all_the_PACKAGECONFIG's_flags
133 def check_endlines(self, results, expected_endlines): 140 def check_endlines(self, results, expected_endlines):
134 for line in results.output.splitlines(): 141 for line in results.output.splitlines():
135 for el in expected_endlines: 142 for el in expected_endlines:
136 if line.split() == el.split(): 143 if line and line.split()[0] == el.split()[0] and \
144 ' '.join(sorted(el.split())) in ' '.join(sorted(line.split())):
137 expected_endlines.remove(el) 145 expected_endlines.remove(el)
138 break 146 break
139 147
@@ -149,8 +157,8 @@ class OEListPackageconfigTests(OEScriptTests):
149 results = runCmd('%s/contrib/list-packageconfig-flags.py' % self.scripts_dir) 157 results = runCmd('%s/contrib/list-packageconfig-flags.py' % self.scripts_dir)
150 expected_endlines = [] 158 expected_endlines = []
151 expected_endlines.append("RECIPE NAME PACKAGECONFIG FLAGS") 159 expected_endlines.append("RECIPE NAME PACKAGECONFIG FLAGS")
152 expected_endlines.append("pinentry gtk2 libcap ncurses qt secret") 160 expected_endlines.append("pinentry gtk2 ncurses qt secret")
153 expected_endlines.append("tar acl") 161 expected_endlines.append("tar acl selinux")
154 162
155 self.check_endlines(results, expected_endlines) 163 self.check_endlines(results, expected_endlines)
156 164
@@ -167,11 +175,10 @@ class OEListPackageconfigTests(OEScriptTests):
167 def test_packageconfig_flags_option_all(self): 175 def test_packageconfig_flags_option_all(self):
168 results = runCmd('%s/contrib/list-packageconfig-flags.py -a' % self.scripts_dir) 176 results = runCmd('%s/contrib/list-packageconfig-flags.py -a' % self.scripts_dir)
169 expected_endlines = [] 177 expected_endlines = []
170 expected_endlines.append("pinentry-1.1.1") 178 expected_endlines.append("pinentry-1.2.1")
171 expected_endlines.append("PACKAGECONFIG ncurses libcap") 179 expected_endlines.append("PACKAGECONFIG ncurses")
172 expected_endlines.append("PACKAGECONFIG[qt] --enable-pinentry-qt, --disable-pinentry-qt, qtbase-native qtbase") 180 expected_endlines.append("PACKAGECONFIG[qt] --enable-pinentry-qt, --disable-pinentry-qt, qtbase-native qtbase")
173 expected_endlines.append("PACKAGECONFIG[gtk2] --enable-pinentry-gtk2, --disable-pinentry-gtk2, gtk+ glib-2.0") 181 expected_endlines.append("PACKAGECONFIG[gtk2] --enable-pinentry-gtk2, --disable-pinentry-gtk2, gtk+ glib-2.0")
174 expected_endlines.append("PACKAGECONFIG[libcap] --with-libcap, --without-libcap, libcap")
175 expected_endlines.append("PACKAGECONFIG[ncurses] --enable-ncurses --with-ncurses-include-dir=${STAGING_INCDIR}, --disable-ncurses, ncurses") 182 expected_endlines.append("PACKAGECONFIG[ncurses] --enable-ncurses --with-ncurses-include-dir=${STAGING_INCDIR}, --disable-ncurses, ncurses")
176 expected_endlines.append("PACKAGECONFIG[secret] --enable-libsecret, --disable-libsecret, libsecret") 183 expected_endlines.append("PACKAGECONFIG[secret] --enable-libsecret, --disable-libsecret, libsecret")
177 184
@@ -181,7 +188,7 @@ class OEListPackageconfigTests(OEScriptTests):
181 results = runCmd('%s/contrib/list-packageconfig-flags.py -p' % self.scripts_dir) 188 results = runCmd('%s/contrib/list-packageconfig-flags.py -p' % self.scripts_dir)
182 expected_endlines = [] 189 expected_endlines = []
183 expected_endlines.append("RECIPE NAME PACKAGECONFIG FLAGS") 190 expected_endlines.append("RECIPE NAME PACKAGECONFIG FLAGS")
184 expected_endlines.append("pinentry gtk2 libcap ncurses qt secret") 191 expected_endlines.append("pinentry gtk2 ncurses qt secret")
185 192
186 self.check_endlines(results, expected_endlines) 193 self.check_endlines(results, expected_endlines)
187 194
diff --git a/meta/lib/oeqa/selftest/cases/overlayfs.py b/meta/lib/oeqa/selftest/cases/overlayfs.py
new file mode 100644
index 0000000000..e31063567b
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/overlayfs.py
@@ -0,0 +1,502 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7from oeqa.selftest.case import OESelftestTestCase
8from oeqa.utils.commands import bitbake, runqemu
9from oeqa.core.decorator import OETestTag
10from oeqa.core.decorator.data import skipIfNotMachine
11
12def getline_qemu(out, line):
13 for l in out.split('\n'):
14 if line in l:
15 return l
16
17def getline(res, line):
18 return getline_qemu(res.output, line)
19
20class OverlayFSTests(OESelftestTestCase):
21 """Overlayfs class usage tests"""
22
23 def add_overlay_conf_to_machine(self):
24 machine_inc = """
25OVERLAYFS_MOUNT_POINT[mnt-overlay] = "/mnt/overlay"
26"""
27 self.set_machine_config(machine_inc)
28
29 def test_distro_features_missing(self):
30 """
31 Summary: Check that required DISTRO_FEATURES are set
32 Expected: Fail when either systemd or overlayfs is not in DISTRO_FEATURES
33 Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
34 """
35
36 config = """
37IMAGE_INSTALL:append = " overlayfs-user"
38"""
39 overlayfs_recipe_append = """
40inherit overlayfs
41"""
42 self.write_config(config)
43 self.add_overlay_conf_to_machine()
44 self.write_recipeinc('overlayfs-user', overlayfs_recipe_append)
45
46 res = bitbake('core-image-minimal', ignore_status=True)
47 line = getline(res, "overlayfs-user was skipped: missing required distro features")
48 self.assertTrue("overlayfs" in res.output, msg=res.output)
49 self.assertTrue("systemd" in res.output, msg=res.output)
50 self.assertTrue("ERROR: Required build target 'core-image-minimal' has no buildable providers." in res.output, msg=res.output)
51
52 def test_not_all_units_installed(self):
53 """
54 Summary: Test QA check that we have required mount units in the image
55 Expected: Fail because mount unit for overlay partition is not installed
56 Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
57 """
58
59 config = """
60IMAGE_INSTALL:append = " overlayfs-user"
61DISTRO_FEATURES:append = " systemd overlayfs usrmerge"
62"""
63
64 self.write_config(config)
65 self.add_overlay_conf_to_machine()
66
67 res = bitbake('core-image-minimal', ignore_status=True)
68 line = getline(res, " Mount path /mnt/overlay not found in fstab and unit mnt-overlay.mount not found in systemd unit directories")
69 self.assertTrue(line and line.startswith("WARNING:"), msg=res.output)
70 line = getline(res, "Not all mount paths and units are installed in the image")
71 self.assertTrue(line and line.startswith("ERROR:"), msg=res.output)
72
73 def test_not_all_units_installed_but_qa_skipped(self):
74 """
75 Summary: Test skipping the QA check
76 Expected: Image is created successfully
77 Author: Claudius Heine <ch@denx.de>
78 """
79
80 config = """
81IMAGE_INSTALL:append = " overlayfs-user"
82DISTRO_FEATURES:append = " systemd overlayfs usrmerge"
83OVERLAYFS_QA_SKIP[mnt-overlay] = "mount-configured"
84"""
85
86 self.write_config(config)
87 self.add_overlay_conf_to_machine()
88
89 bitbake('core-image-minimal')
90
91 def test_mount_unit_not_set(self):
92 """
93 Summary: Test whether mount unit was set properly
94 Expected: Fail because mount unit was not set
95 Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
96 """
97
98 config = """
99IMAGE_INSTALL:append = " overlayfs-user"
100DISTRO_FEATURES:append = " systemd overlayfs usrmerge"
101"""
102
103 self.write_config(config)
104
105 res = bitbake('core-image-minimal', ignore_status=True)
106 line = getline(res, "A recipe uses overlayfs class but there is no OVERLAYFS_MOUNT_POINT set in your MACHINE configuration")
107 self.assertTrue(line and line.startswith("Parsing recipes...ERROR:"), msg=res.output)
108
109 def test_wrong_mount_unit_set(self):
110 """
111 Summary: Test whether mount unit was set properly
112 Expected: Fail because the wrong flag is used for the mount unit
113 Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
114 """
115
116 config = """
117IMAGE_INSTALL:append = " overlayfs-user"
118DISTRO_FEATURES:append = " systemd overlayfs usrmerge"
119"""
120
121 wrong_machine_config = """
122OVERLAYFS_MOUNT_POINT[usr-share-overlay] = "/usr/share/overlay"
123"""
124
125 self.write_config(config)
126 self.set_machine_config(wrong_machine_config)
127
128 res = bitbake('core-image-minimal', ignore_status=True)
129 line = getline(res, "Missing required mount point for OVERLAYFS_MOUNT_POINT[mnt-overlay] in your MACHINE configuration")
130 self.assertTrue(line and line.startswith("Parsing recipes...ERROR:"), msg=res.output)
131
132 def _test_correct_image(self, recipe, data):
133 """
134 Summary: Check that we can create an image when all parameters are
135 set correctly
136 Expected: Image is created successfully
137 Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
138 """
139
140 config = """
141IMAGE_INSTALL:append = " overlayfs-user systemd-machine-units"
142DISTRO_FEATURES:append = " overlayfs"
143
144# Use systemd as init manager
145INIT_MANAGER = "systemd"
146
147# enable overlayfs in the kernel
148KERNEL_EXTRA_FEATURES:append = " features/overlayfs/overlayfs.scc"
149"""
150
151 overlayfs_recipe_append = """
152OVERLAYFS_WRITABLE_PATHS[mnt-overlay] += "/usr/share/another-overlay-mount"
153
154SYSTEMD_SERVICE:${PN} += " \
155 my-application.service \
156"
157
158do_install:append() {
159 install -d ${D}${systemd_system_unitdir}
160 cat <<EOT > ${D}${systemd_system_unitdir}/my-application.service
161[Unit]
162Description=Sample application start-up unit
163After=overlayfs-user-overlays.service
164Requires=overlayfs-user-overlays.service
165
166[Service]
167Type=oneshot
168ExecStart=/bin/true
169RemainAfterExit=true
170
171[Install]
172WantedBy=multi-user.target
173EOT
174}
175"""
176
177 self.write_config(config)
178 self.add_overlay_conf_to_machine()
179 self.write_recipeinc(recipe, data)
180 self.write_recipeinc('overlayfs-user', overlayfs_recipe_append)
181
182 bitbake('core-image-minimal')
183
184 with runqemu('core-image-minimal') as qemu:
185 # Check that application service started
186 status, output = qemu.run_serial("systemctl status my-application")
187 self.assertTrue("active (exited)" in output, msg=output)
188
189 # Check that overlay mounts are dependencies of our application unit
190 status, output = qemu.run_serial("systemctl list-dependencies my-application")
191 self.assertTrue("overlayfs-user-overlays.service" in output, msg=output)
192
193 status, output = qemu.run_serial("systemctl list-dependencies overlayfs-user-overlays")
194 self.assertTrue("usr-share-another\\x2doverlay\\x2dmount.mount" in output, msg=output)
195 self.assertTrue("usr-share-my\\x2dapplication.mount" in output, msg=output)
196
197 # Check that we have /mnt/overlay fs mounted as tmpfs and
198 # /usr/share/my-application as an overlay (see overlayfs-user recipe)
199 status, output = qemu.run_serial("/bin/mount -t tmpfs,overlay")
200
201 line = getline_qemu(output, "on /mnt/overlay")
202 self.assertTrue(line and line.startswith("tmpfs"), msg=output)
203
204 line = getline_qemu(output, "upperdir=/mnt/overlay/upper/usr/share/my-application")
205 self.assertTrue(line and line.startswith("overlay"), msg=output)
206
207 line = getline_qemu(output, "upperdir=/mnt/overlay/upper/usr/share/another-overlay-mount")
208 self.assertTrue(line and line.startswith("overlay"), msg=output)
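            # The matched mount lines are expected to look roughly like this
            # (illustrative shape only, not captured output):
            #   tmpfs on /mnt/overlay type tmpfs (rw,...)
            #   overlay on /usr/share/my-application type overlay
            #       (rw,...,upperdir=/mnt/overlay/upper/usr/share/my-application,...)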
209
210 @OETestTag("runqemu")
211 def test_correct_image_fstab(self):
212 """
213 Summary: Check that we can create an image when all parameters are
214 set correctly via fstab
215 Expected: Image is created successfully
216 Author: Stefan Herbrechtsmeier <stefan.herbrechtsmeier@weidmueller.com>
217 """
218
219 base_files_append = """
220do_install:append() {
221 cat <<EOT >> ${D}${sysconfdir}/fstab
222tmpfs /mnt/overlay tmpfs mode=1777,strictatime,nosuid,nodev 0 0
223EOT
224}
225"""
226
227 self._test_correct_image('base-files', base_files_append)
228
229 @OETestTag("runqemu")
230 def test_correct_image_unit(self):
231 """
232 Summary: Check that we can create an image when all parameters are
233 set correctly via mount unit
234 Expected: Image is created successfully
235 Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
236 """
237
238 systemd_machine_unit_append = """
239SYSTEMD_SERVICE:${PN} += " \
240 mnt-overlay.mount \
241"
242
243do_install:append() {
244 install -d ${D}${systemd_system_unitdir}
245 cat <<EOT > ${D}${systemd_system_unitdir}/mnt-overlay.mount
246[Unit]
247Description=Tmpfs directory
248DefaultDependencies=no
249
250[Mount]
251What=tmpfs
252Where=/mnt/overlay
253Type=tmpfs
254Options=mode=1777,strictatime,nosuid,nodev
255
256[Install]
257WantedBy=multi-user.target
258EOT
259}
260
261"""
262
263 self._test_correct_image('systemd-machine-units', systemd_machine_unit_append)
264
265@OETestTag("runqemu")
266class OverlayFSEtcRunTimeTests(OESelftestTestCase):
267 """overlayfs-etc class tests"""
268
269 def test_all_required_variables_set(self):
270 """
271 Summary: Check that required variables are set
272         Expected: Fail when any of the required variables is missing
273 Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
274 """
275
276 configBase = """
277# Use systemd as init manager
278INIT_MANAGER = "systemd"
279
280# enable overlayfs in the kernel
281KERNEL_EXTRA_FEATURES:append = " features/overlayfs/overlayfs.scc"
282
283# Image configuration for overlayfs-etc
284EXTRA_IMAGE_FEATURES += "overlayfs-etc"
285IMAGE_FEATURES:remove = "package-management"
286"""
287 configMountPoint = """
288OVERLAYFS_ETC_MOUNT_POINT = "/data"
289"""
290 configDevice = """
291OVERLAYFS_ETC_DEVICE = "/dev/mmcblk0p1"
292"""
293
294 self.write_config(configBase)
295 res = bitbake('core-image-minimal', ignore_status=True)
296 line = getline(res, "OVERLAYFS_ETC_MOUNT_POINT must be set in your MACHINE configuration")
297 self.assertTrue(line, msg=res.output)
298
299 self.append_config(configMountPoint)
300 res = bitbake('core-image-minimal', ignore_status=True)
301 line = getline(res, "OVERLAYFS_ETC_DEVICE must be set in your MACHINE configuration")
302 self.assertTrue(line, msg=res.output)
303
304 self.append_config(configDevice)
305 res = bitbake('core-image-minimal', ignore_status=True)
306 line = getline(res, "OVERLAYFS_ETC_FSTYPE should contain a valid file system type on /dev/mmcblk0p1")
307 self.assertTrue(line, msg=res.output)
308
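    # Taken together, a configuration that passes all three checks above needs
    # at least the following (a sketch; the fstype value is an example and
    # mirrors get_working_config() further down):
    #
    #   EXTRA_IMAGE_FEATURES += "overlayfs-etc"
    #   IMAGE_FEATURES:remove = "package-management"
    #   OVERLAYFS_ETC_MOUNT_POINT = "/data"
    #   OVERLAYFS_ETC_DEVICE = "/dev/mmcblk0p1"
    #   OVERLAYFS_ETC_FSTYPE = "ext4"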
309 def test_image_feature_conflict(self):
310 """
311 Summary: Overlayfs-etc is not allowed to be used with package-management
312 Expected: Feature conflict
313 Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
314 """
315
316 config = """
317# Use systemd as init manager
318INIT_MANAGER = "systemd"
319
320# enable overlayfs in the kernel
321KERNEL_EXTRA_FEATURES:append = " features/overlayfs/overlayfs.scc"
322EXTRA_IMAGE_FEATURES += "overlayfs-etc"
323EXTRA_IMAGE_FEATURES += "package-management"
324"""
325
326 self.write_config(config)
327
328 res = bitbake('core-image-minimal', ignore_status=True)
329 line = getline(res, "contains conflicting IMAGE_FEATURES")
330 self.assertTrue("overlayfs-etc" in res.output, msg=res.output)
331 self.assertTrue("package-management" in res.output, msg=res.output)
332
333 # https://bugzilla.yoctoproject.org/show_bug.cgi?id=14963
334 @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
335 def test_image_feature_is_missing(self):
336 """
337         Summary: Overlayfs-etc class is not applied when the image feature is not set
338 Expected: Image is created successfully but /etc is not an overlay
339 Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
340 """
341
342 config = """
343# Use systemd as init manager
344INIT_MANAGER = "systemd"
345
346# enable overlayfs in the kernel
347KERNEL_EXTRA_FEATURES:append = " features/overlayfs/overlayfs.scc"
348
349IMAGE_FSTYPES += "wic"
350WKS_FILE = "overlayfs_etc.wks.in"
351
352EXTRA_IMAGE_FEATURES += "read-only-rootfs"
353# Image configuration for overlayfs-etc
354OVERLAYFS_ETC_MOUNT_POINT = "/data"
355OVERLAYFS_ETC_DEVICE = "/dev/sda3"
356OVERLAYFS_ROOTFS_TYPE = "ext4"
357"""
358
359 self.write_config(config)
360
361 bitbake('core-image-minimal')
362
363 with runqemu('core-image-minimal', image_fstype='wic') as qemu:
364 status, output = qemu.run_serial("/bin/mount")
365
366 line = getline_qemu(output, "upperdir=/data/overlay-etc/upper")
367 self.assertFalse(line, msg=output)
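            # With the overlayfs-etc image feature left out, no overlay-etc
            # upperdir line should appear; the wic image built from
            # overlayfs_etc.wks.in presumably still provides the /dev/sda3
            # partition, it is simply never mounted over /etc.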
368
369 @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
370 def test_sbin_init_preinit(self):
371 self.run_sbin_init(False, "ext4")
372
373 @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
374 def test_sbin_init_original(self):
375 self.run_sbin_init(True, "ext4")
376
377 @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
378 def test_sbin_init_read_only(self):
379 self.run_sbin_init(True, "squashfs")
380
381 def run_sbin_init(self, origInit, rootfsType):
382 """
383         Summary: Confirm we can replace the original init and mount an overlay on top of /etc
384 Expected: Image is created successfully and /etc is mounted as an overlay
385 Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
386 """
387
388 config = self.get_working_config()
389
390 args = {
391 'OVERLAYFS_INIT_OPTION': "" if origInit else "init=/sbin/preinit",
392 'OVERLAYFS_ETC_USE_ORIG_INIT_NAME': int(origInit == True),
393 'OVERLAYFS_ROOTFS_TYPE': rootfsType,
394 'OVERLAYFS_ETC_CREATE_MOUNT_DIRS': int(rootfsType == "ext4")
395 }
396
397 self.write_config(config.format(**args))
398
399 bitbake('core-image-minimal')
400 testFile = "/etc/my-test-data"
401
402 with runqemu('core-image-minimal', image_fstype='wic', discard_writes=False) as qemu:
403 status, output = qemu.run_serial("/bin/mount")
404
405 line = getline_qemu(output, "/dev/sda3")
406 self.assertTrue("/data" in output, msg=output)
407
408 line = getline_qemu(output, "upperdir=/data/overlay-etc/upper")
409 self.assertTrue(line and line.startswith("/data/overlay-etc/upper on /etc type overlay"), msg=output)
410
411 # check that lower layer is not available
412 status, output = qemu.run_serial("ls -1 /data/overlay-etc/lower")
413 line = getline_qemu(output, "No such file or directory")
414 self.assertTrue(line, msg=output)
415
416 status, output = qemu.run_serial("touch " + testFile)
417 status, output = qemu.run_serial("sync")
418 status, output = qemu.run_serial("ls -1 " + testFile)
419 line = getline_qemu(output, testFile)
420 self.assertTrue(line and line.startswith(testFile), msg=output)
421
422 # Check that file exists in /etc after reboot
423 with runqemu('core-image-minimal', image_fstype='wic') as qemu:
424 status, output = qemu.run_serial("ls -1 " + testFile)
425 line = getline_qemu(output, testFile)
426 self.assertTrue(line and line.startswith(testFile), msg=output)
427
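    # The three test_sbin_init_* variants above differ only in the arguments
    # passed here: whether the kernel is given init=/sbin/preinit
    # (OVERLAYFS_INIT_OPTION) or the original init name is kept
    # (OVERLAYFS_ETC_USE_ORIG_INIT_NAME), and whether the root filesystem is
    # ext4 or read-only squashfs (OVERLAYFS_ROOTFS_TYPE).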
428 @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
429 def test_lower_layer_access(self):
430 """
431 Summary: Test that lower layer of /etc is available read-only when configured
432         Expected: Cannot write to the lower layer. The files on the lower and upper layers
433                   differ after modification
434 Author: Vyacheslav Yurkov <uvv.mail@gmail.com>
435 """
436
437 config = self.get_working_config()
438
439 configLower = """
440OVERLAYFS_ETC_EXPOSE_LOWER = "1"
441IMAGE_INSTALL:append = " overlayfs-user"
442"""
443 testFile = "lower-layer-test.txt"
444
445 args = {
446 'OVERLAYFS_INIT_OPTION': "",
447 'OVERLAYFS_ETC_USE_ORIG_INIT_NAME': 1,
448 'OVERLAYFS_ROOTFS_TYPE': "ext4",
449 'OVERLAYFS_ETC_CREATE_MOUNT_DIRS': 1
450 }
451
452 self.write_config(config.format(**args))
453
454 self.append_config(configLower)
455 bitbake('core-image-minimal')
456
457 with runqemu('core-image-minimal', image_fstype='wic') as qemu:
458 status, output = qemu.run_serial("echo \"Modified in upper\" > /etc/" + testFile)
459 status, output = qemu.run_serial("diff /etc/" + testFile + " /data/overlay-etc/lower/" + testFile)
460 line = getline_qemu(output, "Modified in upper")
461 self.assertTrue(line, msg=output)
462 line = getline_qemu(output, "Original file")
463 self.assertTrue(line, msg=output)
464
465 status, output = qemu.run_serial("touch /data/overlay-etc/lower/ro-test.txt")
466 line = getline_qemu(output, "Read-only file system")
467 self.assertTrue(line, msg=output)
468
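    # OVERLAYFS_ETC_EXPOSE_LOWER = "1" makes the pristine lower layer of /etc
    # visible read-only under /data/overlay-etc/lower, which the diff and the
    # failing touch above rely on; the "Original file" content of
    # lower-layer-test.txt presumably comes from the overlayfs-user package
    # added via IMAGE_INSTALL.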
469 def get_working_config(self):
470 return """
471# Use systemd as init manager
472INIT_MANAGER = "systemd"
473
474# enable overlayfs in the kernel
475KERNEL_EXTRA_FEATURES:append = " \
476 features/overlayfs/overlayfs.scc \
477 cfg/fs/squashfs.scc"
478
479IMAGE_FSTYPES += "wic"
480OVERLAYFS_INIT_OPTION = "{OVERLAYFS_INIT_OPTION}"
481OVERLAYFS_ROOTFS_TYPE = "{OVERLAYFS_ROOTFS_TYPE}"
482OVERLAYFS_ETC_CREATE_MOUNT_DIRS = "{OVERLAYFS_ETC_CREATE_MOUNT_DIRS}"
483WKS_FILE = "overlayfs_etc.wks.in"
484
485EXTRA_IMAGE_FEATURES += "read-only-rootfs"
486# Image configuration for overlayfs-etc
487EXTRA_IMAGE_FEATURES += "overlayfs-etc"
488IMAGE_FEATURES:remove = "package-management"
489OVERLAYFS_ETC_MOUNT_POINT = "/data"
490OVERLAYFS_ETC_FSTYPE = "ext4"
491OVERLAYFS_ETC_DEVICE = "/dev/sda3"
492OVERLAYFS_ETC_USE_ORIG_INIT_NAME = "{OVERLAYFS_ETC_USE_ORIG_INIT_NAME}"
493
494ROOTFS_POSTPROCESS_COMMAND += "{OVERLAYFS_ROOTFS_TYPE}_rootfs"
495
496ext4_rootfs() {{
497}}
498
499squashfs_rootfs() {{
500 mkdir -p ${{IMAGE_ROOTFS}}/data
501}}
502"""
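    # A note on the template above: OVERLAYFS_ETC_CREATE_MOUNT_DIRS is "1" only
    # for the ext4 configuration (see the args built in run_sbin_init), so in
    # the read-only squashfs case the /data mount point is created at image
    # time instead, via the squashfs_rootfs() ROOTFS_POSTPROCESS_COMMAND hook.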
diff --git a/meta/lib/oeqa/selftest/cases/package.py b/meta/lib/oeqa/selftest/cases/package.py
index 7166c3991f..1aa6c03f8a 100644
--- a/meta/lib/oeqa/selftest/cases/package.py
+++ b/meta/lib/oeqa/selftest/cases/package.py
@@ -1,10 +1,11 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5from oeqa.selftest.case import OESelftestTestCase 7from oeqa.selftest.case import OESelftestTestCase
6from oeqa.utils.commands import bitbake, get_bb_vars, get_bb_var, runqemu 8from oeqa.utils.commands import bitbake, get_bb_vars, get_bb_var, runqemu
7import stat
8import subprocess, os 9import subprocess, os
9import oe.path 10import oe.path
10import re 11import re
@@ -88,6 +89,13 @@ class VersionOrdering(OESelftestTestCase):
88 self.assertEqual(status - 100, sort, "%s %s (%d) failed" % (ver1, ver2, sort)) 89 self.assertEqual(status - 100, sort, "%s %s (%d) failed" % (ver1, ver2, sort))
89 90
90class PackageTests(OESelftestTestCase): 91class PackageTests(OESelftestTestCase):
92 # Verify that a recipe cannot rename a package into an existing one
93 def test_package_name_conflict(self):
94 res = bitbake("packagenameconflict", ignore_status=True)
95 self.assertNotEqual(res.status, 0)
96 err = "package name already exists"
97 self.assertTrue(err in res.output)
98
91 # Verify that a recipe which sets up hardlink files has those preserved into split packages 99 # Verify that a recipe which sets up hardlink files has those preserved into split packages
92 # Also test file sparseness is preserved 100 # Also test file sparseness is preserved
93 def test_preserve_sparse_hardlinks(self): 101 def test_preserve_sparse_hardlinks(self):
@@ -116,9 +124,9 @@ class PackageTests(OESelftestTestCase):
116 124
117 # Verify gdb to read symbols from separated debug hardlink file correctly 125 # Verify gdb to read symbols from separated debug hardlink file correctly
118 def test_gdb_hardlink_debug(self): 126 def test_gdb_hardlink_debug(self):
119 features = 'IMAGE_INSTALL_append = " selftest-hardlink"\n' 127 features = 'IMAGE_INSTALL:append = " selftest-hardlink"\n'
120 features += 'IMAGE_INSTALL_append = " selftest-hardlink-dbg"\n' 128 features += 'IMAGE_INSTALL:append = " selftest-hardlink-dbg"\n'
121 features += 'IMAGE_INSTALL_append = " selftest-hardlink-gdb"\n' 129 features += 'IMAGE_INSTALL:append = " selftest-hardlink-gdb"\n'
122 self.write_config(features) 130 self.write_config(features)
123 bitbake("core-image-minimal") 131 bitbake("core-image-minimal")
124 132
@@ -134,8 +142,10 @@ class PackageTests(OESelftestTestCase):
134 self.logger.error("No debugging symbols found. GDB result:\n%s" % output) 142 self.logger.error("No debugging symbols found. GDB result:\n%s" % output)
135 return False 143 return False
136 144
137 # Check debugging symbols works correctly 145 # Check debugging symbols works correctly. Don't look for a
138 elif re.match(r"Breakpoint 1.*hello\.c.*4", l): 146 # source file as optimisation can put the breakpoint inside
147 # stdio.h.
148 elif "Breakpoint 1 at" in l:
139 return True 149 return True
140 150
141 self.logger.error("GDB result:\n%d: %s", status, output) 151 self.logger.error("GDB result:\n%d: %s", status, output)
@@ -150,25 +160,25 @@ class PackageTests(OESelftestTestCase):
150 self.fail('GDB %s failed' % binary) 160 self.fail('GDB %s failed' % binary)
151 161
152 def test_preserve_ownership(self): 162 def test_preserve_ownership(self):
153 import os, stat, oe.cachedpath 163 features = 'IMAGE_INSTALL:append = " selftest-chown"\n'
154 features = 'IMAGE_INSTALL_append = " selftest-chown"\n'
155 self.write_config(features) 164 self.write_config(features)
156 bitbake("core-image-minimal") 165 bitbake("core-image-minimal")
157 166
158 sysconfdir = get_bb_var('sysconfdir', 'selftest-chown') 167 def check_ownership(qemu, expected_gid, expected_uid, path):
159 def check_ownership(qemu, gid, uid, path):
160 self.logger.info("Check ownership of %s", path) 168 self.logger.info("Check ownership of %s", path)
161 status, output = qemu.run_serial(r'/bin/stat -c "%U %G" ' + path, timeout=60) 169 status, output = qemu.run_serial('stat -c "%U %G" ' + path)
162 output = output.split(" ") 170 self.assertEqual(status, 1, "stat failed: " + output)
163 if output[0] != uid or output[1] != gid : 171 try:
164 self.logger.error("Incrrect ownership %s [%s:%s]", path, output[0], output[1]) 172 uid, gid = output.split()
165 return False 173 self.assertEqual(uid, expected_uid)
166 return True 174 self.assertEqual(gid, expected_gid)
175 except ValueError:
176 self.fail("Cannot parse output: " + output)
167 177
178 sysconfdir = get_bb_var('sysconfdir', 'selftest-chown')
168 with runqemu('core-image-minimal') as qemu: 179 with runqemu('core-image-minimal') as qemu:
169 for path in [ sysconfdir + "/selftest-chown/file", 180 for path in [ sysconfdir + "/selftest-chown/file",
170 sysconfdir + "/selftest-chown/dir", 181 sysconfdir + "/selftest-chown/dir",
171 sysconfdir + "/selftest-chown/symlink", 182 sysconfdir + "/selftest-chown/symlink",
172 sysconfdir + "/selftest-chown/fifotest/fifo"]: 183 sysconfdir + "/selftest-chown/fifotest/fifo"]:
173 if not check_ownership(qemu, "test", "test", path): 184 check_ownership(qemu, "test", "test", path)
174 self.fail('Test ownership %s failed' % path)
diff --git a/meta/lib/oeqa/selftest/cases/pkgdata.py b/meta/lib/oeqa/selftest/cases/pkgdata.py
index 254abc40c6..d786c33018 100644
--- a/meta/lib/oeqa/selftest/cases/pkgdata.py
+++ b/meta/lib/oeqa/selftest/cases/pkgdata.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -47,8 +49,8 @@ class OePkgdataUtilTests(OESelftestTestCase):
47 self.assertGreater(pkgsize, 1, "Size should be greater than 1. %s" % result.output) 49 self.assertGreater(pkgsize, 1, "Size should be greater than 1. %s" % result.output)
48 50
49 def test_find_path(self): 51 def test_find_path(self):
50 result = runCmd('oe-pkgdata-util find-path /lib/libz.so.1') 52 result = runCmd('oe-pkgdata-util find-path /usr/lib/libz.so.1')
51 self.assertEqual(result.output, 'zlib: /lib/libz.so.1') 53 self.assertEqual(result.output, 'zlib: /usr/lib/libz.so.1')
52 result = runCmd('oe-pkgdata-util find-path /usr/bin/m4') 54 result = runCmd('oe-pkgdata-util find-path /usr/bin/m4')
53 self.assertEqual(result.output, 'm4: /usr/bin/m4') 55 self.assertEqual(result.output, 'm4: /usr/bin/m4')
54 result = runCmd('oe-pkgdata-util find-path /not/exist', ignore_status=True) 56 result = runCmd('oe-pkgdata-util find-path /not/exist', ignore_status=True)
@@ -120,8 +122,7 @@ class OePkgdataUtilTests(OESelftestTestCase):
120 curpkg = line.split(':')[0] 122 curpkg = line.split(':')[0]
121 files[curpkg] = [] 123 files[curpkg] = []
122 return files 124 return files
123 bb_vars = get_bb_vars(['base_libdir', 'libdir', 'includedir', 'mandir']) 125 bb_vars = get_bb_vars(['libdir', 'includedir', 'mandir'])
124 base_libdir = bb_vars['base_libdir']
125 libdir = bb_vars['libdir'] 126 libdir = bb_vars['libdir']
126 includedir = bb_vars['includedir'] 127 includedir = bb_vars['includedir']
127 mandir = bb_vars['mandir'] 128 mandir = bb_vars['mandir']
@@ -138,7 +139,7 @@ class OePkgdataUtilTests(OESelftestTestCase):
138 self.assertIn('libz1', list(files.keys()), "listed pkgs. files: %s" %result.output) 139 self.assertIn('libz1', list(files.keys()), "listed pkgs. files: %s" %result.output)
139 self.assertIn('libz-dev', list(files.keys()), "listed pkgs. files: %s" %result.output) 140 self.assertIn('libz-dev', list(files.keys()), "listed pkgs. files: %s" %result.output)
140 self.assertGreater(len(files['libz1']), 1) 141 self.assertGreater(len(files['libz1']), 1)
141 libspec = os.path.join(base_libdir, 'libz.so.1.*') 142 libspec = os.path.join(libdir, 'libz.so.1.*')
142 found = False 143 found = False
143 for fileitem in files['libz1']: 144 for fileitem in files['libz1']:
144 if fnmatch.fnmatchcase(fileitem, libspec): 145 if fnmatch.fnmatchcase(fileitem, libspec):
diff --git a/meta/lib/oeqa/selftest/cases/prservice.py b/meta/lib/oeqa/selftest/cases/prservice.py
index 578b2b4dd9..8da3739c57 100644
--- a/meta/lib/oeqa/selftest/cases/prservice.py
+++ b/meta/lib/oeqa/selftest/cases/prservice.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -12,6 +14,8 @@ from oeqa.selftest.case import OESelftestTestCase
12from oeqa.utils.commands import runCmd, bitbake, get_bb_var 14from oeqa.utils.commands import runCmd, bitbake, get_bb_var
13from oeqa.utils.network import get_free_port 15from oeqa.utils.network import get_free_port
14 16
17import bb.utils
18
15class BitbakePrTests(OESelftestTestCase): 19class BitbakePrTests(OESelftestTestCase):
16 20
17 @classmethod 21 @classmethod
@@ -19,6 +23,16 @@ class BitbakePrTests(OESelftestTestCase):
19 super(BitbakePrTests, cls).setUpClass() 23 super(BitbakePrTests, cls).setUpClass()
20 cls.pkgdata_dir = get_bb_var('PKGDATA_DIR') 24 cls.pkgdata_dir = get_bb_var('PKGDATA_DIR')
21 25
26 cls.exported_db_path = os.path.join(cls.builddir, 'export.inc')
27 cls.current_db_path = os.path.join(get_bb_var('PERSISTENT_DIR'), 'prserv.sqlite3')
28
29 def cleanup(self):
30 # Ensure any memory resident bitbake is stopped
31 bitbake("-m")
32 # Remove any existing export file or prserv database
33 bb.utils.remove(self.exported_db_path)
34 bb.utils.remove(self.current_db_path + "*")
35
22 def get_pr_version(self, package_name): 36 def get_pr_version(self, package_name):
23 package_data_file = os.path.join(self.pkgdata_dir, 'runtime', package_name) 37 package_data_file = os.path.join(self.pkgdata_dir, 'runtime', package_name)
24 package_data = ftools.read_file(package_data_file) 38 package_data = ftools.read_file(package_data_file)
@@ -40,13 +54,14 @@ class BitbakePrTests(OESelftestTestCase):
40 return str(stamps[0]) 54 return str(stamps[0])
41 55
42 def increment_package_pr(self, package_name): 56 def increment_package_pr(self, package_name):
43 inc_data = "do_package_append() {\n bb.build.exec_func('do_test_prserv', d)\n}\ndo_test_prserv() {\necho \"The current date is: %s\" > ${PKGDESTWORK}/${PN}.datestamp\n}" % datetime.datetime.now() 57 inc_data = "do_package:append() {\n bb.build.exec_func('do_test_prserv', d)\n}\ndo_test_prserv() {\necho \"The current date is: %s\" > ${PKGDESTWORK}/${PN}.datestamp\n}" % datetime.datetime.now()
44 self.write_recipeinc(package_name, inc_data) 58 self.write_recipeinc(package_name, inc_data)
45 res = bitbake(package_name, ignore_status=True) 59 res = bitbake(package_name, ignore_status=True)
46 self.delete_recipeinc(package_name) 60 self.delete_recipeinc(package_name)
47 self.assertEqual(res.status, 0, msg=res.output) 61 self.assertEqual(res.status, 0, msg=res.output)
48 62
49 def config_pr_tests(self, package_name, package_type='rpm', pr_socket='localhost:0'): 63 def config_pr_tests(self, package_name, package_type='rpm', pr_socket='localhost:0'):
64 self.cleanup()
50 config_package_data = 'PACKAGE_CLASSES = "package_%s"' % package_type 65 config_package_data = 'PACKAGE_CLASSES = "package_%s"' % package_type
51 self.write_config(config_package_data) 66 self.write_config(config_package_data)
52 config_server_data = 'PRSERV_HOST = "%s"' % pr_socket 67 config_server_data = 'PRSERV_HOST = "%s"' % pr_socket
@@ -66,24 +81,24 @@ class BitbakePrTests(OESelftestTestCase):
66 self.assertTrue(pr_2 - pr_1 == 1, "New PR %s did not increment as expected (from %s), difference should be 1" % (pr_2, pr_1)) 81 self.assertTrue(pr_2 - pr_1 == 1, "New PR %s did not increment as expected (from %s), difference should be 1" % (pr_2, pr_1))
67 self.assertTrue(stamp_1 != stamp_2, "Different pkg rev. but same stamp: %s" % stamp_1) 82 self.assertTrue(stamp_1 != stamp_2, "Different pkg rev. but same stamp: %s" % stamp_1)
68 83
84 self.cleanup()
85
69 def run_test_pr_export_import(self, package_name, replace_current_db=True): 86 def run_test_pr_export_import(self, package_name, replace_current_db=True):
70 self.config_pr_tests(package_name) 87 self.config_pr_tests(package_name)
71 88
72 self.increment_package_pr(package_name) 89 self.increment_package_pr(package_name)
73 pr_1 = self.get_pr_version(package_name) 90 pr_1 = self.get_pr_version(package_name)
74 91
75 exported_db_path = os.path.join(self.builddir, 'export.inc') 92 export_result = runCmd("bitbake-prserv-tool export %s" % self.exported_db_path, ignore_status=True)
76 export_result = runCmd("bitbake-prserv-tool export %s" % exported_db_path, ignore_status=True)
77 self.assertEqual(export_result.status, 0, msg="PR Service database export failed: %s" % export_result.output) 93 self.assertEqual(export_result.status, 0, msg="PR Service database export failed: %s" % export_result.output)
78 self.assertTrue(os.path.exists(exported_db_path)) 94 self.assertTrue(os.path.exists(self.exported_db_path), msg="%s didn't exist, tool output %s" % (self.exported_db_path, export_result.output))
79 95
80 if replace_current_db: 96 if replace_current_db:
81 current_db_path = os.path.join(get_bb_var('PERSISTENT_DIR'), 'prserv.sqlite3') 97 self.assertTrue(os.path.exists(self.current_db_path), msg="Path to current PR Service database is invalid: %s" % self.current_db_path)
82 self.assertTrue(os.path.exists(current_db_path), msg="Path to current PR Service database is invalid: %s" % current_db_path) 98 os.remove(self.current_db_path)
83 os.remove(current_db_path)
84 99
85 import_result = runCmd("bitbake-prserv-tool import %s" % exported_db_path, ignore_status=True) 100 import_result = runCmd("bitbake-prserv-tool import %s" % self.exported_db_path, ignore_status=True)
86 os.remove(exported_db_path) 101 #os.remove(self.exported_db_path)
87 self.assertEqual(import_result.status, 0, msg="PR Service database import failed: %s" % import_result.output) 102 self.assertEqual(import_result.status, 0, msg="PR Service database import failed: %s" % import_result.output)
88 103
89 self.increment_package_pr(package_name) 104 self.increment_package_pr(package_name)
@@ -91,6 +106,8 @@ class BitbakePrTests(OESelftestTestCase):
91 106
92 self.assertTrue(pr_2 - pr_1 == 1, "New PR %s did not increment as expected (from %s), difference should be 1" % (pr_2, pr_1)) 107 self.assertTrue(pr_2 - pr_1 == 1, "New PR %s did not increment as expected (from %s), difference should be 1" % (pr_2, pr_1))
93 108
109 self.cleanup()
110
94 def test_import_export_replace_db(self): 111 def test_import_export_replace_db(self):
95 self.run_test_pr_export_import('m4') 112 self.run_test_pr_export_import('m4')
96 113
diff --git a/meta/lib/oeqa/selftest/cases/pseudo.py b/meta/lib/oeqa/selftest/cases/pseudo.py
index 33593d5ce9..3ef8786022 100644
--- a/meta/lib/oeqa/selftest/cases/pseudo.py
+++ b/meta/lib/oeqa/selftest/cases/pseudo.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/selftest/cases/recipetool.py b/meta/lib/oeqa/selftest/cases/recipetool.py
index 9d56e9e1e3..aebea42502 100644
--- a/meta/lib/oeqa/selftest/cases/recipetool.py
+++ b/meta/lib/oeqa/selftest/cases/recipetool.py
@@ -1,7 +1,10 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
7import errno
5import os 8import os
6import shutil 9import shutil
7import tempfile 10import tempfile
@@ -25,7 +28,17 @@ def tearDownModule():
25 runCmd('rm -rf %s' % templayerdir) 28 runCmd('rm -rf %s' % templayerdir)
26 29
27 30
28class RecipetoolBase(devtool.DevtoolBase): 31def needTomllib(test):
32 # This test requires python 3.11 or above for the tomllib module, or the tomli module to be installed
33 try:
34 import tomllib
35 except ImportError:
36 try:
37 import tomli
38 except ImportError:
39 test.skipTest('Test requires python 3.11 or above for tomllib module or tomli module')
40
41class RecipetoolBase(devtool.DevtoolTestCase):
29 42
30 def setUpLocal(self): 43 def setUpLocal(self):
31 super(RecipetoolBase, self).setUpLocal() 44 super(RecipetoolBase, self).setUpLocal()
@@ -35,6 +48,8 @@ class RecipetoolBase(devtool.DevtoolBase):
35 self.testfile = os.path.join(self.tempdir, 'testfile') 48 self.testfile = os.path.join(self.tempdir, 'testfile')
36 with open(self.testfile, 'w') as f: 49 with open(self.testfile, 'w') as f:
37 f.write('Test file\n') 50 f.write('Test file\n')
51 config = 'BBMASK += "meta-poky/recipes-core/base-files/base-files_%.bbappend"\n'
52 self.append_config(config)
38 53
39 def tearDownLocal(self): 54 def tearDownLocal(self):
40 runCmd('rm -rf %s/recipes-*' % self.templayerdir) 55 runCmd('rm -rf %s/recipes-*' % self.templayerdir)
@@ -68,17 +83,16 @@ class RecipetoolBase(devtool.DevtoolBase):
68 return bbappendfile, result.output 83 return bbappendfile, result.output
69 84
70 85
71class RecipetoolTests(RecipetoolBase): 86class RecipetoolAppendTests(RecipetoolBase):
72 87
73 @classmethod 88 @classmethod
74 def setUpClass(cls): 89 def setUpClass(cls):
75 super(RecipetoolTests, cls).setUpClass() 90 super(RecipetoolAppendTests, cls).setUpClass()
76 # Ensure we have the right data in shlibs/pkgdata 91 # Ensure we have the right data in shlibs/pkgdata
77 cls.logger.info('Running bitbake to generate pkgdata') 92 cls.logger.info('Running bitbake to generate pkgdata')
78 bitbake('-c packagedata base-files coreutils busybox selftest-recipetool-appendfile') 93 bitbake('-c packagedata base-files coreutils busybox selftest-recipetool-appendfile')
79 bb_vars = get_bb_vars(['COREBASE', 'BBPATH']) 94 bb_vars = get_bb_vars(['COREBASE'])
80 cls.corebase = bb_vars['COREBASE'] 95 cls.corebase = bb_vars['COREBASE']
81 cls.bbpath = bb_vars['BBPATH']
82 96
83 def _try_recipetool_appendfile(self, testrecipe, destfile, newfile, options, expectedlines, expectedfiles): 97 def _try_recipetool_appendfile(self, testrecipe, destfile, newfile, options, expectedlines, expectedfiles):
84 cmd = 'recipetool appendfile %s %s %s %s' % (self.templayerdir, destfile, newfile, options) 98 cmd = 'recipetool appendfile %s %s %s %s' % (self.templayerdir, destfile, newfile, options)
@@ -94,7 +108,7 @@ class RecipetoolTests(RecipetoolBase):
94 108
95 def test_recipetool_appendfile_basic(self): 109 def test_recipetool_appendfile_basic(self):
96 # Basic test 110 # Basic test
97 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 111 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
98 '\n'] 112 '\n']
99 _, output = self._try_recipetool_appendfile('base-files', '/etc/motd', self.testfile, '', expectedlines, ['motd']) 113 _, output = self._try_recipetool_appendfile('base-files', '/etc/motd', self.testfile, '', expectedlines, ['motd'])
100 self.assertNotIn('WARNING: ', output) 114 self.assertNotIn('WARNING: ', output)
@@ -112,11 +126,11 @@ class RecipetoolTests(RecipetoolBase):
112 # Need a test file - should be executable 126 # Need a test file - should be executable
113 testfile2 = os.path.join(self.corebase, 'oe-init-build-env') 127 testfile2 = os.path.join(self.corebase, 'oe-init-build-env')
114 testfile2name = os.path.basename(testfile2) 128 testfile2name = os.path.basename(testfile2)
115 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 129 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
116 '\n', 130 '\n',
117 'SRC_URI += "file://%s"\n' % testfile2name, 131 'SRC_URI += "file://%s"\n' % testfile2name,
118 '\n', 132 '\n',
119 'do_install_append() {\n', 133 'do_install:append() {\n',
120 ' install -d ${D}${base_bindir}\n', 134 ' install -d ${D}${base_bindir}\n',
121 ' install -m 0755 ${WORKDIR}/%s ${D}${base_bindir}/ls\n' % testfile2name, 135 ' install -m 0755 ${WORKDIR}/%s ${D}${base_bindir}/ls\n' % testfile2name,
122 '}\n'] 136 '}\n']
@@ -138,11 +152,11 @@ class RecipetoolTests(RecipetoolBase):
138 152
139 def test_recipetool_appendfile_add(self): 153 def test_recipetool_appendfile_add(self):
140 # Try arbitrary file add to a recipe 154 # Try arbitrary file add to a recipe
141 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 155 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
142 '\n', 156 '\n',
143 'SRC_URI += "file://testfile"\n', 157 'SRC_URI += "file://testfile"\n',
144 '\n', 158 '\n',
145 'do_install_append() {\n', 159 'do_install:append() {\n',
146 ' install -d ${D}${datadir}\n', 160 ' install -d ${D}${datadir}\n',
147 ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n', 161 ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n',
148 '}\n'] 162 '}\n']
@@ -151,13 +165,13 @@ class RecipetoolTests(RecipetoolBase):
151 # (so we're testing that, plus modifying an existing bbappend) 165 # (so we're testing that, plus modifying an existing bbappend)
152 testfile2 = os.path.join(self.corebase, 'oe-init-build-env') 166 testfile2 = os.path.join(self.corebase, 'oe-init-build-env')
153 testfile2name = os.path.basename(testfile2) 167 testfile2name = os.path.basename(testfile2)
154 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 168 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
155 '\n', 169 '\n',
156 'SRC_URI += "file://testfile \\\n', 170 'SRC_URI += "file://testfile \\\n',
157 ' file://%s \\\n' % testfile2name, 171 ' file://%s \\\n' % testfile2name,
158 ' "\n', 172 ' "\n',
159 '\n', 173 '\n',
160 'do_install_append() {\n', 174 'do_install:append() {\n',
161 ' install -d ${D}${datadir}\n', 175 ' install -d ${D}${datadir}\n',
162 ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n', 176 ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n',
163 ' install -m 0755 ${WORKDIR}/%s ${D}${datadir}/scriptname\n' % testfile2name, 177 ' install -m 0755 ${WORKDIR}/%s ${D}${datadir}/scriptname\n' % testfile2name,
@@ -166,11 +180,11 @@ class RecipetoolTests(RecipetoolBase):
166 180
167 def test_recipetool_appendfile_add_bindir(self): 181 def test_recipetool_appendfile_add_bindir(self):
168 # Try arbitrary file add to a recipe, this time to a location such that should be installed as executable 182 # Try arbitrary file add to a recipe, this time to a location such that should be installed as executable
169 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 183 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
170 '\n', 184 '\n',
171 'SRC_URI += "file://testfile"\n', 185 'SRC_URI += "file://testfile"\n',
172 '\n', 186 '\n',
173 'do_install_append() {\n', 187 'do_install:append() {\n',
174 ' install -d ${D}${bindir}\n', 188 ' install -d ${D}${bindir}\n',
175 ' install -m 0755 ${WORKDIR}/testfile ${D}${bindir}/selftest-recipetool-testbin\n', 189 ' install -m 0755 ${WORKDIR}/testfile ${D}${bindir}/selftest-recipetool-testbin\n',
176 '}\n'] 190 '}\n']
@@ -179,13 +193,13 @@ class RecipetoolTests(RecipetoolBase):
179 193
180 def test_recipetool_appendfile_add_machine(self): 194 def test_recipetool_appendfile_add_machine(self):
181 # Try arbitrary file add to a recipe, this time to a location such that should be installed as executable 195 # Try arbitrary file add to a recipe, this time to a location such that should be installed as executable
182 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 196 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
183 '\n', 197 '\n',
184 'PACKAGE_ARCH = "${MACHINE_ARCH}"\n', 198 'PACKAGE_ARCH = "${MACHINE_ARCH}"\n',
185 '\n', 199 '\n',
186 'SRC_URI_append_mymachine = " file://testfile"\n', 200 'SRC_URI:append:mymachine = " file://testfile"\n',
187 '\n', 201 '\n',
188 'do_install_append_mymachine() {\n', 202 'do_install:append:mymachine() {\n',
189 ' install -d ${D}${datadir}\n', 203 ' install -d ${D}${datadir}\n',
190 ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n', 204 ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/something\n',
191 '}\n'] 205 '}\n']
@@ -194,32 +208,32 @@ class RecipetoolTests(RecipetoolBase):
194 208
195 def test_recipetool_appendfile_orig(self): 209 def test_recipetool_appendfile_orig(self):
196 # A file that's in SRC_URI and in do_install with the same name 210 # A file that's in SRC_URI and in do_install with the same name
197 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 211 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
198 '\n'] 212 '\n']
199 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-orig', self.testfile, '', expectedlines, ['selftest-replaceme-orig']) 213 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-orig', self.testfile, '', expectedlines, ['selftest-replaceme-orig'])
200 self.assertNotIn('WARNING: ', output) 214 self.assertNotIn('WARNING: ', output)
201 215
202 def test_recipetool_appendfile_todir(self): 216 def test_recipetool_appendfile_todir(self):
203 # A file that's in SRC_URI and in do_install with destination directory rather than file 217 # A file that's in SRC_URI and in do_install with destination directory rather than file
204 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 218 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
205 '\n'] 219 '\n']
206 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-todir', self.testfile, '', expectedlines, ['selftest-replaceme-todir']) 220 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-todir', self.testfile, '', expectedlines, ['selftest-replaceme-todir'])
207 self.assertNotIn('WARNING: ', output) 221 self.assertNotIn('WARNING: ', output)
208 222
209 def test_recipetool_appendfile_renamed(self): 223 def test_recipetool_appendfile_renamed(self):
210 # A file that's in SRC_URI with a different name to the destination file 224 # A file that's in SRC_URI with a different name to the destination file
211 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 225 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
212 '\n'] 226 '\n']
213 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-renamed', self.testfile, '', expectedlines, ['file1']) 227 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-renamed', self.testfile, '', expectedlines, ['file1'])
214 self.assertNotIn('WARNING: ', output) 228 self.assertNotIn('WARNING: ', output)
215 229
216 def test_recipetool_appendfile_subdir(self): 230 def test_recipetool_appendfile_subdir(self):
217 # A file that's in SRC_URI in a subdir 231 # A file that's in SRC_URI in a subdir
218 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 232 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
219 '\n', 233 '\n',
220 'SRC_URI += "file://testfile"\n', 234 'SRC_URI += "file://testfile"\n',
221 '\n', 235 '\n',
222 'do_install_append() {\n', 236 'do_install:append() {\n',
223 ' install -d ${D}${datadir}\n', 237 ' install -d ${D}${datadir}\n',
224 ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-subdir\n', 238 ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-subdir\n',
225 '}\n'] 239 '}\n']
@@ -228,25 +242,25 @@ class RecipetoolTests(RecipetoolBase):
228 242
229 def test_recipetool_appendfile_inst_glob(self): 243 def test_recipetool_appendfile_inst_glob(self):
230 # A file that's in do_install as a glob 244 # A file that's in do_install as a glob
231 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 245 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
232 '\n'] 246 '\n']
233 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-inst-globfile', self.testfile, '', expectedlines, ['selftest-replaceme-inst-globfile']) 247 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-inst-globfile', self.testfile, '', expectedlines, ['selftest-replaceme-inst-globfile'])
234 self.assertNotIn('WARNING: ', output) 248 self.assertNotIn('WARNING: ', output)
235 249
236 def test_recipetool_appendfile_inst_todir_glob(self): 250 def test_recipetool_appendfile_inst_todir_glob(self):
237 # A file that's in do_install as a glob with destination as a directory 251 # A file that's in do_install as a glob with destination as a directory
238 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 252 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
239 '\n'] 253 '\n']
240 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-inst-todir-globfile', self.testfile, '', expectedlines, ['selftest-replaceme-inst-todir-globfile']) 254 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-inst-todir-globfile', self.testfile, '', expectedlines, ['selftest-replaceme-inst-todir-globfile'])
241 self.assertNotIn('WARNING: ', output) 255 self.assertNotIn('WARNING: ', output)
242 256
243 def test_recipetool_appendfile_patch(self): 257 def test_recipetool_appendfile_patch(self):
244 # A file that's added by a patch in SRC_URI 258 # A file that's added by a patch in SRC_URI
245 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 259 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
246 '\n', 260 '\n',
247 'SRC_URI += "file://testfile"\n', 261 'SRC_URI += "file://testfile"\n',
248 '\n', 262 '\n',
249 'do_install_append() {\n', 263 'do_install:append() {\n',
250 ' install -d ${D}${sysconfdir}\n', 264 ' install -d ${D}${sysconfdir}\n',
251 ' install -m 0644 ${WORKDIR}/testfile ${D}${sysconfdir}/selftest-replaceme-patched\n', 265 ' install -m 0644 ${WORKDIR}/testfile ${D}${sysconfdir}/selftest-replaceme-patched\n',
252 '}\n'] 266 '}\n']
@@ -260,11 +274,11 @@ class RecipetoolTests(RecipetoolBase):
260 274
261 def test_recipetool_appendfile_script(self): 275 def test_recipetool_appendfile_script(self):
262 # Now, a file that's in SRC_URI but installed by a script (so no mention in do_install) 276 # Now, a file that's in SRC_URI but installed by a script (so no mention in do_install)
263 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 277 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
264 '\n', 278 '\n',
265 'SRC_URI += "file://testfile"\n', 279 'SRC_URI += "file://testfile"\n',
266 '\n', 280 '\n',
267 'do_install_append() {\n', 281 'do_install:append() {\n',
268 ' install -d ${D}${datadir}\n', 282 ' install -d ${D}${datadir}\n',
269 ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-scripted\n', 283 ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-scripted\n',
270 '}\n'] 284 '}\n']
@@ -273,7 +287,7 @@ class RecipetoolTests(RecipetoolBase):
273 287
274 def test_recipetool_appendfile_inst_func(self): 288 def test_recipetool_appendfile_inst_func(self):
275 # A file that's installed from a function called by do_install 289 # A file that's installed from a function called by do_install
276 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 290 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
277 '\n'] 291 '\n']
278 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-inst-func', self.testfile, '', expectedlines, ['selftest-replaceme-inst-func']) 292 _, output = self._try_recipetool_appendfile('selftest-recipetool-appendfile', '/usr/share/selftest-replaceme-inst-func', self.testfile, '', expectedlines, ['selftest-replaceme-inst-func'])
279 self.assertNotIn('WARNING: ', output) 293 self.assertNotIn('WARNING: ', output)
@@ -283,11 +297,11 @@ class RecipetoolTests(RecipetoolBase):
283 # First try without specifying recipe 297 # First try without specifying recipe
284 self._try_recipetool_appendfile_fail('/usr/share/selftest-replaceme-postinst', self.testfile, ['File /usr/share/selftest-replaceme-postinst may be written out in a pre/postinstall script of the following recipes:', 'selftest-recipetool-appendfile']) 298 self._try_recipetool_appendfile_fail('/usr/share/selftest-replaceme-postinst', self.testfile, ['File /usr/share/selftest-replaceme-postinst may be written out in a pre/postinstall script of the following recipes:', 'selftest-recipetool-appendfile'])
285 # Now specify recipe 299 # Now specify recipe
286 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 300 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
287 '\n', 301 '\n',
288 'SRC_URI += "file://testfile"\n', 302 'SRC_URI += "file://testfile"\n',
289 '\n', 303 '\n',
290 'do_install_append() {\n', 304 'do_install:append() {\n',
291 ' install -d ${D}${datadir}\n', 305 ' install -d ${D}${datadir}\n',
292 ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-postinst\n', 306 ' install -m 0644 ${WORKDIR}/testfile ${D}${datadir}/selftest-replaceme-postinst\n',
293 '}\n'] 307 '}\n']
@@ -332,6 +346,9 @@ class RecipetoolTests(RecipetoolBase):
332 filename = try_appendfile_wc('-w') 346 filename = try_appendfile_wc('-w')
333 self.assertEqual(filename, recipefn.split('_')[0] + '_%.bbappend') 347 self.assertEqual(filename, recipefn.split('_')[0] + '_%.bbappend')
334 348
349
350class RecipetoolCreateTests(RecipetoolBase):
351
335 def test_recipetool_create(self): 352 def test_recipetool_create(self):
336 # Try adding a recipe 353 # Try adding a recipe
337 tempsrc = os.path.join(self.tempdir, 'srctree') 354 tempsrc = os.path.join(self.tempdir, 'srctree')
@@ -341,14 +358,13 @@ class RecipetoolTests(RecipetoolBase):
341 result = runCmd('recipetool create -o %s %s -x %s' % (recipefile, srcuri, tempsrc)) 358 result = runCmd('recipetool create -o %s %s -x %s' % (recipefile, srcuri, tempsrc))
342 self.assertTrue(os.path.isfile(recipefile)) 359 self.assertTrue(os.path.isfile(recipefile))
343 checkvars = {} 360 checkvars = {}
344 checkvars['LICENSE'] = 'GPLv2' 361 checkvars['LICENSE'] = 'GPL-2.0-only'
345 checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263' 362 checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263'
346 checkvars['SRC_URI'] = 'https://github.com/logrotate/logrotate/releases/download/${PV}/logrotate-${PV}.tar.xz' 363 checkvars['SRC_URI'] = 'https://github.com/logrotate/logrotate/releases/download/${PV}/logrotate-${PV}.tar.xz'
347 checkvars['SRC_URI[md5sum]'] = 'a560c57fac87c45b2fc17406cdf79288'
348 checkvars['SRC_URI[sha256sum]'] = '2e6a401cac9024db2288297e3be1a8ab60e7401ba8e91225218aaf4a27e82a07' 364 checkvars['SRC_URI[sha256sum]'] = '2e6a401cac9024db2288297e3be1a8ab60e7401ba8e91225218aaf4a27e82a07'
349 self._test_recipe_contents(recipefile, checkvars, []) 365 self._test_recipe_contents(recipefile, checkvars, [])
350 366
351 def test_recipetool_create_git(self): 367 def test_recipetool_create_autotools(self):
352 if 'x11' not in get_bb_var('DISTRO_FEATURES'): 368 if 'x11' not in get_bb_var('DISTRO_FEATURES'):
353 self.skipTest('Test requires x11 as distro feature') 369 self.skipTest('Test requires x11 as distro feature')
354 # Ensure we have the right data in shlibs/pkgdata 370 # Ensure we have the right data in shlibs/pkgdata
@@ -357,15 +373,15 @@ class RecipetoolTests(RecipetoolBase):
357 tempsrc = os.path.join(self.tempdir, 'srctree') 373 tempsrc = os.path.join(self.tempdir, 'srctree')
358 os.makedirs(tempsrc) 374 os.makedirs(tempsrc)
359 recipefile = os.path.join(self.tempdir, 'libmatchbox.bb') 375 recipefile = os.path.join(self.tempdir, 'libmatchbox.bb')
360 srcuri = 'git://git.yoctoproject.org/libmatchbox' 376 srcuri = 'git://git.yoctoproject.org/libmatchbox;protocol=https'
361 result = runCmd(['recipetool', 'create', '-o', recipefile, srcuri + ";rev=9f7cf8895ae2d39c465c04cc78e918c157420269", '-x', tempsrc]) 377 result = runCmd(['recipetool', 'create', '-o', recipefile, srcuri + ";rev=9f7cf8895ae2d39c465c04cc78e918c157420269", '-x', tempsrc])
362 self.assertTrue(os.path.isfile(recipefile), 'recipetool did not create recipe file; output:\n%s' % result.output) 378 self.assertTrue(os.path.isfile(recipefile), 'recipetool did not create recipe file; output:\n%s' % result.output)
363 checkvars = {} 379 checkvars = {}
364 checkvars['LICENSE'] = 'LGPLv2.1' 380 checkvars['LICENSE'] = 'LGPL-2.1-only'
365 checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=7fbc338309ac38fefcd64b04bb903e34' 381 checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=7fbc338309ac38fefcd64b04bb903e34'
366 checkvars['S'] = '${WORKDIR}/git' 382 checkvars['S'] = '${WORKDIR}/git'
367 checkvars['PV'] = '1.11+git${SRCPV}' 383 checkvars['PV'] = '1.11+git'
368 checkvars['SRC_URI'] = srcuri 384 checkvars['SRC_URI'] = srcuri + ';branch=master'
369 checkvars['DEPENDS'] = set(['libcheck', 'libjpeg-turbo', 'libpng', 'libx11', 'libxext', 'pango']) 385 checkvars['DEPENDS'] = set(['libcheck', 'libjpeg-turbo', 'libpng', 'libx11', 'libxext', 'pango'])
370 inherits = ['autotools', 'pkgconfig'] 386 inherits = ['autotools', 'pkgconfig']
371 self._test_recipe_contents(recipefile, checkvars, inherits) 387 self._test_recipe_contents(recipefile, checkvars, inherits)
@@ -374,8 +390,8 @@ class RecipetoolTests(RecipetoolBase):
374 # Try adding a recipe 390 # Try adding a recipe
375 temprecipe = os.path.join(self.tempdir, 'recipe') 391 temprecipe = os.path.join(self.tempdir, 'recipe')
376 os.makedirs(temprecipe) 392 os.makedirs(temprecipe)
377 pv = '1.7.3.0' 393 pv = '1.7.4.1'
378 srcuri = 'http://www.dest-unreach.org/socat/download/socat-%s.tar.bz2' % pv 394 srcuri = 'http://www.dest-unreach.org/socat/download/Archive/socat-%s.tar.bz2' % pv
379 result = runCmd('recipetool create %s -o %s' % (srcuri, temprecipe)) 395 result = runCmd('recipetool create %s -o %s' % (srcuri, temprecipe))
380 dirlist = os.listdir(temprecipe) 396 dirlist = os.listdir(temprecipe)
381 if len(dirlist) > 1: 397 if len(dirlist) > 1:
@@ -384,7 +400,7 @@ class RecipetoolTests(RecipetoolBase):
384 self.fail('recipetool did not create recipe file; output:\n%s\ndirlist:\n%s' % (result.output, str(dirlist))) 400 self.fail('recipetool did not create recipe file; output:\n%s\ndirlist:\n%s' % (result.output, str(dirlist)))
385 self.assertEqual(dirlist[0], 'socat_%s.bb' % pv, 'Recipe file incorrectly named') 401 self.assertEqual(dirlist[0], 'socat_%s.bb' % pv, 'Recipe file incorrectly named')
386 checkvars = {} 402 checkvars = {}
387 checkvars['LICENSE'] = set(['Unknown', 'GPLv2']) 403 checkvars['LICENSE'] = set(['Unknown', 'GPL-2.0-only'])
388 checkvars['LIC_FILES_CHKSUM'] = set(['file://COPYING.OpenSSL;md5=5c9bccc77f67a8328ef4ebaf468116f4', 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263']) 404 checkvars['LIC_FILES_CHKSUM'] = set(['file://COPYING.OpenSSL;md5=5c9bccc77f67a8328ef4ebaf468116f4', 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263'])
389 # We don't check DEPENDS since they are variable for this recipe depending on what's in the sysroot 405 # We don't check DEPENDS since they are variable for this recipe depending on what's in the sysroot
390 checkvars['S'] = None 406 checkvars['S'] = None
@@ -400,9 +416,8 @@ class RecipetoolTests(RecipetoolBase):
400 result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri)) 416 result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
401 self.assertTrue(os.path.isfile(recipefile)) 417 self.assertTrue(os.path.isfile(recipefile))
402 checkvars = {} 418 checkvars = {}
403 checkvars['LICENSE'] = set(['LGPLv2.1', 'MPL-1.1']) 419 checkvars['LICENSE'] = set(['LGPL-2.1-only', 'MPL-1.1-only'])
404 checkvars['SRC_URI'] = 'http://taglib.github.io/releases/taglib-${PV}.tar.gz' 420 checkvars['SRC_URI'] = 'http://taglib.github.io/releases/taglib-${PV}.tar.gz'
405 checkvars['SRC_URI[md5sum]'] = 'cee7be0ccfc892fa433d6c837df9522a'
406 checkvars['SRC_URI[sha256sum]'] = 'b6d1a5a610aae6ff39d93de5efd0fdc787aa9e9dc1e7026fa4c961b26563526b' 421 checkvars['SRC_URI[sha256sum]'] = 'b6d1a5a610aae6ff39d93de5efd0fdc787aa9e9dc1e7026fa4c961b26563526b'
407 checkvars['DEPENDS'] = set(['boost', 'zlib']) 422 checkvars['DEPENDS'] = set(['boost', 'zlib'])
408 inherits = ['cmake'] 423 inherits = ['cmake']
@@ -424,77 +439,271 @@ class RecipetoolTests(RecipetoolBase):
424 checkvars = {} 439 checkvars = {}
425 checkvars['SUMMARY'] = 'Node Server Example' 440 checkvars['SUMMARY'] = 'Node Server Example'
426 checkvars['HOMEPAGE'] = 'https://github.com/savoirfairelinux/node-server-example#readme' 441 checkvars['HOMEPAGE'] = 'https://github.com/savoirfairelinux/node-server-example#readme'
427 checkvars['LICENSE'] = set(['MIT', 'ISC', 'Unknown']) 442 checkvars['LICENSE'] = 'BSD-3-Clause & ISC & MIT & Unknown'
428 urls = [] 443 urls = []
429 urls.append('npm://registry.npmjs.org/;package=@savoirfairelinux/node-server-example;version=${PV}') 444 urls.append('npm://registry.npmjs.org/;package=@savoirfairelinux/node-server-example;version=${PV}')
430 urls.append('npmsw://${THISDIR}/${BPN}/npm-shrinkwrap.json') 445 urls.append('npmsw://${THISDIR}/${BPN}/npm-shrinkwrap.json')
431 checkvars['SRC_URI'] = set(urls) 446 checkvars['SRC_URI'] = set(urls)
432 checkvars['S'] = '${WORKDIR}/npm' 447 checkvars['S'] = '${WORKDIR}/npm'
433 checkvars['LICENSE_${PN}'] = 'MIT' 448 checkvars['LICENSE:${PN}'] = 'MIT'
434 checkvars['LICENSE_${PN}-base64'] = 'Unknown' 449 checkvars['LICENSE:${PN}-base64'] = 'Unknown'
435 checkvars['LICENSE_${PN}-accepts'] = 'MIT' 450 checkvars['LICENSE:${PN}-accepts'] = 'MIT'
436 checkvars['LICENSE_${PN}-inherits'] = 'ISC' 451 checkvars['LICENSE:${PN}-inherits'] = 'ISC'
437 inherits = ['npm'] 452 inherits = ['npm']
438 self._test_recipe_contents(recipefile, checkvars, inherits) 453 self._test_recipe_contents(recipefile, checkvars, inherits)
439 454
440 def test_recipetool_create_github(self): 455 def test_recipetool_create_github(self):
441 # Basic test to see if github URL mangling works 456 # Basic test to see if github URL mangling works. Deliberately use an
457 # older release of Meson at present so we don't need a toml parser.
442 temprecipe = os.path.join(self.tempdir, 'recipe') 458 temprecipe = os.path.join(self.tempdir, 'recipe')
443 os.makedirs(temprecipe) 459 os.makedirs(temprecipe)
444 recipefile = os.path.join(temprecipe, 'meson_git.bb') 460 recipefile = os.path.join(temprecipe, 'python3-meson_git.bb')
445 srcuri = 'https://github.com/mesonbuild/meson;rev=0.32.0' 461 srcuri = 'https://github.com/mesonbuild/meson;rev=0.52.1'
446 result = runCmd(['recipetool', 'create', '-o', temprecipe, srcuri]) 462 cmd = ['recipetool', 'create', '-o', temprecipe, srcuri]
447 self.assertTrue(os.path.isfile(recipefile)) 463 result = runCmd(cmd)
464 self.assertTrue(os.path.isfile(recipefile), msg="recipe %s not created for command %s, output %s" % (recipefile, " ".join(cmd), result.output))
448 checkvars = {} 465 checkvars = {}
449 checkvars['LICENSE'] = set(['Apache-2.0']) 466 checkvars['LICENSE'] = set(['Apache-2.0', "Unknown"])
450 checkvars['SRC_URI'] = 'git://github.com/mesonbuild/meson;protocol=https' 467 checkvars['SRC_URI'] = 'git://github.com/mesonbuild/meson;protocol=https;branch=0.52'
451 inherits = ['setuptools3'] 468 inherits = ['setuptools3']
452 self._test_recipe_contents(recipefile, checkvars, inherits) 469 self._test_recipe_contents(recipefile, checkvars, inherits)
453 470
454 def test_recipetool_create_python3_setuptools(self): 471 def test_recipetool_create_python3_setuptools(self):
455 # Test creating python3 package from tarball (using setuptools3 class) 472 # Test creating python3 package from tarball (using setuptools3 class)
473 # Use the --no-pypi switch to avoid creating a pypi enabled recipe and
474 # check the created recipe as if it was a more general tarball
456 temprecipe = os.path.join(self.tempdir, 'recipe') 475 temprecipe = os.path.join(self.tempdir, 'recipe')
457 os.makedirs(temprecipe) 476 os.makedirs(temprecipe)
458 pn = 'python-magic' 477 pn = 'python-magic'
459 pv = '0.4.15' 478 pv = '0.4.15'
460 recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, pv)) 479 recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, pv))
461 srcuri = 'https://files.pythonhosted.org/packages/84/30/80932401906eaf787f2e9bd86dc458f1d2e75b064b4c187341f29516945c/python-magic-%s.tar.gz' % pv 480 srcuri = 'https://files.pythonhosted.org/packages/84/30/80932401906eaf787f2e9bd86dc458f1d2e75b064b4c187341f29516945c/python-magic-%s.tar.gz' % pv
462 result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri)) 481 result = runCmd('recipetool create --no-pypi -o %s %s' % (temprecipe, srcuri))
463 self.assertTrue(os.path.isfile(recipefile)) 482 self.assertTrue(os.path.isfile(recipefile))
464 checkvars = {} 483 checkvars = {}
465 checkvars['LICENSE'] = set(['MIT']) 484 checkvars['LICENSE'] = set(['MIT'])
466 checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=16a934f165e8c3245f241e77d401bb88' 485 checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=16a934f165e8c3245f241e77d401bb88'
467 checkvars['SRC_URI'] = 'https://files.pythonhosted.org/packages/84/30/80932401906eaf787f2e9bd86dc458f1d2e75b064b4c187341f29516945c/python-magic-${PV}.tar.gz' 486 checkvars['SRC_URI'] = 'https://files.pythonhosted.org/packages/84/30/80932401906eaf787f2e9bd86dc458f1d2e75b064b4c187341f29516945c/python-magic-${PV}.tar.gz'
468 checkvars['SRC_URI[md5sum]'] = 'e384c95a47218f66c6501cd6dd45ff59'
469 checkvars['SRC_URI[sha256sum]'] = 'f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5' 487 checkvars['SRC_URI[sha256sum]'] = 'f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5'
470 inherits = ['setuptools3'] 488 inherits = ['setuptools3']
471 self._test_recipe_contents(recipefile, checkvars, inherits) 489 self._test_recipe_contents(recipefile, checkvars, inherits)
472 490
473 def test_recipetool_create_python3_distutils(self): 491 def test_recipetool_create_python3_setuptools_pypi_tarball(self):
474 # Test creating python3 package from tarball (using distutils3 class) 492 # Test creating python3 package from tarball (using setuptools3 and pypi classes)
475 temprecipe = os.path.join(self.tempdir, 'recipe') 493 temprecipe = os.path.join(self.tempdir, 'recipe')
476 os.makedirs(temprecipe) 494 os.makedirs(temprecipe)
477 pn = 'docutils' 495 pn = 'python-magic'
478 pv = '0.14' 496 pv = '0.4.15'
479 recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, pv)) 497 recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, pv))
480 srcuri = 'https://files.pythonhosted.org/packages/84/f4/5771e41fdf52aabebbadecc9381d11dea0fa34e4759b4071244fa094804c/docutils-%s.tar.gz' % pv 498 srcuri = 'https://files.pythonhosted.org/packages/84/30/80932401906eaf787f2e9bd86dc458f1d2e75b064b4c187341f29516945c/python-magic-%s.tar.gz' % pv
481 result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri)) 499 result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
482 self.assertTrue(os.path.isfile(recipefile)) 500 self.assertTrue(os.path.isfile(recipefile))
483 checkvars = {} 501 checkvars = {}
484 checkvars['LICENSE'] = set(['PSF', '&', 'BSD', 'GPL']) 502 checkvars['LICENSE'] = set(['MIT'])
485 checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING.txt;md5=35a23d42b615470583563132872c97d6' 503 checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=16a934f165e8c3245f241e77d401bb88'
486 checkvars['SRC_URI'] = 'https://files.pythonhosted.org/packages/84/f4/5771e41fdf52aabebbadecc9381d11dea0fa34e4759b4071244fa094804c/docutils-${PV}.tar.gz' 504 checkvars['SRC_URI[sha256sum]'] = 'f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5'
487 checkvars['SRC_URI[md5sum]'] = 'c53768d63db3873b7d452833553469de' 505 checkvars['PYPI_PACKAGE'] = pn
488 checkvars['SRC_URI[sha256sum]'] = '51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274' 506 inherits = ['setuptools3', 'pypi']
489 inherits = ['distutils3'] 507 self._test_recipe_contents(recipefile, checkvars, inherits)
508
509 def test_recipetool_create_python3_setuptools_pypi(self):
510 # Test creating python3 package from pypi url (using setuptools3 and pypi classes)
511 # Intentionally using the setuptools3 class here instead of any of the pep517 classes
512 # to avoid the toml dependency and allow this test to run on host autobuilders
513 # with older versions of python
514 temprecipe = os.path.join(self.tempdir, 'recipe')
515 os.makedirs(temprecipe)
516 pn = 'python-magic'
517 pv = '0.4.15'
518 recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, pv))
519 # First specify the required version in the url
520 srcuri = 'https://pypi.org/project/%s/%s' % (pn, pv)
521 runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
522 self.assertTrue(os.path.isfile(recipefile))
523 checkvars = {}
524 checkvars['LICENSE'] = set(['MIT'])
525 checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=16a934f165e8c3245f241e77d401bb88'
526 checkvars['SRC_URI[sha256sum]'] = 'f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5'
527 checkvars['PYPI_PACKAGE'] = pn
528 inherits = ['setuptools3', "pypi"]
529 self._test_recipe_contents(recipefile, checkvars, inherits)
530
531 # Now specify the version as a recipetool parameter
532 runCmd('rm -rf %s' % recipefile)
533 self.assertFalse(os.path.isfile(recipefile))
534 srcuri = 'https://pypi.org/project/%s' % pn
535 runCmd('recipetool create -o %s %s --version %s' % (temprecipe, srcuri, pv))
536 self.assertTrue(os.path.isfile(recipefile))
537 checkvars = {}
538 checkvars['LICENSE'] = set(['MIT'])
539 checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=16a934f165e8c3245f241e77d401bb88'
540 checkvars['SRC_URI[sha256sum]'] = 'f3765c0f582d2dfc72c15f3b5a82aecfae9498bd29ca840d72f37d7bd38bfcd5'
541 checkvars['PYPI_PACKAGE'] = pn
542 inherits = ['setuptools3', "pypi"]
543 self._test_recipe_contents(recipefile, checkvars, inherits)
544
545 # Now, try to grab the latest version of the package. We cannot guess the recipe name
546 # without hardcoding the latest version, which would mean updating the test on each
547 # release, so use a regexp instead
548 runCmd('rm -rf %s' % recipefile)
549 self.assertFalse(os.path.isfile(recipefile))
550 recipefile_re = r'%s_(.*)\.bb' % pn
551 result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
552 dirlist = os.listdir(temprecipe)
553 if len(dirlist) > 1:
554 self.fail('recipetool created more than just one file; output:\n%s\ndirlist:\n%s' % (result.output, str(dirlist)))
555 if len(dirlist) < 1 or not os.path.isfile(os.path.join(temprecipe, dirlist[0])):
556 self.fail('recipetool did not create recipe file; output:\n%s\ndirlist:\n%s' % (result.output, str(dirlist)))
557 import re
558 match = re.match(recipefile_re, dirlist[0])
559 self.assertTrue(match)
560 latest_pv = match.group(1)
561 self.assertTrue(latest_pv != pv)
562 recipefile = os.path.join(temprecipe, '%s_%s.bb' % (pn, latest_pv))
563 # Do not check LIC_FILES_CHKSUM and SRC_URI checksum here to avoid having to update the test on each release
564 checkvars = {}
565 checkvars['LICENSE'] = set(['MIT'])
566 checkvars['PYPI_PACKAGE'] = pn
567 inherits = ['setuptools3', "pypi"]
568 self._test_recipe_contents(recipefile, checkvars, inherits)
569
570 def test_recipetool_create_python3_pep517_setuptools_build_meta(self):
571 # This test requires python 3.11 or above for the tomllib module, or the tomli module to be installed
572 needTomllib(self)
573
574 # Test creating python3 package from tarball (using setuptools.build_meta class)
575 temprecipe = os.path.join(self.tempdir, 'recipe')
576 os.makedirs(temprecipe)
577 pn = 'webcolors'
578 pv = '1.13'
579 recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
580 srcuri = 'https://files.pythonhosted.org/packages/a1/fb/f95560c6a5d4469d9c49e24cf1b5d4d21ffab5608251c6020a965fb7791c/%s-%s.tar.gz' % (pn, pv)
581 result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
582 self.assertTrue(os.path.isfile(recipefile))
583 checkvars = {}
584 checkvars['SUMMARY'] = 'A library for working with the color formats defined by HTML and CSS.'
585 checkvars['LICENSE'] = set(['BSD-3-Clause'])
586 checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=702b1ef12cf66832a88f24c8f2ee9c19'
587 checkvars['SRC_URI[sha256sum]'] = 'c225b674c83fa923be93d235330ce0300373d02885cef23238813b0d5668304a'
588 inherits = ['python_setuptools_build_meta', 'pypi']
589
590 self._test_recipe_contents(recipefile, checkvars, inherits)
591
592 def test_recipetool_create_python3_pep517_poetry_core_masonry_api(self):
593 # This test requires python 3.11 or above for the tomllib module, or the tomli module to be installed
594 needTomllib(self)
595
596 # Test creating python3 package from tarball (using poetry.core.masonry.api class)
597 temprecipe = os.path.join(self.tempdir, 'recipe')
598 os.makedirs(temprecipe)
599 pn = 'iso8601'
600 pv = '2.1.0'
601 recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
602 srcuri = 'https://files.pythonhosted.org/packages/b9/f3/ef59cee614d5e0accf6fd0cbba025b93b272e626ca89fb70a3e9187c5d15/%s-%s.tar.gz' % (pn, pv)
603 result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
604 self.assertTrue(os.path.isfile(recipefile))
605 checkvars = {}
606 checkvars['SUMMARY'] = 'Simple module to parse ISO 8601 dates'
607 checkvars['LICENSE'] = set(['MIT'])
608 checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=aab31f2ef7ba214a5a341eaa47a7f367'
609 checkvars['SRC_URI[sha256sum]'] = '6b1d3829ee8921c4301998c909f7829fa9ed3cbdac0d3b16af2d743aed1ba8df'
610 inherits = ['python_poetry_core', 'pypi']
611
612 self._test_recipe_contents(recipefile, checkvars, inherits)
613
614 def test_recipetool_create_python3_pep517_flit_core_buildapi(self):
615 # This test requires python 3.11 or above for the tomllib module, or the tomli module to be installed
616 needTomllib(self)
617
618 # Test creating python3 package from tarball (using flit_core.buildapi class)
619 temprecipe = os.path.join(self.tempdir, 'recipe')
620 os.makedirs(temprecipe)
621 pn = 'typing-extensions'
622 pv = '4.8.0'
623 recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
624 srcuri = 'https://files.pythonhosted.org/packages/1f/7a/8b94bb016069caa12fc9f587b28080ac33b4fbb8ca369b98bc0a4828543e/typing_extensions-%s.tar.gz' % pv
625 result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
626 self.assertTrue(os.path.isfile(recipefile))
627 checkvars = {}
628 checkvars['SUMMARY'] = 'Backported and Experimental Type Hints for Python 3.8+'
629 checkvars['LICENSE'] = set(['PSF-2.0'])
630 checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=fcf6b249c2641540219a727f35d8d2c2'
631 checkvars['SRC_URI[sha256sum]'] = 'df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef'
632 inherits = ['python_flit_core', 'pypi']
633
634 self._test_recipe_contents(recipefile, checkvars, inherits)
635
636 def test_recipetool_create_python3_pep517_hatchling(self):
637 # This test requires python 3.11 or above for the tomllib module, or the tomli module to be installed
638 needTomllib(self)
639
640 # Test creating python3 package from tarball (using hatchling class)
641 temprecipe = os.path.join(self.tempdir, 'recipe')
642 os.makedirs(temprecipe)
643 pn = 'jsonschema'
644 pv = '4.19.1'
645 recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
646 srcuri = 'https://files.pythonhosted.org/packages/e4/43/087b24516db11722c8687e0caf0f66c7785c0b1c51b0ab951dfde924e3f5/jsonschema-%s.tar.gz' % pv
647 result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
648 self.assertTrue(os.path.isfile(recipefile))
649 checkvars = {}
650 checkvars['SUMMARY'] = 'An implementation of JSON Schema validation for Python'
651 checkvars['HOMEPAGE'] = 'https://github.com/python-jsonschema/jsonschema'
652 checkvars['LICENSE'] = set(['MIT'])
653 checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=7a60a81c146ec25599a3e1dabb8610a8 file://json/LICENSE;md5=9d4de43111d33570c8fe49b4cb0e01af'
654 checkvars['SRC_URI[sha256sum]'] = 'ec84cc37cfa703ef7cd4928db24f9cb31428a5d0fa77747b8b51a847458e0bbf'
655 inherits = ['python_hatchling', 'pypi']
656
657 self._test_recipe_contents(recipefile, checkvars, inherits)
658
659 def test_recipetool_create_python3_pep517_maturin(self):
660 # This test requires python 3.11 or above for the tomllib module, or the tomli module to be installed
661 needTomllib(self)
662
663 # Test creating python3 package from tarball (using maturin class)
664 temprecipe = os.path.join(self.tempdir, 'recipe')
665 os.makedirs(temprecipe)
666 pn = 'pydantic-core'
667 pv = '2.14.5'
668 recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
669 srcuri = 'https://files.pythonhosted.org/packages/64/26/cffb93fe9c6b5a91c497f37fae14a4b073ecbc47fc36a9979c7aa888b245/pydantic_core-%s.tar.gz' % pv
670 result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
671 self.assertTrue(os.path.isfile(recipefile))
672 checkvars = {}
673 checkvars['HOMEPAGE'] = 'https://github.com/pydantic/pydantic-core'
674 checkvars['LICENSE'] = set(['MIT'])
675 checkvars['LIC_FILES_CHKSUM'] = 'file://LICENSE;md5=ab599c188b4a314d2856b3a55030c75c'
676 checkvars['SRC_URI[sha256sum]'] = '6d30226dfc816dd0fdf120cae611dd2215117e4f9b124af8c60ab9093b6e8e71'
677 inherits = ['python_maturin', 'pypi']
678
679 self._test_recipe_contents(recipefile, checkvars, inherits)
680
681 def test_recipetool_create_python3_pep517_mesonpy(self):
682 # This test requires python 3.11 or above for the tomllib module, or the tomli module to be installed
683 needTomllib(self)
684
685 # Test creating python3 package from tarball (using mesonpy class)
686 temprecipe = os.path.join(self.tempdir, 'recipe')
687 os.makedirs(temprecipe)
688 pn = 'siphash24'
689 pv = '1.4'
690 recipefile = os.path.join(temprecipe, 'python3-%s_%s.bb' % (pn, pv))
691 srcuri = 'https://files.pythonhosted.org/packages/c2/32/b934a70592f314afcfa86c7f7e388804a8061be65b822e2aa07e573b6477/%s-%s.tar.gz' % (pn, pv)
692 result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
693 self.assertTrue(os.path.isfile(recipefile))
694 checkvars = {}
695 checkvars['SRC_URI[sha256sum]'] = '7fd65e39b2a7c8c4ddc3a168a687f4610751b0ac2ebb518783c0cdfc30bec4a0'
696 inherits = ['python_mesonpy', 'pypi']
697
490 self._test_recipe_contents(recipefile, checkvars, inherits) 698 self._test_recipe_contents(recipefile, checkvars, inherits)
491 699
492 def test_recipetool_create_github_tarball(self): 700 def test_recipetool_create_github_tarball(self):
493 # Basic test to ensure github URL mangling doesn't apply to release tarballs 701 # Basic test to ensure github URL mangling doesn't apply to release tarballs.
702 # Deliberately use an older release of Meson at present so we don't need a toml parser.
494 temprecipe = os.path.join(self.tempdir, 'recipe') 703 temprecipe = os.path.join(self.tempdir, 'recipe')
495 os.makedirs(temprecipe) 704 os.makedirs(temprecipe)
496 pv = '0.32.0' 705 pv = '0.52.1'
497 recipefile = os.path.join(temprecipe, 'meson_%s.bb' % pv) 706 recipefile = os.path.join(temprecipe, 'python3-meson_%s.bb' % pv)
498 srcuri = 'https://github.com/mesonbuild/meson/releases/download/%s/meson-%s.tar.gz' % (pv, pv) 707 srcuri = 'https://github.com/mesonbuild/meson/releases/download/%s/meson-%s.tar.gz' % (pv, pv)
499 result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri)) 708 result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri))
500 self.assertTrue(os.path.isfile(recipefile)) 709 self.assertTrue(os.path.isfile(recipefile))
@@ -504,27 +713,302 @@ class RecipetoolTests(RecipetoolBase):
504 inherits = ['setuptools3'] 713 inherits = ['setuptools3']
505 self._test_recipe_contents(recipefile, checkvars, inherits) 714 self._test_recipe_contents(recipefile, checkvars, inherits)
506 715
507 def test_recipetool_create_git_http(self): 716 def _test_recipetool_create_git(self, srcuri, branch=None):
508 # Basic test to check http git URL mangling works 717 # Basic test to check http git URL mangling works
509 temprecipe = os.path.join(self.tempdir, 'recipe') 718 temprecipe = os.path.join(self.tempdir, 'recipe')
510 os.makedirs(temprecipe) 719 os.makedirs(temprecipe)
511 recipefile = os.path.join(temprecipe, 'matchbox-terminal_git.bb') 720 name = srcuri.split(';')[0].split('/')[-1]
512 srcuri = 'http://git.yoctoproject.org/git/matchbox-terminal' 721 recipefile = os.path.join(temprecipe, name + '_git.bb')
513 result = runCmd('recipetool create -o %s %s' % (temprecipe, srcuri)) 722 options = ' -B %s' % branch if branch else ''
723 result = runCmd('recipetool create -o %s%s "%s"' % (temprecipe, options, srcuri))
514 self.assertTrue(os.path.isfile(recipefile)) 724 self.assertTrue(os.path.isfile(recipefile))
515 checkvars = {} 725 checkvars = {}
516 checkvars['LICENSE'] = set(['GPLv2']) 726 checkvars['SRC_URI'] = srcuri
517 checkvars['SRC_URI'] = 'git://git.yoctoproject.org/git/matchbox-terminal;protocol=http' 727 for scheme in ['http', 'https']:
518 inherits = ['pkgconfig', 'autotools'] 728 if srcuri.startswith(scheme + ":"):
729 checkvars['SRC_URI'] = 'git%s;protocol=%s' % (srcuri[len(scheme):], scheme)
730 if ';branch=' not in srcuri:
731 checkvars['SRC_URI'] += ';branch=' + (branch or 'master')
732 self._test_recipe_contents(recipefile, checkvars, [])
733
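# Illustrative sketch (not part of the patch): how the SRC_URI mangling asserted by the
# helper above plays out for the plain-http case exercised by test_recipetool_create_git_http.
# The srcuri value is the one used in that test; 'expected' simply mirrors the checkvars logic.
srcuri = 'http://git.yoctoproject.org/git/matchbox-keyboard'
branch = None
expected = 'git' + srcuri[len('http'):] + ';protocol=http'
if ';branch=' not in srcuri:
    expected += ';branch=' + (branch or 'master')
assert expected == 'git://git.yoctoproject.org/git/matchbox-keyboard;protocol=http;branch=master'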
734 def test_recipetool_create_git_http(self):
735 self._test_recipetool_create_git('http://git.yoctoproject.org/git/matchbox-keyboard')
736
737 def test_recipetool_create_git_srcuri_master(self):
738 self._test_recipetool_create_git('git://git.yoctoproject.org/matchbox-keyboard;branch=master;protocol=https')
739
740 def test_recipetool_create_git_srcuri_branch(self):
741 self._test_recipetool_create_git('git://git.yoctoproject.org/matchbox-keyboard;branch=matchbox-keyboard-0-1;protocol=https')
742
743 def test_recipetool_create_git_srcbranch(self):
744 self._test_recipetool_create_git('git://git.yoctoproject.org/matchbox-keyboard;protocol=https', 'matchbox-keyboard-0-1')
745
746 def _go_urifiy(self, url, version, modulepath = None, pathmajor = None, subdir = None):
747 modulepath = ",path='%s'" % modulepath if len(modulepath) else ''
748 pathmajor = ",pathmajor='%s'" % pathmajor if len(pathmajor) else ''
749 subdir = ",subdir='%s'" % subdir if len(subdir) else ''
750 return "${@go_src_uri('%s','%s'%s%s%s)}" % (url, version, modulepath, pathmajor, subdir)
751
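# Illustrative sketch (not part of the patch): what _go_urifiy produces for one of the
# dependency tuples used below, to make the generated GO_DEPENDENCIES_SRC_URI entries
# easier to read. The tuple is copied verbatim from the 'dependencies' list in this test.
url, version, modulepath, pathmajor, subdir = \
    ('github.com/fxamacker/cbor', 'v2.4.0', 'github.com/fxamacker/cbor/v2', '/v2', '')
modulepath = ",path='%s'" % modulepath if len(modulepath) else ''
pathmajor = ",pathmajor='%s'" % pathmajor if len(pathmajor) else ''
subdir = ",subdir='%s'" % subdir if len(subdir) else ''
entry = "${@go_src_uri('%s','%s'%s%s%s)}" % (url, version, modulepath, pathmajor, subdir)
assert entry == "${@go_src_uri('github.com/fxamacker/cbor','v2.4.0',path='github.com/fxamacker/cbor/v2',pathmajor='/v2')}"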
752 def test_recipetool_create_go(self):
753 # Basic test to check go recipe generation
754 temprecipe = os.path.join(self.tempdir, 'recipe')
755 os.makedirs(temprecipe)
756
757 recipefile = os.path.join(temprecipe, 'edgex-go_git.bb')
758 deps_require_file = os.path.join(temprecipe, 'edgex-go', 'edgex-go-modules.inc')
759 lics_require_file = os.path.join(temprecipe, 'edgex-go', 'edgex-go-licenses.inc')
760 modules_txt_file = os.path.join(temprecipe, 'edgex-go', 'modules.txt')
761
762 srcuri = 'https://github.com/edgexfoundry/edgex-go.git'
763 srcrev = "v3.0.0"
764 srcbranch = "main"
765
766 result = runCmd('recipetool create -o %s %s -S %s -B %s' % (temprecipe, srcuri, srcrev, srcbranch))
767
768 self.maxDiff = None
769 inherits = ['go-vendor']
770
771 checkvars = {}
772 checkvars['GO_IMPORT'] = "github.com/edgexfoundry/edgex-go"
773 checkvars['SRC_URI'] = {'git://${GO_IMPORT};destsuffix=git/src/${GO_IMPORT};nobranch=1;name=${BPN};protocol=https',
774 'file://modules.txt'}
775 checkvars['LIC_FILES_CHKSUM'] = {'file://src/${GO_IMPORT}/LICENSE;md5=8f8bc924cf73f6a32381e5fd4c58d603'}
776
777 self.assertTrue(os.path.isfile(recipefile))
778 self._test_recipe_contents(recipefile, checkvars, inherits)
779
780 checkvars = {}
781 checkvars['VENDORED_LIC_FILES_CHKSUM'] = set(
782 ['file://src/${GO_IMPORT}/vendor/github.com/Microsoft/go-winio/LICENSE;md5=69205ff73858f2c22b2ca135b557e8ef',
783 'file://src/${GO_IMPORT}/vendor/github.com/armon/go-metrics/LICENSE;md5=d2d77030c0183e3d1e66d26dc1f243be',
784 'file://src/${GO_IMPORT}/vendor/github.com/cenkalti/backoff/LICENSE;md5=1571d94433e3f3aa05267efd4dbea68b',
785 'file://src/${GO_IMPORT}/vendor/github.com/davecgh/go-spew/LICENSE;md5=c06795ed54b2a35ebeeb543cd3a73e56',
786 'file://src/${GO_IMPORT}/vendor/github.com/eclipse/paho.mqtt.golang/LICENSE;md5=dcdb33474b60c38efd27356d8f2edec7',
787 'file://src/${GO_IMPORT}/vendor/github.com/eclipse/paho.mqtt.golang/edl-v10;md5=3adfcc70f5aeb7a44f3f9b495aa1fbf3',
788 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-bootstrap/v3/LICENSE;md5=0d6dae39976133b2851fba4c1e1275ff',
789 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-configuration/v3/LICENSE;md5=0d6dae39976133b2851fba4c1e1275ff',
790 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-core-contracts/v3/LICENSE;md5=0d6dae39976133b2851fba4c1e1275ff',
791 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-messaging/v3/LICENSE;md5=0d6dae39976133b2851fba4c1e1275ff',
792 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-registry/v3/LICENSE;md5=0d6dae39976133b2851fba4c1e1275ff',
793 'file://src/${GO_IMPORT}/vendor/github.com/edgexfoundry/go-mod-secrets/v3/LICENSE;md5=f9fa2f4f8e0ef8cc7b5dd150963eb457',
794 'file://src/${GO_IMPORT}/vendor/github.com/fatih/color/LICENSE.md;md5=316e6d590bdcde7993fb175662c0dd5a',
795 'file://src/${GO_IMPORT}/vendor/github.com/fxamacker/cbor/v2/LICENSE;md5=827f5a2fa861382d35a3943adf9ebb86',
796 'file://src/${GO_IMPORT}/vendor/github.com/go-jose/go-jose/v3/LICENSE;md5=3b83ef96387f14655fc854ddc3c6bd57',
797 'file://src/${GO_IMPORT}/vendor/github.com/go-jose/go-jose/v3/json/LICENSE;md5=591778525c869cdde0ab5a1bf283cd81',
798 'file://src/${GO_IMPORT}/vendor/github.com/go-kit/log/LICENSE;md5=5b7c15ad5fffe2ff6e9d58a6c161f082',
799 'file://src/${GO_IMPORT}/vendor/github.com/go-logfmt/logfmt/LICENSE;md5=98e39517c38127f969de33057067091e',
800 'file://src/${GO_IMPORT}/vendor/github.com/go-playground/locales/LICENSE;md5=3ccbda375ee345400ad1da85ba522301',
801 'file://src/${GO_IMPORT}/vendor/github.com/go-playground/universal-translator/LICENSE;md5=2e2b21ef8f61057977d27c727c84bef1',
802 'file://src/${GO_IMPORT}/vendor/github.com/go-playground/validator/v10/LICENSE;md5=a718a0f318d76f7c5d510cbae84f0b60',
803 'file://src/${GO_IMPORT}/vendor/github.com/go-redis/redis/v7/LICENSE;md5=58103aa5ea1ee9b7a369c9c4a95ef9b5',
804 'file://src/${GO_IMPORT}/vendor/github.com/golang/protobuf/LICENSE;md5=939cce1ec101726fa754e698ac871622',
805 'file://src/${GO_IMPORT}/vendor/github.com/gomodule/redigo/LICENSE;md5=2ee41112a44fe7014dce33e26468ba93',
806 'file://src/${GO_IMPORT}/vendor/github.com/google/uuid/LICENSE;md5=88073b6dd8ec00fe09da59e0b6dfded1',
807 'file://src/${GO_IMPORT}/vendor/github.com/gorilla/mux/LICENSE;md5=33fa1116c45f9e8de714033f99edde13',
808 'file://src/${GO_IMPORT}/vendor/github.com/gorilla/websocket/LICENSE;md5=c007b54a1743d596f46b2748d9f8c044',
809 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/consul/api/LICENSE;md5=b8a277a612171b7526e9be072f405ef4',
810 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/errwrap/LICENSE;md5=b278a92d2c1509760384428817710378',
811 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/go-cleanhttp/LICENSE;md5=65d26fcc2f35ea6a181ac777e42db1ea',
812 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/go-hclog/LICENSE;md5=ec7f605b74b9ad03347d0a93a5cc7eb8',
813 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/go-immutable-radix/LICENSE;md5=65d26fcc2f35ea6a181ac777e42db1ea',
814 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/go-multierror/LICENSE;md5=d44fdeb607e2d2614db9464dbedd4094',
815 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/go-rootcerts/LICENSE;md5=65d26fcc2f35ea6a181ac777e42db1ea',
816 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/golang-lru/LICENSE;md5=f27a50d2e878867827842f2c60e30bfc',
817 'file://src/${GO_IMPORT}/vendor/github.com/hashicorp/serf/LICENSE;md5=b278a92d2c1509760384428817710378',
818 'file://src/${GO_IMPORT}/vendor/github.com/leodido/go-urn/LICENSE;md5=8f50db5538ec1148a9b3d14ed96c3418',
819 'file://src/${GO_IMPORT}/vendor/github.com/mattn/go-colorable/LICENSE;md5=24ce168f90aec2456a73de1839037245',
820 'file://src/${GO_IMPORT}/vendor/github.com/mattn/go-isatty/LICENSE;md5=f509beadd5a11227c27b5d2ad6c9f2c6',
821 'file://src/${GO_IMPORT}/vendor/github.com/mitchellh/consulstructure/LICENSE;md5=96ada10a9e51c98c4656f2cede08c673',
822 'file://src/${GO_IMPORT}/vendor/github.com/mitchellh/copystructure/LICENSE;md5=56da355a12d4821cda57b8f23ec34bc4',
823 'file://src/${GO_IMPORT}/vendor/github.com/mitchellh/go-homedir/LICENSE;md5=3f7765c3d4f58e1f84c4313cecf0f5bd',
824 'file://src/${GO_IMPORT}/vendor/github.com/mitchellh/mapstructure/LICENSE;md5=3f7765c3d4f58e1f84c4313cecf0f5bd',
825 'file://src/${GO_IMPORT}/vendor/github.com/mitchellh/reflectwalk/LICENSE;md5=3f7765c3d4f58e1f84c4313cecf0f5bd',
826 'file://src/${GO_IMPORT}/vendor/github.com/nats-io/nats.go/LICENSE;md5=86d3f3a95c324c9479bd8986968f4327',
827 'file://src/${GO_IMPORT}/vendor/github.com/nats-io/nkeys/LICENSE;md5=86d3f3a95c324c9479bd8986968f4327',
828 'file://src/${GO_IMPORT}/vendor/github.com/nats-io/nuid/LICENSE;md5=86d3f3a95c324c9479bd8986968f4327',
829 'file://src/${GO_IMPORT}/vendor/github.com/pmezard/go-difflib/LICENSE;md5=e9a2ebb8de779a07500ddecca806145e',
830 'file://src/${GO_IMPORT}/vendor/github.com/rcrowley/go-metrics/LICENSE;md5=1bdf5d819f50f141366dabce3be1460f',
831 'file://src/${GO_IMPORT}/vendor/github.com/spiffe/go-spiffe/v2/LICENSE;md5=86d3f3a95c324c9479bd8986968f4327',
832 'file://src/${GO_IMPORT}/vendor/github.com/stretchr/objx/LICENSE;md5=d023fd31d3ca39ec61eec65a91732735',
833 'file://src/${GO_IMPORT}/vendor/github.com/stretchr/testify/LICENSE;md5=188f01994659f3c0d310612333d2a26f',
834 'file://src/${GO_IMPORT}/vendor/github.com/x448/float16/LICENSE;md5=de8f8e025d57fe7ee0b67f30d571323b',
835 'file://src/${GO_IMPORT}/vendor/github.com/zeebo/errs/LICENSE;md5=84914ab36fc0eb48edbaa53e66e8d326',
836 'file://src/${GO_IMPORT}/vendor/golang.org/x/crypto/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707',
837 'file://src/${GO_IMPORT}/vendor/golang.org/x/mod/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707',
838 'file://src/${GO_IMPORT}/vendor/golang.org/x/net/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707',
839 'file://src/${GO_IMPORT}/vendor/golang.org/x/sync/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707',
840 'file://src/${GO_IMPORT}/vendor/golang.org/x/sys/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707',
841 'file://src/${GO_IMPORT}/vendor/golang.org/x/text/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707',
842 'file://src/${GO_IMPORT}/vendor/golang.org/x/tools/LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707',
843 'file://src/${GO_IMPORT}/vendor/google.golang.org/genproto/LICENSE;md5=3b83ef96387f14655fc854ddc3c6bd57',
844 'file://src/${GO_IMPORT}/vendor/google.golang.org/grpc/LICENSE;md5=3b83ef96387f14655fc854ddc3c6bd57',
845 'file://src/${GO_IMPORT}/vendor/google.golang.org/protobuf/LICENSE;md5=02d4002e9171d41a8fad93aa7faf3956',
846 'file://src/${GO_IMPORT}/vendor/gopkg.in/eapache/queue.v1/LICENSE;md5=1bfd4408d3de090ef6b908b0cc45a316',
847 'file://src/${GO_IMPORT}/vendor/gopkg.in/yaml.v3/LICENSE;md5=3c91c17266710e16afdbb2b6d15c761c'])
848
849 self.assertTrue(os.path.isfile(lics_require_file))
850 self._test_recipe_contents(lics_require_file, checkvars, [])
851
852 dependencies = \
853 [ ('github.com/eclipse/paho.mqtt.golang','v1.4.2', '', '', ''),
854 ('github.com/edgexfoundry/go-mod-bootstrap','v3.0.1','github.com/edgexfoundry/go-mod-bootstrap/v3','/v3', ''),
855 ('github.com/edgexfoundry/go-mod-configuration','v3.0.0','github.com/edgexfoundry/go-mod-configuration/v3','/v3', ''),
856 ('github.com/edgexfoundry/go-mod-core-contracts','v3.0.0','github.com/edgexfoundry/go-mod-core-contracts/v3','/v3', ''),
857 ('github.com/edgexfoundry/go-mod-messaging','v3.0.0','github.com/edgexfoundry/go-mod-messaging/v3','/v3', ''),
858 ('github.com/edgexfoundry/go-mod-secrets','v3.0.1','github.com/edgexfoundry/go-mod-secrets/v3','/v3', ''),
859 ('github.com/fxamacker/cbor','v2.4.0','github.com/fxamacker/cbor/v2','/v2', ''),
860 ('github.com/gomodule/redigo','v1.8.9', '', '', ''),
861 ('github.com/google/uuid','v1.3.0', '', '', ''),
862 ('github.com/gorilla/mux','v1.8.0', '', '', ''),
863 ('github.com/rcrowley/go-metrics','v0.0.0-20201227073835-cf1acfcdf475', '', '', ''),
864 ('github.com/spiffe/go-spiffe','v2.1.4','github.com/spiffe/go-spiffe/v2','/v2', ''),
865 ('github.com/stretchr/testify','v1.8.2', '', '', ''),
866 ('go.googlesource.com/crypto','v0.8.0','golang.org/x/crypto', '', ''),
867 ('gopkg.in/eapache/queue.v1','v1.1.0', '', '', ''),
868 ('gopkg.in/yaml.v3','v3.0.1', '', '', ''),
869 ('github.com/microsoft/go-winio','v0.6.0','github.com/Microsoft/go-winio', '', ''),
870 ('github.com/hashicorp/go-metrics','v0.3.10','github.com/armon/go-metrics', '', ''),
871 ('github.com/cenkalti/backoff','v2.2.1+incompatible', '', '', ''),
872 ('github.com/davecgh/go-spew','v1.1.1', '', '', ''),
873 ('github.com/edgexfoundry/go-mod-registry','v3.0.0','github.com/edgexfoundry/go-mod-registry/v3','/v3', ''),
874 ('github.com/fatih/color','v1.9.0', '', '', ''),
875 ('github.com/go-jose/go-jose','v3.0.0','github.com/go-jose/go-jose/v3','/v3', ''),
876 ('github.com/go-kit/log','v0.2.1', '', '', ''),
877 ('github.com/go-logfmt/logfmt','v0.5.1', '', '', ''),
878 ('github.com/go-playground/locales','v0.14.1', '', '', ''),
879 ('github.com/go-playground/universal-translator','v0.18.1', '', '', ''),
880 ('github.com/go-playground/validator','v10.13.0','github.com/go-playground/validator/v10','/v10', ''),
881 ('github.com/go-redis/redis','v7.3.0','github.com/go-redis/redis/v7','/v7', ''),
882 ('github.com/golang/protobuf','v1.5.2', '', '', ''),
883 ('github.com/gorilla/websocket','v1.4.2', '', '', ''),
884 ('github.com/hashicorp/consul','v1.20.0','github.com/hashicorp/consul/api', '', 'api'),
885 ('github.com/hashicorp/errwrap','v1.0.0', '', '', ''),
886 ('github.com/hashicorp/go-cleanhttp','v0.5.1', '', '', ''),
887 ('github.com/hashicorp/go-hclog','v0.14.1', '', '', ''),
888 ('github.com/hashicorp/go-immutable-radix','v1.3.0', '', '', ''),
889 ('github.com/hashicorp/go-multierror','v1.1.1', '', '', ''),
890 ('github.com/hashicorp/go-rootcerts','v1.0.2', '', '', ''),
891 ('github.com/hashicorp/golang-lru','v0.5.4', '', '', ''),
892 ('github.com/hashicorp/serf','v0.10.1', '', '', ''),
893 ('github.com/leodido/go-urn','v1.2.3', '', '', ''),
894 ('github.com/mattn/go-colorable','v0.1.12', '', '', ''),
895 ('github.com/mattn/go-isatty','v0.0.14', '', '', ''),
896 ('github.com/mitchellh/consulstructure','v0.0.0-20190329231841-56fdc4d2da54', '', '', ''),
897 ('github.com/mitchellh/copystructure','v1.2.0', '', '', ''),
898 ('github.com/mitchellh/go-homedir','v1.1.0', '', '', ''),
899 ('github.com/mitchellh/mapstructure','v1.5.0', '', '', ''),
900 ('github.com/mitchellh/reflectwalk','v1.0.2', '', '', ''),
901 ('github.com/nats-io/nats.go','v1.25.0', '', '', ''),
902 ('github.com/nats-io/nkeys','v0.4.4', '', '', ''),
903 ('github.com/nats-io/nuid','v1.0.1', '', '', ''),
904 ('github.com/pmezard/go-difflib','v1.0.0', '', '', ''),
905 ('github.com/stretchr/objx','v0.5.0', '', '', ''),
906 ('github.com/x448/float16','v0.8.4', '', '', ''),
907 ('github.com/zeebo/errs','v1.3.0', '', '', ''),
908 ('go.googlesource.com/mod','v0.8.0','golang.org/x/mod', '', ''),
909 ('go.googlesource.com/net','v0.9.0','golang.org/x/net', '', ''),
910 ('go.googlesource.com/sync','v0.1.0','golang.org/x/sync', '', ''),
911 ('go.googlesource.com/sys','v0.7.0','golang.org/x/sys', '', ''),
912 ('go.googlesource.com/text','v0.9.0','golang.org/x/text', '', ''),
913 ('go.googlesource.com/tools','v0.6.0','golang.org/x/tools', '', ''),
914 ('github.com/googleapis/go-genproto','v0.0.0-20230223222841-637eb2293923','google.golang.org/genproto', '', ''),
915 ('github.com/grpc/grpc-go','v1.53.0','google.golang.org/grpc', '', ''),
916 ('go.googlesource.com/protobuf','v1.28.1','google.golang.org/protobuf', '', ''),
917 ]
918
919 src_uri = set()
920 for d in dependencies:
921 src_uri.add(self._go_urifiy(*d))
922
923 checkvars = {}
924 checkvars['GO_DEPENDENCIES_SRC_URI'] = src_uri
925
926 self.assertTrue(os.path.isfile(deps_require_file))
927 self._test_recipe_contents(deps_require_file, checkvars, [])
928
929 def test_recipetool_create_go_replace_modules(self):
930 # Check handling of replaced modules
931 temprecipe = os.path.join(self.tempdir, 'recipe')
932 os.makedirs(temprecipe)
933
934 recipefile = os.path.join(temprecipe, 'openapi-generator_git.bb')
935 deps_require_file = os.path.join(temprecipe, 'openapi-generator', 'go-modules.inc')
936 lics_require_file = os.path.join(temprecipe, 'openapi-generator', 'go-licenses.inc')
937 modules_txt_file = os.path.join(temprecipe, 'openapi-generator', 'modules.txt')
938
939 srcuri = 'https://github.com/OpenAPITools/openapi-generator.git'
940 srcrev = "v7.2.0"
941 srcbranch = "master"
942 srcsubdir = "samples/openapi3/client/petstore/go"
943
944 result = runCmd('recipetool create -o %s %s -S %s -B %s --src-subdir %s' % (temprecipe, srcuri, srcrev, srcbranch, srcsubdir))
945
946 self.maxDiff = None
947 inherits = ['go-vendor']
948
949 checkvars = {}
950 checkvars['GO_IMPORT'] = "github.com/OpenAPITools/openapi-generator/samples/openapi3/client/petstore/go"
951 checkvars['SRC_URI'] = {'git://${GO_IMPORT};destsuffix=git/src/${GO_IMPORT};nobranch=1;name=${BPN};protocol=https',
952 'file://modules.txt'}
953
954 self.assertNotIn('Traceback', result.output)
955 self.assertIn('No license file was detected for the main module', result.output)
956 self.assertTrue(os.path.isfile(recipefile))
519 self._test_recipe_contents(recipefile, checkvars, inherits) 957 self._test_recipe_contents(recipefile, checkvars, inherits)
520 958
959 # make sure that dependencies don't mention the local directory ./go-petstore
960 dependencies = \
961 [ ('github.com/stretchr/testify','v1.8.4', '', '', ''),
962 ('go.googlesource.com/oauth2','v0.10.0','golang.org/x/oauth2', '', ''),
963 ('github.com/davecgh/go-spew','v1.1.1', '', '', ''),
964 ('github.com/golang/protobuf','v1.5.3', '', '', ''),
965 ('github.com/kr/pretty','v0.3.0', '', '', ''),
966 ('github.com/pmezard/go-difflib','v1.0.0', '', '', ''),
967 ('github.com/rogpeppe/go-internal','v1.9.0', '', '', ''),
968 ('go.googlesource.com/net','v0.12.0','golang.org/x/net', '', ''),
969 ('github.com/golang/appengine','v1.6.7','google.golang.org/appengine', '', ''),
970 ('go.googlesource.com/protobuf','v1.31.0','google.golang.org/protobuf', '', ''),
971 ('gopkg.in/check.v1','v1.0.0-20201130134442-10cb98267c6c', '', '', ''),
972 ('gopkg.in/yaml.v3','v3.0.1', '', '', ''),
973 ]
974
975 src_uri = set()
976 for d in dependencies:
977 src_uri.add(self._go_urifiy(*d))
978
979 checkvars = {}
980 checkvars['GO_DEPENDENCIES_SRC_URI'] = src_uri
981
982 self.assertTrue(os.path.isfile(deps_require_file))
983 self._test_recipe_contents(deps_require_file, checkvars, [])
984
985class RecipetoolTests(RecipetoolBase):
986
987 @classmethod
988 def setUpClass(cls):
989 import sys
990
991 super(RecipetoolTests, cls).setUpClass()
992 bb_vars = get_bb_vars(['BBPATH'])
993 cls.bbpath = bb_vars['BBPATH']
994 libpath = os.path.join(get_bb_var('COREBASE'), 'scripts', 'lib', 'recipetool')
995 sys.path.insert(0, libpath)
996
521 def _copy_file_with_cleanup(self, srcfile, basedstdir, *paths): 997 def _copy_file_with_cleanup(self, srcfile, basedstdir, *paths):
522 dstdir = basedstdir 998 dstdir = basedstdir
523 self.assertTrue(os.path.exists(dstdir)) 999 self.assertTrue(os.path.exists(dstdir))
524 for p in paths: 1000 for p in paths:
525 dstdir = os.path.join(dstdir, p) 1001 dstdir = os.path.join(dstdir, p)
526 if not os.path.exists(dstdir): 1002 if not os.path.exists(dstdir):
527 os.makedirs(dstdir) 1003 try:
1004 os.makedirs(dstdir)
1005 except PermissionError:
1006 return False
1007 except OSError as e:
1008 if e.errno == errno.EROFS:
1009 return False
1010 else:
1011 raise e
528 if p == "lib": 1012 if p == "lib":
529 # Can race with other tests 1013 # Can race with other tests
530 self.add_command_to_tearDown('rmdir --ignore-fail-on-non-empty %s' % dstdir) 1014 self.add_command_to_tearDown('rmdir --ignore-fail-on-non-empty %s' % dstdir)
@@ -532,8 +1016,12 @@ class RecipetoolTests(RecipetoolBase):
532 self.track_for_cleanup(dstdir) 1016 self.track_for_cleanup(dstdir)
533 dstfile = os.path.join(dstdir, os.path.basename(srcfile)) 1017 dstfile = os.path.join(dstdir, os.path.basename(srcfile))
534 if srcfile != dstfile: 1018 if srcfile != dstfile:
535 shutil.copy(srcfile, dstfile) 1019 try:
1020 shutil.copy(srcfile, dstfile)
1021 except PermissionError:
1022 return False
536 self.track_for_cleanup(dstfile) 1023 self.track_for_cleanup(dstfile)
1024 return True
537 1025
538 def test_recipetool_load_plugin(self): 1026 def test_recipetool_load_plugin(self):
539 """Test that recipetool loads only the first found plugin in BBPATH.""" 1027 """Test that recipetool loads only the first found plugin in BBPATH."""
@@ -547,20 +1035,147 @@ class RecipetoolTests(RecipetoolBase):
547 plugincontent = fh.readlines() 1035 plugincontent = fh.readlines()
548 try: 1036 try:
549 self.assertIn('meta-selftest', srcfile, 'wrong bbpath plugin found') 1037 self.assertIn('meta-selftest', srcfile, 'wrong bbpath plugin found')
550 for path in searchpath: 1038 searchpath = [
551 self._copy_file_with_cleanup(srcfile, path, 'lib', 'recipetool') 1039 path for path in searchpath
1040 if self._copy_file_with_cleanup(srcfile, path, 'lib', 'recipetool')
1041 ]
552 result = runCmd("recipetool --quiet count") 1042 result = runCmd("recipetool --quiet count")
553 self.assertEqual(result.output, '1') 1043 self.assertEqual(result.output, '1')
554 result = runCmd("recipetool --quiet multiloaded") 1044 result = runCmd("recipetool --quiet multiloaded")
555 self.assertEqual(result.output, "no") 1045 self.assertEqual(result.output, "no")
556 for path in searchpath: 1046 for path in searchpath:
557 result = runCmd("recipetool --quiet bbdir") 1047 result = runCmd("recipetool --quiet bbdir")
558 self.assertEqual(result.output, path) 1048 self.assertEqual(os.path.realpath(result.output), os.path.realpath(path))
559 os.unlink(os.path.join(result.output, 'lib', 'recipetool', 'bbpath.py')) 1049 os.unlink(os.path.join(result.output, 'lib', 'recipetool', 'bbpath.py'))
560 finally: 1050 finally:
561 with open(srcfile, 'w') as fh: 1051 with open(srcfile, 'w') as fh:
562 fh.writelines(plugincontent) 1052 fh.writelines(plugincontent)
563 1053
1054 def test_recipetool_handle_license_vars(self):
1055 from create import handle_license_vars
1056 from unittest.mock import Mock
1057
1058 commonlicdir = get_bb_var('COMMON_LICENSE_DIR')
1059
1060 class DataConnectorCopy(bb.tinfoil.TinfoilDataStoreConnector):
1061 pass
1062
1063 d = DataConnectorCopy
1064 d.getVar = Mock(return_value=commonlicdir)
1065
1066 srctree = tempfile.mkdtemp(prefix='recipetoolqa')
1067 self.track_for_cleanup(srctree)
1068
1069 # Multiple licenses
1070 licenses = ['MIT', 'ISC', 'BSD-3-Clause', 'Apache-2.0']
1071 for licence in licenses:
1072 shutil.copy(os.path.join(commonlicdir, licence), os.path.join(srctree, 'LICENSE.' + licence))
1073 # Duplicate license
1074 shutil.copy(os.path.join(commonlicdir, 'MIT'), os.path.join(srctree, 'LICENSE'))
1075
1076 extravalues = {
1077 # Duplicate and missing licenses
1078 'LICENSE': 'Zlib & BSD-2-Clause & Zlib',
1079 'LIC_FILES_CHKSUM': [
1080 'file://README.md;md5=0123456789abcdef0123456789abcd'
1081 ]
1082 }
1083 lines_before = []
1084 handled = []
1085 licvalues = handle_license_vars(srctree, lines_before, handled, extravalues, d)
1086 expected_lines_before = [
1087 '# WARNING: the following LICENSE and LIC_FILES_CHKSUM values are best guesses - it is',
1088 '# your responsibility to verify that the values are complete and correct.',
1089 '# NOTE: Original package / source metadata indicates license is: BSD-2-Clause & Zlib',
1090 '#',
1091 '# NOTE: multiple licenses have been detected; they have been separated with &',
1092 '# in the LICENSE value for now since it is a reasonable assumption that all',
1093 '# of the licenses apply. If instead there is a choice between the multiple',
1094 '# licenses then you should change the value to separate the licenses with |',
1095 '# instead of &. If there is any doubt, check the accompanying documentation',
1096 '# to determine which situation is applicable.',
1097 'LICENSE = "Apache-2.0 & BSD-2-Clause & BSD-3-Clause & ISC & MIT & Zlib"',
1098 'LIC_FILES_CHKSUM = "file://LICENSE;md5=0835ade698e0bcf8506ecda2f7b4f302 \\\n'
1099 ' file://LICENSE.Apache-2.0;md5=89aea4e17d99a7cacdbeed46a0096b10 \\\n'
1100 ' file://LICENSE.BSD-3-Clause;md5=550794465ba0ec5312d6919e203a55f9 \\\n'
1101 ' file://LICENSE.ISC;md5=f3b90e78ea0cffb20bf5cca7947a896d \\\n'
1102 ' file://LICENSE.MIT;md5=0835ade698e0bcf8506ecda2f7b4f302 \\\n'
1103 ' file://README.md;md5=0123456789abcdef0123456789abcd"',
1104 ''
1105 ]
1106 self.assertEqual(lines_before, expected_lines_before)
1107 expected_licvalues = [
1108 ('MIT', 'LICENSE', '0835ade698e0bcf8506ecda2f7b4f302'),
1109 ('Apache-2.0', 'LICENSE.Apache-2.0', '89aea4e17d99a7cacdbeed46a0096b10'),
1110 ('BSD-3-Clause', 'LICENSE.BSD-3-Clause', '550794465ba0ec5312d6919e203a55f9'),
1111 ('ISC', 'LICENSE.ISC', 'f3b90e78ea0cffb20bf5cca7947a896d'),
1112 ('MIT', 'LICENSE.MIT', '0835ade698e0bcf8506ecda2f7b4f302')
1113 ]
1114 self.assertEqual(handled, [('license', expected_licvalues)])
1115 self.assertEqual(extravalues, {})
1116 self.assertEqual(licvalues, expected_licvalues)
1117
1118
1119 def test_recipetool_split_pkg_licenses(self):
1120 from create import split_pkg_licenses
1121 licvalues = [
1122 # Duplicate licenses
1123 ('BSD-2-Clause', 'x/COPYING', None),
1124 ('BSD-2-Clause', 'x/LICENSE', None),
1125 # Multiple licenses
1126 ('MIT', 'x/a/LICENSE.MIT', None),
1127 ('ISC', 'x/a/LICENSE.ISC', None),
1128 # Alternative licenses
1129 ('(MIT | ISC)', 'x/b/LICENSE', None),
1130 # Alternative licenses without brackets
1131 ('MIT | BSD-2-Clause', 'x/c/LICENSE', None),
1132 # Multi licenses with alternatives
1133 ('MIT', 'x/d/COPYING', None),
1134 ('MIT | BSD-2-Clause', 'x/d/LICENSE', None),
1135 # Multi licenses with alternatives and brackets
1136 ('Apache-2.0 & ((MIT | ISC) & BSD-3-Clause)', 'x/e/LICENSE', None)
1137 ]
1138 packages = {
1139 '${PN}': '',
1140 'a': 'x/a',
1141 'b': 'x/b',
1142 'c': 'x/c',
1143 'd': 'x/d',
1144 'e': 'x/e',
1145 'f': 'x/f',
1146 'g': 'x/g',
1147 }
1148 fallback_licenses = {
1149 # Ignored
1150 'a': 'BSD-3-Clause',
1151 # Used
1152 'f': 'BSD-3-Clause'
1153 }
1154 outlines = []
1155 outlicenses = split_pkg_licenses(licvalues, packages, outlines, fallback_licenses)
1156 expected_outlicenses = {
1157 '${PN}': ['BSD-2-Clause'],
1158 'a': ['ISC', 'MIT'],
1159 'b': ['(ISC | MIT)'],
1160 'c': ['(BSD-2-Clause | MIT)'],
1161 'd': ['(BSD-2-Clause | MIT)', 'MIT'],
1162 'e': ['(ISC | MIT)', 'Apache-2.0', 'BSD-3-Clause'],
1163 'f': ['BSD-3-Clause'],
1164 'g': ['Unknown']
1165 }
1166 self.assertEqual(outlicenses, expected_outlicenses)
1167 expected_outlines = [
1168 'LICENSE:${PN} = "BSD-2-Clause"',
1169 'LICENSE:a = "ISC & MIT"',
1170 'LICENSE:b = "(ISC | MIT)"',
1171 'LICENSE:c = "(BSD-2-Clause | MIT)"',
1172 'LICENSE:d = "(BSD-2-Clause | MIT) & MIT"',
1173 'LICENSE:e = "(ISC | MIT) & Apache-2.0 & BSD-3-Clause"',
1174 'LICENSE:f = "BSD-3-Clause"',
1175 'LICENSE:g = "Unknown"'
1176 ]
1177 self.assertEqual(outlines, expected_outlines)
1178
564 1179
565class RecipetoolAppendsrcBase(RecipetoolBase): 1180class RecipetoolAppendsrcBase(RecipetoolBase):
566 def _try_recipetool_appendsrcfile(self, testrecipe, newfile, destfile, options, expectedlines, expectedfiles): 1181 def _try_recipetool_appendsrcfile(self, testrecipe, newfile, destfile, options, expectedlines, expectedfiles):
@@ -593,9 +1208,9 @@ class RecipetoolAppendsrcBase(RecipetoolBase):
593 for uri in src_uri: 1208 for uri in src_uri:
594 p = urllib.parse.urlparse(uri) 1209 p = urllib.parse.urlparse(uri)
595 if p.scheme == 'file': 1210 if p.scheme == 'file':
596 return p.netloc + p.path 1211 return p.netloc + p.path, uri
597 1212
598 def _test_appendsrcfile(self, testrecipe, filename=None, destdir=None, has_src_uri=True, srcdir=None, newfile=None, options=''): 1213 def _test_appendsrcfile(self, testrecipe, filename=None, destdir=None, has_src_uri=True, srcdir=None, newfile=None, remove=None, machine=None , options=''):
599 if newfile is None: 1214 if newfile is None:
600 newfile = self.testfile 1215 newfile = self.testfile
601 1216
@@ -620,14 +1235,42 @@ class RecipetoolAppendsrcBase(RecipetoolBase):
620 else: 1235 else:
621 destpath = '.' + os.sep 1236 destpath = '.' + os.sep
622 1237
623 expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', 1238 expectedlines = ['FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
624 '\n'] 1239 '\n']
1240
1241 override = ""
1242 if machine:
1243 options += ' -m %s' % machine
1244 override = ':append:%s' % machine
1245 expectedlines.extend(['PACKAGE_ARCH = "${MACHINE_ARCH}"\n',
1246 '\n'])
1247
1248 if remove:
1249 for entry in remove:
1250 if machine:
1251 entry_remove_line = 'SRC_URI:remove:%s = " %s"\n' % (machine, entry)
1252 else:
1253 entry_remove_line = 'SRC_URI:remove = "%s"\n' % entry
1254
1255 expectedlines.extend([entry_remove_line,
1256 '\n'])
1257
625 if has_src_uri: 1258 if has_src_uri:
626 uri = 'file://%s' % filename 1259 uri = 'file://%s' % filename
627 if expected_subdir: 1260 if expected_subdir:
628 uri += ';subdir=%s' % expected_subdir 1261 uri += ';subdir=%s' % expected_subdir
629 expectedlines[0:0] = ['SRC_URI += "%s"\n' % uri, 1262 if machine:
630 '\n'] 1263 src_uri_line = 'SRC_URI%s = " %s"\n' % (override, uri)
1264 else:
1265 src_uri_line = 'SRC_URI += "%s"\n' % uri
1266
1267 expectedlines.extend([src_uri_line, '\n'])
1268
1272 if machine:
1273 filename = '%s/%s' % (machine, filename)
631 1274
632 return self._try_recipetool_appendsrcfile(testrecipe, newfile, destpath, options, expectedlines, [filename]) 1275 return self._try_recipetool_appendsrcfile(testrecipe, newfile, destpath, options, expectedlines, [filename])
633 1276
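# Illustrative sketch (not part of the patch): for a call such as
# self._test_appendsrcfile('base-files', 'a-file', machine='mymachine') with no 'remove'
# entries and no subdir, the expectedlines assembled above should amount to roughly:
sketch_expectedlines = [
    'FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n',
    '\n',
    'PACKAGE_ARCH = "${MACHINE_ARCH}"\n',
    '\n',
    'SRC_URI:append:mymachine = " file://a-file"\n',
    '\n',
]
# The appended file itself is then expected under the machine override directory,
# i.e. 'mymachine/a-file' rather than 'a-file'.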
@@ -682,18 +1325,46 @@ class RecipetoolAppendsrcTests(RecipetoolAppendsrcBase):
682 1325
683 def test_recipetool_appendsrcfile_existing_in_src_uri(self): 1326 def test_recipetool_appendsrcfile_existing_in_src_uri(self):
684 testrecipe = 'base-files' 1327 testrecipe = 'base-files'
685 filepath = self._get_first_file_uri(testrecipe) 1328 filepath,_ = self._get_first_file_uri(testrecipe)
686 self.assertTrue(filepath, 'Unable to test, no file:// uri found in SRC_URI for %s' % testrecipe) 1329 self.assertTrue(filepath, 'Unable to test, no file:// uri found in SRC_URI for %s' % testrecipe)
687 self._test_appendsrcfile(testrecipe, filepath, has_src_uri=False) 1330 self._test_appendsrcfile(testrecipe, filepath, has_src_uri=False)
688 1331
689 def test_recipetool_appendsrcfile_existing_in_src_uri_diff_params(self): 1332 def test_recipetool_appendsrcfile_existing_in_src_uri_diff_params(self, machine=None):
690 testrecipe = 'base-files' 1333 testrecipe = 'base-files'
691 subdir = 'tmp' 1334 subdir = 'tmp'
692 filepath = self._get_first_file_uri(testrecipe) 1335 filepath, srcuri_entry = self._get_first_file_uri(testrecipe)
693 self.assertTrue(filepath, 'Unable to test, no file:// uri found in SRC_URI for %s' % testrecipe) 1336 self.assertTrue(filepath, 'Unable to test, no file:// uri found in SRC_URI for %s' % testrecipe)
694 1337
695 output = self._test_appendsrcfile(testrecipe, filepath, subdir, has_src_uri=False) 1338 self._test_appendsrcfile(testrecipe, filepath, subdir, machine=machine, remove=[srcuri_entry])
696 self.assertTrue(any('with different parameters' in l for l in output)) 1339
1340 def test_recipetool_appendsrcfile_machine(self):
1341 # A very basic test
1342 self._test_appendsrcfile('base-files', 'a-file', machine='mymachine')
1343
1344 # Force cleaning the output of the previous test
1345 self.tearDownLocal()
1346
1347 # A more complex test: existing entry in src_uri with different param
1348 self.test_recipetool_appendsrcfile_existing_in_src_uri_diff_params(machine='mymachine')
1349
1350 def test_recipetool_appendsrcfile_update_recipe_basic(self):
1351 testrecipe = "mtd-utils-selftest"
1352 recipefile = get_bb_var('FILE', testrecipe)
1353 self.assertIn('meta-selftest', recipefile, 'This test expects the %s recipe to be in meta-selftest' % testrecipe)
1354 cmd = 'recipetool appendsrcfile -W -u meta-selftest %s %s' % (testrecipe, self.testfile)
1355 result = runCmd(cmd)
1356 self.assertNotIn('Traceback', result.output)
1357 self.add_command_to_tearDown('cd %s; rm -f %s/%s; git checkout .' % (os.path.dirname(recipefile), testrecipe, os.path.basename(self.testfile)))
1358
1359 expected_status = [(' M', '.*/%s$' % os.path.basename(recipefile)),
1360 ('??', '.*/%s/%s$' % (testrecipe, os.path.basename(self.testfile)))]
1361 self._check_repo_status(os.path.dirname(recipefile), expected_status)
1362 result = runCmd('git diff %s' % os.path.basename(recipefile), cwd=os.path.dirname(recipefile))
1363 removelines = []
1364 addlines = [
1365 'file://%s \\\\' % os.path.basename(self.testfile),
1366 ]
1367 self._check_diff(result.output, addlines, removelines)
697 1368
698 def test_recipetool_appendsrcfile_replace_file_srcdir(self): 1369 def test_recipetool_appendsrcfile_replace_file_srcdir(self):
699 testrecipe = 'bash' 1370 testrecipe = 'bash'
diff --git a/meta/lib/oeqa/selftest/cases/recipeutils.py b/meta/lib/oeqa/selftest/cases/recipeutils.py
index 747870383b..2cb4445f81 100644
--- a/meta/lib/oeqa/selftest/cases/recipeutils.py
+++ b/meta/lib/oeqa/selftest/cases/recipeutils.py
@@ -1,15 +1,13 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import os
6import re
7import time
8import logging
9import bb.tinfoil 7import bb.tinfoil
10 8
11from oeqa.selftest.case import OESelftestTestCase 9from oeqa.selftest.case import OESelftestTestCase
12from oeqa.utils.commands import runCmd, get_test_layer 10from oeqa.utils.commands import get_test_layer
13 11
14 12
15def setUpModule(): 13def setUpModule():
@@ -40,7 +38,7 @@ class RecipeUtilsTests(OESelftestTestCase):
40 SUMMARY = "Python framework to process interdependent tasks in a pool of workers" 38 SUMMARY = "Python framework to process interdependent tasks in a pool of workers"
41 HOMEPAGE = "http://github.com/gitpython-developers/async" 39 HOMEPAGE = "http://github.com/gitpython-developers/async"
42 SECTION = "devel/python" 40 SECTION = "devel/python"
43-LICENSE = "BSD" 41-LICENSE = "BSD-3-Clause"
44+LICENSE = "something" 42+LICENSE = "something"
45 LIC_FILES_CHKSUM = "file://PKG-INFO;beginline=8;endline=8;md5=88df8e78b9edfd744953862179f2d14e" 43 LIC_FILES_CHKSUM = "file://PKG-INFO;beginline=8;endline=8;md5=88df8e78b9edfd744953862179f2d14e"
46 44
@@ -52,7 +50,7 @@ class RecipeUtilsTests(OESelftestTestCase):
52+SRC_URI[md5sum] = "aaaaaa" 50+SRC_URI[md5sum] = "aaaaaa"
53 SRC_URI[sha256sum] = "ac6894d876e45878faae493b0cf61d0e28ec417334448ac0a6ea2229d8343051" 51 SRC_URI[sha256sum] = "ac6894d876e45878faae493b0cf61d0e28ec417334448ac0a6ea2229d8343051"
54 52
55 RDEPENDS_${PN} += "${PYTHON_PN}-threading" 53 RDEPENDS:${PN} += "python3-threading"
56""" 54"""
57 patchlines = [] 55 patchlines = []
58 for f in patches: 56 for f in patches:
@@ -80,7 +78,7 @@ class RecipeUtilsTests(OESelftestTestCase):
80 78
81-SRC_URI += "file://somefile" 79-SRC_URI += "file://somefile"
82- 80-
83 SRC_URI_append = " file://anotherfile" 81 SRC_URI:append = " file://anotherfile"
84""" 82"""
85 patchlines = [] 83 patchlines = []
86 for f in patches: 84 for f in patches:
@@ -105,7 +103,7 @@ class RecipeUtilsTests(OESelftestTestCase):
105 103
106-SRC_URI += "file://somefile" 104-SRC_URI += "file://somefile"
107- 105-
108-SRC_URI_append = " file://anotherfile" 106-SRC_URI:append = " file://anotherfile"
109""" 107"""
110 patchlines = [] 108 patchlines = []
111 for f in patches: 109 for f in patches:
diff --git a/meta/lib/oeqa/selftest/cases/reproducible.py b/meta/lib/oeqa/selftest/cases/reproducible.py
index 0d0259477e..80e830136f 100644
--- a/meta/lib/oeqa/selftest/cases/reproducible.py
+++ b/meta/lib/oeqa/selftest/cases/reproducible.py
@@ -9,35 +9,13 @@ import bb.utils
9import functools 9import functools
10import multiprocessing 10import multiprocessing
11import textwrap 11import textwrap
12import json
13import unittest
14import tempfile 12import tempfile
15import shutil 13import shutil
16import stat 14import stat
17import os 15import os
18import datetime 16import datetime
19 17
20# For sample packages, see:
21# https://autobuilder.yocto.io/pub/repro-fail/oe-reproducible-20201127-0t7wr_oo/
22# https://autobuilder.yocto.io/pub/repro-fail/oe-reproducible-20201127-4s9ejwyp/
23# https://autobuilder.yocto.io/pub/repro-fail/oe-reproducible-20201127-haiwdlbr/
24# https://autobuilder.yocto.io/pub/repro-fail/oe-reproducible-20201127-hwds3mcl/
25# https://autobuilder.yocto.io/pub/repro-fail/oe-reproducible-20201203-sua0pzvc/
26# (both packages/ and packages-excluded/)
27
28# ruby-ri-docs, meson:
29#https://autobuilder.yocto.io/pub/repro-fail/oe-reproducible-20210215-0_td9la2/packages/diff-html/
30exclude_packages = [ 18exclude_packages = [
31 'glide',
32 'go-dep',
33 'go-helloworld',
34 'go-runtime',
35 'go_',
36 'go-',
37 'meson',
38 'ovmf-shell-efi',
39 'perf',
40 'ruby-ri-docs'
41 ] 19 ]
42 20
43def is_excluded(package): 21def is_excluded(package):
@@ -65,13 +43,14 @@ class CompareResult(object):
65 return (self.status, self.test) < (other.status, other.test) 43 return (self.status, self.test) < (other.status, other.test)
66 44
67class PackageCompareResults(object): 45class PackageCompareResults(object):
68 def __init__(self): 46 def __init__(self, exclusions):
69 self.total = [] 47 self.total = []
70 self.missing = [] 48 self.missing = []
71 self.different = [] 49 self.different = []
72 self.different_excluded = [] 50 self.different_excluded = []
73 self.same = [] 51 self.same = []
74 self.active_exclusions = set() 52 self.active_exclusions = set()
53 exclude_packages.extend((exclusions or "").split())
75 54
76 def add_result(self, r): 55 def add_result(self, r):
77 self.total.append(r) 56 self.total.append(r)
@@ -118,8 +97,9 @@ def compare_file(reference, test, diffutils_sysroot):
118 result.status = SAME 97 result.status = SAME
119 return result 98 return result
120 99
121def run_diffoscope(a_dir, b_dir, html_dir, **kwargs): 100def run_diffoscope(a_dir, b_dir, html_dir, max_report_size=0, **kwargs):
122 return runCmd(['diffoscope', '--no-default-limits', '--exclude-directory-metadata', 'yes', '--html-dir', html_dir, a_dir, b_dir], 101 return runCmd(['diffoscope', '--no-default-limits', '--max-report-size', str(max_report_size),
102 '--exclude-directory-metadata', 'yes', '--html-dir', html_dir, a_dir, b_dir],
123 **kwargs) 103 **kwargs)
124 104
125class DiffoscopeTests(OESelftestTestCase): 105class DiffoscopeTests(OESelftestTestCase):
@@ -149,10 +129,15 @@ class ReproducibleTests(OESelftestTestCase):
149 129
150 package_classes = ['deb', 'ipk', 'rpm'] 130 package_classes = ['deb', 'ipk', 'rpm']
151 131
132 # Maximum report size, in bytes
133 max_report_size = 250 * 1024 * 1024
134
152 # targets are the things we want to test the reproducibility of 135 # targets are the things we want to test the reproducibility of
153 targets = ['core-image-minimal', 'core-image-sato', 'core-image-full-cmdline', 'core-image-weston', 'world'] 136 targets = ['core-image-minimal', 'core-image-sato', 'core-image-full-cmdline', 'core-image-weston', 'world']
137
154 # sstate targets are things to pull from sstate to potentially cut build/debugging time 138 # sstate targets are things to pull from sstate to potentially cut build/debugging time
155 sstate_targets = [] 139 sstate_targets = []
140
156 save_results = False 141 save_results = False
157 if 'OEQA_DEBUGGING_SAVED_OUTPUT' in os.environ: 142 if 'OEQA_DEBUGGING_SAVED_OUTPUT' in os.environ:
158 save_results = os.environ['OEQA_DEBUGGING_SAVED_OUTPUT'] 143 save_results = os.environ['OEQA_DEBUGGING_SAVED_OUTPUT']
@@ -167,11 +152,29 @@ class ReproducibleTests(OESelftestTestCase):
167 152
168 def setUpLocal(self): 153 def setUpLocal(self):
169 super().setUpLocal() 154 super().setUpLocal()
170 needed_vars = ['TOPDIR', 'TARGET_PREFIX', 'BB_NUMBER_THREADS'] 155 needed_vars = [
156 'TOPDIR',
157 'TARGET_PREFIX',
158 'BB_NUMBER_THREADS',
159 'BB_HASHSERVE',
160 'OEQA_REPRODUCIBLE_TEST_PACKAGE',
161 'OEQA_REPRODUCIBLE_TEST_TARGET',
162 'OEQA_REPRODUCIBLE_TEST_SSTATE_TARGETS',
163 'OEQA_REPRODUCIBLE_EXCLUDED_PACKAGES',
164 ]
171 bb_vars = get_bb_vars(needed_vars) 165 bb_vars = get_bb_vars(needed_vars)
172 for v in needed_vars: 166 for v in needed_vars:
173 setattr(self, v.lower(), bb_vars[v]) 167 setattr(self, v.lower(), bb_vars[v])
174 168
169 if bb_vars['OEQA_REPRODUCIBLE_TEST_PACKAGE']:
170 self.package_classes = bb_vars['OEQA_REPRODUCIBLE_TEST_PACKAGE'].split()
171
172 if bb_vars['OEQA_REPRODUCIBLE_TEST_TARGET']:
173 self.targets = bb_vars['OEQA_REPRODUCIBLE_TEST_TARGET'].split()
174
175 if bb_vars['OEQA_REPRODUCIBLE_TEST_SSTATE_TARGETS']:
176 self.sstate_targets = bb_vars['OEQA_REPRODUCIBLE_TEST_SSTATE_TARGETS'].split()
177
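# Illustrative sketch (not part of the patch): the OEQA_REPRODUCIBLE_* variables read
# above are plain space-separated strings set by the user, e.g. a hypothetical
# local.conf entry of OEQA_REPRODUCIBLE_TEST_PACKAGE = "ipk rpm" would make the
# code above run with:
example_value = "ipk rpm"              # hypothetical value, not a project default
package_classes = example_value.split()
assert package_classes == ['ipk', 'rpm']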
175 self.extraresults = {} 178 self.extraresults = {}
176 self.extraresults.setdefault('reproducible.rawlogs', {})['log'] = '' 179 self.extraresults.setdefault('reproducible.rawlogs', {})['log'] = ''
177 self.extraresults.setdefault('reproducible', {}).setdefault('files', {}) 180 self.extraresults.setdefault('reproducible', {}).setdefault('files', {})
@@ -180,7 +183,7 @@ class ReproducibleTests(OESelftestTestCase):
180 self.extraresults['reproducible.rawlogs']['log'] += msg 183 self.extraresults['reproducible.rawlogs']['log'] += msg
181 184
182 def compare_packages(self, reference_dir, test_dir, diffutils_sysroot): 185 def compare_packages(self, reference_dir, test_dir, diffutils_sysroot):
183 result = PackageCompareResults() 186 result = PackageCompareResults(self.oeqa_reproducible_excluded_packages)
184 187
185 old_cwd = os.getcwd() 188 old_cwd = os.getcwd()
186 try: 189 try:
@@ -219,12 +222,10 @@ class ReproducibleTests(OESelftestTestCase):
219 bb.utils.remove(tmpdir, recurse=True) 222 bb.utils.remove(tmpdir, recurse=True)
220 223
221 config = textwrap.dedent('''\ 224 config = textwrap.dedent('''\
222 INHERIT += "reproducible_build"
223 PACKAGE_CLASSES = "{package_classes}" 225 PACKAGE_CLASSES = "{package_classes}"
224 INHIBIT_PACKAGE_STRIP = "1"
225 TMPDIR = "{tmpdir}" 226 TMPDIR = "{tmpdir}"
226 LICENSE_FLAGS_WHITELIST = "commercial" 227 LICENSE_FLAGS_ACCEPTED = "commercial"
227 DISTRO_FEATURES_append = ' systemd pam' 228 DISTRO_FEATURES:append = ' pam'
228 USERADDEXTENSION = "useradd-staticids" 229 USERADDEXTENSION = "useradd-staticids"
229 USERADD_ERROR_DYNAMIC = "skip" 230 USERADD_ERROR_DYNAMIC = "skip"
230 USERADD_UID_TABLES += "files/static-passwd" 231 USERADD_UID_TABLES += "files/static-passwd"
@@ -242,7 +243,7 @@ class ReproducibleTests(OESelftestTestCase):
242 # mirror, forcing a complete build from scratch 243 # mirror, forcing a complete build from scratch
243 config += textwrap.dedent('''\ 244 config += textwrap.dedent('''\
244 SSTATE_DIR = "${TMPDIR}/sstate" 245 SSTATE_DIR = "${TMPDIR}/sstate"
245 SSTATE_MIRRORS = "" 246 SSTATE_MIRRORS = "file://.*/.*-native.* http://sstate.yoctoproject.org/all/PATH;downloadfilename=PATH file://.*/.*-cross.* http://sstate.yoctoproject.org/all/PATH;downloadfilename=PATH"
246 ''') 247 ''')
247 248
248 self.logger.info("Building %s (sstate%s allowed)..." % (name, '' if use_sstate else ' NOT')) 249 self.logger.info("Building %s (sstate%s allowed)..." % (name, '' if use_sstate else ' NOT'))
@@ -309,9 +310,13 @@ class ReproducibleTests(OESelftestTestCase):
309 self.copy_file(d.reference, '/'.join([save_dir, 'packages-excluded', strip_topdir(d.reference)])) 310 self.copy_file(d.reference, '/'.join([save_dir, 'packages-excluded', strip_topdir(d.reference)]))
310 self.copy_file(d.test, '/'.join([save_dir, 'packages-excluded', strip_topdir(d.test)])) 311 self.copy_file(d.test, '/'.join([save_dir, 'packages-excluded', strip_topdir(d.test)]))
311 312
312 if result.missing or result.different: 313 if result.different:
313 fails.append("The following %s packages are missing or different and not in exclusion list: %s" % 314 fails.append("The following %s packages are different and not in exclusion list:\n%s" %
314 (c, '\n'.join(r.test for r in (result.missing + result.different)))) 315 (c, '\n'.join(r.test for r in (result.different))))
316
317 if result.missing and len(self.sstate_targets) == 0:
318 fails.append("The following %s packages are missing and not in exclusion list:\n%s" %
319 (c, '\n'.join(r.test for r in (result.missing))))
315 320
316 # Clean up empty directories 321 # Clean up empty directories
317 if self.save_results: 322 if self.save_results:
@@ -325,7 +330,7 @@ class ReproducibleTests(OESelftestTestCase):
325 # Copy jquery to improve the diffoscope output usability 330 # Copy jquery to improve the diffoscope output usability
326 self.copy_file(os.path.join(jquery_sysroot, 'usr/share/javascript/jquery/jquery.min.js'), os.path.join(package_html_dir, 'jquery.js')) 331 self.copy_file(os.path.join(jquery_sysroot, 'usr/share/javascript/jquery/jquery.min.js'), os.path.join(package_html_dir, 'jquery.js'))
327 332
328 run_diffoscope('reproducibleA', 'reproducibleB', package_html_dir, 333 run_diffoscope('reproducibleA', 'reproducibleB', package_html_dir, max_report_size=self.max_report_size,
329 native_sysroot=diffoscope_sysroot, ignore_status=True, cwd=package_dir) 334 native_sysroot=diffoscope_sysroot, ignore_status=True, cwd=package_dir)
330 335
331 if fails: 336 if fails:
diff --git a/meta/lib/oeqa/selftest/cases/resulttooltests.py b/meta/lib/oeqa/selftest/cases/resulttooltests.py
index dac5c46801..c3303f3fbb 100644
--- a/meta/lib/oeqa/selftest/cases/resulttooltests.py
+++ b/meta/lib/oeqa/selftest/cases/resulttooltests.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -69,7 +71,7 @@ class ResultToolTests(OESelftestTestCase):
69 self.assertTrue('target_result1' in results['runtime/mydistro/qemux86/image'], msg="Pair not correct:%s" % results) 71 self.assertTrue('target_result1' in results['runtime/mydistro/qemux86/image'], msg="Pair not correct:%s" % results)
70 self.assertTrue('target_result3' in results['runtime/mydistro/qemux86-64/image'], msg="Pair not correct:%s" % results) 72 self.assertTrue('target_result3' in results['runtime/mydistro/qemux86-64/image'], msg="Pair not correct:%s" % results)
71 73
72 def test_regrresion_can_get_regression_result(self): 74 def test_regression_can_get_regression_result(self):
73 base_result_data = {'result': {'test1': {'status': 'PASSED'}, 75 base_result_data = {'result': {'test1': {'status': 'PASSED'},
74 'test2': {'status': 'PASSED'}, 76 'test2': {'status': 'PASSED'},
75 'test3': {'status': 'FAILED'}, 77 'test3': {'status': 'FAILED'},
@@ -96,3 +98,278 @@ class ResultToolTests(OESelftestTestCase):
96 resultutils.append_resultsdata(results, ResultToolTests.target_results_data, configmap=resultutils.flatten_map) 98 resultutils.append_resultsdata(results, ResultToolTests.target_results_data, configmap=resultutils.flatten_map)
97 self.assertEqual(len(results[''].keys()), 5, msg="Flattened results not correct %s" % str(results)) 99 self.assertEqual(len(results[''].keys()), 5, msg="Flattened results not correct %s" % str(results))
98 100
101 def test_results_without_metadata_can_be_compared(self):
102 base_configuration = {"configuration": {
103 "TEST_TYPE": "oeselftest",
104 "TESTSERIES": "series1",
105 "IMAGE_BASENAME": "image",
106 "IMAGE_PKGTYPE": "ipk",
107 "DISTRO": "mydistro",
108 "MACHINE": "qemux86",
109 "STARTTIME": 1672527600
110 }, "result": {}}
111 target_configuration = {"configuration": {
112 "TEST_TYPE": "oeselftest",
113 "TESTSERIES": "series1",
114 "IMAGE_BASENAME": "image",
115 "IMAGE_PKGTYPE": "ipk",
116 "DISTRO": "mydistro",
117 "MACHINE": "qemux86",
118 "STARTTIME": 1672527600
119 }, "result": {}}
120 self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
121 msg="incorrect metadata filtering, tests without metadata should be compared")
122
123 def test_target_result_with_missing_metadata_can_not_be_compared(self):
124 base_configuration = {"configuration": {
125 "TEST_TYPE": "oeselftest",
126 "TESTSERIES": "series1",
127 "IMAGE_BASENAME": "image",
128 "IMAGE_PKGTYPE": "ipk",
129 "DISTRO": "mydistro",
130 "MACHINE": "qemux86",
131 "OESELFTEST_METADATA": {
132 "run_all_tests": True,
133 "run_tests": None,
134 "skips": None,
135 "machine": None,
136 "select_tags": ["toolchain-user", "toolchain-system"],
137 "exclude_tags": None
138 }}, "result": {}}
139 target_configuration = {"configuration": {"TEST_TYPE": "oeselftest",
140 "TESTSERIES": "series1",
141 "IMAGE_BASENAME": "image",
142 "IMAGE_PKGTYPE": "ipk",
143 "DISTRO": "mydistro",
144 "MACHINE": "qemux86",
145 "STARTTIME": 1672527600
146 }, "result": {}}
147 self.assertFalse(regression.can_be_compared(self.logger, base_configuration, target_configuration),
148 msg="incorrect metadata filtering, tests should not be compared")
149
150 def test_results_with_matching_metadata_can_be_compared(self):
151 base_configuration = {"configuration": {
152 "TEST_TYPE": "oeselftest",
153 "TESTSERIES": "series1",
154 "IMAGE_BASENAME": "image",
155 "IMAGE_PKGTYPE": "ipk",
156 "DISTRO": "mydistro",
157 "MACHINE": "qemux86",
158 "STARTTIME": 1672527600,
159 "OESELFTEST_METADATA": {"run_all_tests": True,
160 "run_tests": None,
161 "skips": None,
162 "machine": None,
163 "select_tags": ["toolchain-user", "toolchain-system"],
164 "exclude_tags": None}
165 }, "result": {}}
166 target_configuration = {"configuration": {
167 "TEST_TYPE": "oeselftest",
168 "TESTSERIES": "series1",
169 "IMAGE_BASENAME": "image",
170 "IMAGE_PKGTYPE": "ipk",
171 "DISTRO": "mydistro",
172 "MACHINE": "qemux86",
173 "STARTTIME": 1672527600,
174 "OESELFTEST_METADATA": {"run_all_tests": True,
175 "run_tests": None,
176 "skips": None,
177 "machine": None,
178 "select_tags": ["toolchain-user", "toolchain-system"],
179 "exclude_tags": None}
180 }, "result": {}}
181 self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
182 msg="incorrect metadata filtering, tests with matching metadata should be compared")
183
184 def test_results_with_mismatching_metadata_can_not_be_compared(self):
185 base_configuration = {"configuration": {
186 "TEST_TYPE": "oeselftest",
187 "TESTSERIES": "series1",
188 "IMAGE_BASENAME": "image",
189 "IMAGE_PKGTYPE": "ipk",
190 "DISTRO": "mydistro",
191 "MACHINE": "qemux86",
192 "STARTTIME": 1672527600,
193 "OESELFTEST_METADATA": {"run_all_tests": True,
194 "run_tests": None,
195 "skips": None,
196 "machine": None,
197 "select_tags": ["toolchain-user", "toolchain-system"],
198 "exclude_tags": None}
199 }, "result": {}}
200 target_configuration = {"configuration": {
201 "TEST_TYPE": "oeselftest",
202 "TESTSERIES": "series1",
203 "IMAGE_BASENAME": "image",
204 "IMAGE_PKGTYPE": "ipk",
205 "DISTRO": "mydistro",
206 "MACHINE": "qemux86",
207 "STARTTIME": 1672527600,
208 "OESELFTEST_METADATA": {"run_all_tests": True,
209 "run_tests": None,
210 "skips": None,
211 "machine": None,
212 "select_tags": ["machine"],
213 "exclude_tags": None}
214 }, "result": {}}
215 self.assertFalse(regression.can_be_compared(self.logger, base_configuration, target_configuration),
216 msg="incorrect metadata filtering, tests with mismatching metadata should not be compared")
217
218 def test_metadata_matching_is_only_checked_for_relevant_test_type(self):
219 base_configuration = {"configuration": {"TEST_TYPE": "runtime",
220 "TESTSERIES": "series1",
221 "IMAGE_BASENAME": "image",
222 "IMAGE_PKGTYPE": "ipk",
223 "DISTRO": "mydistro",
224 "MACHINE": "qemux86",
225 "STARTTIME": 1672527600,
226 "OESELFTEST_METADATA": {"run_all_tests": True,
227 "run_tests": None,
228 "skips": None,
229 "machine": None,
230 "select_tags": ["toolchain-user", "toolchain-system"],
231 "exclude_tags": None}}, "result": {}}
232 target_configuration = {"configuration": {"TEST_TYPE": "runtime",
233 "TESTSERIES": "series1",
234 "IMAGE_BASENAME": "image",
235 "IMAGE_PKGTYPE": "ipk",
236 "DISTRO": "mydistro",
237 "MACHINE": "qemux86",
238 "STARTTIME": 1672527600,
239 "OESELFTEST_METADATA": {"run_all_tests": True,
240 "run_tests": None,
241 "skips": None,
242 "machine": None,
243 "select_tags": ["machine"],
244 "exclude_tags": None}}, "result": {}}
245 self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
246 msg="incorrect metadata filtering, %s tests should be compared" % base_configuration['configuration']['TEST_TYPE'])
247
248 def test_machine_matches(self):
249 base_configuration = {"configuration": {
250 "TEST_TYPE": "runtime",
251 "MACHINE": "qemux86"}, "result": {}}
252 target_configuration = {"configuration": {
253 "TEST_TYPE": "runtime",
254 "MACHINE": "qemux86"
255 }, "result": {}}
256 self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
257 msg="incorrect machine filtering, identical machine tests should be compared")
258
259 def test_machine_mismatches(self):
260 base_configuration = {"configuration": {
261 "TEST_TYPE": "runtime",
262 "MACHINE": "qemux86"
263 }, "result": {}}
264 target_configuration = {"configuration": {
265 "TEST_TYPE": "runtime",
266 "MACHINE": "qemux86_64"
267 }, "result": {}}
268 self.assertFalse(regression.can_be_compared(self.logger, base_configuration, target_configuration),
269 msg="incorrect machine filtering, mismatching machine tests should not be compared")
270
271 def test_can_not_compare_non_ltp_tests(self):
272 base_configuration = {"configuration": {
273 "TEST_TYPE": "runtime",
274 "MACHINE": "qemux86"
275 }, "result": {
276 "ltpresult_foo": {
277 "status": "PASSED"
278 }}}
279 target_configuration = {"configuration": {
280 "TEST_TYPE": "runtime",
281 "MACHINE": "qemux86_64"
282 }, "result": {
283 "bar": {
284 "status": "PASSED"
285 }}}
286 self.assertFalse(regression.can_be_compared(self.logger, base_configuration, target_configuration),
287 msg="incorrect ltpresult filtering, mismatching ltpresult content should not be compared")
288
289 def test_can_compare_ltp_tests(self):
290 base_configuration = {"configuration": {
291 "TEST_TYPE": "runtime",
292 "MACHINE": "qemux86"
293 }, "result": {
294 "ltpresult_foo": {
295 "status": "PASSED"
296 }}}
297 target_configuration = {"configuration": {
298 "TEST_TYPE": "runtime",
299 "MACHINE": "qemux86"
300 }, "result": {
301 "ltpresult_foo": {
302 "status": "PASSED"
303 }}}
304 self.assertTrue(regression.can_be_compared(self.logger, base_configuration, target_configuration),
305 msg="incorrect ltpresult filtering, matching ltpresult content should be compared")
306
307 def test_can_match_non_static_ptest_names(self):
308 base_configuration = {"a": {
309 "conf_X": {
310 "configuration": {
311 "TEST_TYPE": "runtime",
312 "MACHINE": "qemux86"
313 }, "result": {
314 "ptestresult.lttng-tools.foo_-_bar_-_moo": {
315 "status": "PASSED"
316 },
317 "ptestresult.babeltrace.bar_-_moo_-_foo": {
318 "status": "PASSED"
319 },
320 "ptestresult.babeltrace2.moo_-_foo_-_bar": {
321 "status": "PASSED"
322 },
323 "ptestresult.curl.test_0000__foo_out_of_bar": {
324 "status": "PASSED"
325 },
326 "ptestresult.dbus.test_0000__foo_out_of_bar,_remaining:_00:02,_took_0.032s,_duration:_03:32_": {
327 "status": "PASSED"
328 },
329 "ptestresult.binutils-ld.in testcase /foo/build-st-bar/moo/ctf.exp": {
330 "status": "PASSED"
331 },
332 "ptestresult.gcc-libstdc++-v3.Couldn't create remote directory /tmp/runtest.30975 on target": {
333 "status": "PASSED"
334 },
335 "ptestresult.gcc-libgomp.Couldn't create remote directory /tmp/runtest.3657621 on": {
336 "status": "PASSED"
337 }
338 }}}}
339 target_configuration = {"a": {
340 "conf_Y": {
341 "configuration": {
342 "TEST_TYPE": "runtime",
343 "MACHINE": "qemux86"
344 }, "result": {
345 "ptestresult.lttng-tools.foo_-_yyy_-_zzz": {
346 "status": "PASSED"
347 },
348 "ptestresult.babeltrace.bar_-_zzz_-_xxx": {
349 "status": "PASSED"
350 },
351 "ptestresult.babeltrace2.moo_-_xxx_-_yyy": {
352 "status": "PASSED"
353 },
354 "ptestresult.curl.test_0000__xxx_out_of_yyy": {
355 "status": "PASSED"
356 },
357 "ptestresult.dbus.test_0000__yyy_out_of_zzz,_remaining:_00:03,_took_0.034s,_duration:_03:30_": {
358 "status": "PASSED"
359 },
360 "ptestresult.binutils-ld.in testcase /xxx/build-st-yyy/zzz/ctf.exp": {
361 "status": "PASSED"
362 },
363 "ptestresult.gcc-libstdc++-v3.Couldn't create remote directory /tmp/runtest.45678 on target": {
364 "status": "PASSED"
365 },
366 "ptestresult.gcc-libgomp.Couldn't create remote directory /tmp/runtest.3657621 on": {
367 "status": "PASSED"
368 }
369 }}}}
370 regression.fixup_ptest_names(base_configuration, self.logger)
371 regression.fixup_ptest_names(target_configuration, self.logger)
372 result, resultstring = regression.compare_result(
373 self.logger, "A", "B", base_configuration["a"]["conf_X"], target_configuration["a"]["conf_Y"])
374 self.assertDictEqual(
375 result, {}, msg=f"ptests should be compared: {resultstring}")
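The ptest-name test above relies on fixup_ptest_names() rewriting the run-dependent tail of these result names before compare_result() is called; a purely illustrative sketch of the idea (the names are invented, and the actual normalisation lives in resulttool's regression module, not here):

    # Two runs of the same lttng-tools ptest report different, non-static suffixes:
    base_name   = "ptestresult.lttng-tools.foo_-_bar_-_moo"
    target_name = "ptestresult.lttng-tools.foo_-_yyy_-_zzz"
    # fixup_ptest_names() is expected to reduce both to the same run-independent key,
    # which is why compare_result() in the test above returns an empty regression dict.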
diff --git a/meta/lib/oeqa/selftest/cases/rootfspostcommandstests.py b/meta/lib/oeqa/selftest/cases/rootfspostcommandstests.py
new file mode 100644
index 0000000000..44e2c09a6f
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/rootfspostcommandstests.py
@@ -0,0 +1,97 @@
1# SPDX-FileCopyrightText: Huawei Inc.
2#
3# SPDX-License-Identifier: MIT
4
5import os
6import oe
7import unittest
8from oeqa.selftest.case import OESelftestTestCase
9from oeqa.utils.commands import bitbake, get_bb_vars
10
11class ShadowUtilsTidyFiles(OESelftestTestCase):
12 """
13 Check if shadow image rootfs files are tidy.
14
15 The tests are focused on testing the functionality provided by the
16 'tidy_shadowutils_files' rootfs postprocess command (via
17 SORT_PASSWD_POSTPROCESS_COMMAND).
18 """
19
20 def sysconf_build(self):
21 """
22 Check whether the shadow tidy-files tests should run and, if so, build a
23 test image and return its sysconf rootfs path.
24 """
25
26 test_image = "core-image-minimal"
27
28 config = 'IMAGE_CLASSES += "extrausers"\n'
29 config += 'EXTRA_USERS_PARAMS = "groupadd -g 1000 oeqatester; "\n'
30 config += 'EXTRA_USERS_PARAMS += "useradd -p \'\' -u 1000 -N -g 1000 oeqatester; "\n'
31 self.write_config(config)
32
33 vars = get_bb_vars(("IMAGE_ROOTFS", "SORT_PASSWD_POSTPROCESS_COMMAND", "sysconfdir"),
34 test_image)
35 passwd_postprocess_cmd = vars["SORT_PASSWD_POSTPROCESS_COMMAND"]
36 self.assertIsNotNone(passwd_postprocess_cmd)
37 if (passwd_postprocess_cmd.strip() != 'tidy_shadowutils_files;'):
38 raise unittest.SkipTest("Testcase skipped as the 'tidy_shadowutils_files' "
39 "rootfs postprocess command is not set as SORT_PASSWD_POSTPROCESS_COMMAND.")
40
41 rootfs = vars["IMAGE_ROOTFS"]
42 self.assertIsNotNone(rootfs)
43 sysconfdir = vars["sysconfdir"]
44 bitbake(test_image)
45 self.assertIsNotNone(sysconfdir)
46
47 return oe.path.join(rootfs, sysconfdir)
48
49 def test_shadowutils_backup_files(self):
50 """
51 Test that the rootfs doesn't include any known shadow backup files.
52 """
53
54 backup_files = (
55 'group-',
56 'gshadow-',
57 'passwd-',
58 'shadow-',
59 'subgid-',
60 'subuid-',
61 )
62
63 rootfs_sysconfdir = self.sysconf_build()
64 found = []
65 for backup_file in backup_files:
66 backup_filepath = oe.path.join(rootfs_sysconfdir, backup_file)
67 if os.path.exists(backup_filepath):
68 found.append(backup_file)
69 if (found):
70 raise Exception('The following shadow backup files were found in '
71 'the rootfs: %s' % found)
72
73 def test_shadowutils_sorted_files(self):
74 """
75 Test that the 'passwd' and the 'group' shadow utils files are ordered
76 by ID.
77 """
78
79 files = (
80 'passwd',
81 'group',
82 )
83
84 rootfs_sysconfdir = self.sysconf_build()
85 unsorted = []
86 for file in files:
87 filepath = oe.path.join(rootfs_sysconfdir, file)
88 with open(filepath, 'rb') as f:
89 ids = []
90 lines = f.readlines()
91 for line in lines:
92 entries = line.split(b':')
93 ids.append(int(entries[2]))
94 if (ids != sorted(ids)):
95 unsorted.append(file)
96 if (unsorted):
97 raise Exception("The following files were not sorted by ID as expected: %s" % unsorted)
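As an aside, the ordering check in test_shadowutils_sorted_files() reduces to comparing the numeric ID field of each entry; a small standalone sketch with made-up entries (not taken from any image):

    # Made-up /etc/passwd-style entries (name:pw:UID:GID:...).
    entries = [
        b"root:x:0:0:root:/root:/bin/sh",
        b"daemon:x:1:1:daemon:/usr/sbin:/bin/sh",
        b"oeqatester:x:1000:1000::/home/oeqatester:/bin/sh",
    ]
    ids = [int(line.split(b':')[2]) for line in entries]
    assert ids == sorted(ids)  # tidy means the ID column is in non-decreasing order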
diff --git a/meta/lib/oeqa/selftest/cases/rpmtests.py b/meta/lib/oeqa/selftest/cases/rpmtests.py
new file mode 100644
index 0000000000..902d7dca3d
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/rpmtests.py
@@ -0,0 +1,14 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7from oeqa.selftest.case import OESelftestTestCase
8from oeqa.utils.commands import bitbake
9
10class BitbakeTests(OESelftestTestCase):
11
12 def test_rpm_filenames(self):
13 test_recipe = "testrpm"
14 bitbake(test_recipe)
diff --git a/meta/lib/oeqa/selftest/cases/runcmd.py b/meta/lib/oeqa/selftest/cases/runcmd.py
index fa6113d7fa..70047ca0ca 100644
--- a/meta/lib/oeqa/selftest/cases/runcmd.py
+++ b/meta/lib/oeqa/selftest/cases/runcmd.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -27,8 +29,8 @@ class RunCmdTests(OESelftestTestCase):
27 29
28 # The delta is intentionally smaller than the timeout, to detect cases where 30 # The delta is intentionally smaller than the timeout, to detect cases where
29 # we incorrectly apply the timeout more than once. 31 # we incorrectly apply the timeout more than once.
30 TIMEOUT = 5 32 TIMEOUT = 10
31 DELTA = 3 33 DELTA = 8
32 34
33 def test_result_okay(self): 35 def test_result_okay(self):
34 result = runCmd("true") 36 result = runCmd("true")
@@ -56,11 +58,11 @@ class RunCmdTests(OESelftestTestCase):
56 self.assertEqual(result.status, 0) 58 self.assertEqual(result.status, 0)
57 59
58 def test_result_assertion(self): 60 def test_result_assertion(self):
59 self.assertRaisesRegexp(AssertionError, "Command 'echo .* false' returned non-zero exit status 1:\nfoobar", 61 self.assertRaisesRegex(AssertionError, "Command 'echo .* false' returned non-zero exit status 1:\nfoobar",
60 runCmd, "echo foobar >&2; false", shell=True) 62 runCmd, "echo foobar >&2; false", shell=True)
61 63
62 def test_result_exception(self): 64 def test_result_exception(self):
63 self.assertRaisesRegexp(CommandError, "Command 'echo .* false' returned non-zero exit status 1 with output: foobar", 65 self.assertRaisesRegex(CommandError, "Command 'echo .* false' returned non-zero exit status 1 with output: foobar",
64 runCmd, "echo foobar >&2; false", shell=True, assert_error=False) 66 runCmd, "echo foobar >&2; false", shell=True, assert_error=False)
65 67
66 def test_output(self): 68 def test_output(self):
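A short note on the arithmetic behind the relaxed values above, assuming (as the class comment suggests) that the timeout tests check the command returns within TIMEOUT + DELTA seconds:

    TIMEOUT, DELTA = 10, 8
    # A single, correctly applied timeout returns after roughly TIMEOUT seconds:
    assert TIMEOUT <= TIMEOUT + DELTA            # ~10s against an 18s budget: passes
    # A timeout accidentally applied twice would take roughly 2 * TIMEOUT seconds:
    assert not (2 * TIMEOUT <= TIMEOUT + DELTA)  # ~20s against an 18s budget: caught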
diff --git a/meta/lib/oeqa/selftest/cases/runqemu.py b/meta/lib/oeqa/selftest/cases/runqemu.py
index 7e676bcb41..f01e1eec66 100644
--- a/meta/lib/oeqa/selftest/cases/runqemu.py
+++ b/meta/lib/oeqa/selftest/cases/runqemu.py
@@ -4,14 +4,17 @@
4# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
5# 5#
6 6
7import os
7import re 8import re
8import tempfile
9import time 9import time
10import oe.types 10import oe.types
11from oeqa.core.decorator import OETestTag 11from oeqa.core.decorator import OETestTag
12from oeqa.core.decorator.data import skipIfNotArch, skipIfNotMachine
12from oeqa.selftest.case import OESelftestTestCase 13from oeqa.selftest.case import OESelftestTestCase
13from oeqa.utils.commands import bitbake, runqemu, get_bb_var, runCmd 14from oeqa.utils.commands import bitbake, runqemu, get_bb_var
14 15
16
17@OETestTag("runqemu")
15class RunqemuTests(OESelftestTestCase): 18class RunqemuTests(OESelftestTestCase):
16 """Runqemu test class""" 19 """Runqemu test class"""
17 20
@@ -21,23 +24,26 @@ class RunqemuTests(OESelftestTestCase):
21 def setUpLocal(self): 24 def setUpLocal(self):
22 super(RunqemuTests, self).setUpLocal() 25 super(RunqemuTests, self).setUpLocal()
23 self.recipe = 'core-image-minimal' 26 self.recipe = 'core-image-minimal'
24 self.machine = 'qemux86-64' 27 self.machine = self.td['MACHINE']
25 self.fstypes = "ext4 iso hddimg wic.vmdk wic.qcow2 wic.vdi" 28 self.image_link_name = get_bb_var('IMAGE_LINK_NAME', self.recipe)
26 self.cmd_common = "runqemu nographic"
27 29
28 kvm = oe.types.qemu_use_kvm(get_bb_var('QEMU_USE_KVM'), 'x86_64') 30 self.fstypes = "ext4"
31 if self.td["HOST_ARCH"] in ('i586', 'i686', 'x86_64'):
32 self.fstypes += " iso hddimg"
33 if self.machine == "qemux86-64":
34 self.fstypes += " wic.vmdk wic.qcow2 wic.vdi"
35
36 self.cmd_common = "runqemu nographic"
37 kvm = oe.types.qemu_use_kvm(get_bb_var('QEMU_USE_KVM'), self.td["TARGET_ARCH"])
29 if kvm: 38 if kvm:
30 self.cmd_common += " kvm" 39 self.cmd_common += " kvm"
31 40
32 self.write_config( 41 self.write_config(
33""" 42"""
34MACHINE = "%s"
35IMAGE_FSTYPES = "%s" 43IMAGE_FSTYPES = "%s"
36# 10 means 1 second 44# 10 means 1 second
37SYSLINUX_TIMEOUT = "10" 45SYSLINUX_TIMEOUT = "10"
38""" 46""" % self.fstypes)
39% (self.machine, self.fstypes)
40 )
41 47
42 if not RunqemuTests.image_is_ready: 48 if not RunqemuTests.image_is_ready:
43 RunqemuTests.deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') 49 RunqemuTests.deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
@@ -56,14 +62,17 @@ SYSLINUX_TIMEOUT = "10"
56 cmd = "%s %s ext4" % (self.cmd_common, self.machine) 62 cmd = "%s %s ext4" % (self.cmd_common, self.machine)
57 with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: 63 with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
58 with open(qemu.qemurunnerlog) as f: 64 with open(qemu.qemurunnerlog) as f:
59 self.assertIn('rootfs.ext4', f.read(), "Failed: %s" % cmd) 65 regexp = r'\nROOTFS: .*\.ext4]\n'
66 self.assertRegex(f.read(), regexp, "Failed to find '%s' in '%s' after running '%s'" % (regexp, qemu.qemurunnerlog, cmd))
60 67
68 @skipIfNotArch(['i586', 'i686', 'x86_64'])
61 def test_boot_machine_iso(self): 69 def test_boot_machine_iso(self):
62 """Test runqemu machine iso""" 70 """Test runqemu machine iso"""
63 cmd = "%s %s iso" % (self.cmd_common, self.machine) 71 cmd = "%s %s iso" % (self.cmd_common, self.machine)
64 with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: 72 with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
65 with open(qemu.qemurunnerlog) as f: 73 with open(qemu.qemurunnerlog) as f:
66 self.assertIn('media=cdrom', f.read(), "Failed: %s" % cmd) 74 text_in = 'media=cdrom'
75 self.assertIn(text_in, f.read(), "Failed to find '%s' in '%s' after running '%s'" % (text_in, qemu.qemurunnerlog, cmd))
67 76
68 def test_boot_recipe_image(self): 77 def test_boot_recipe_image(self):
69 """Test runqemu recipe-image""" 78 """Test runqemu recipe-image"""
@@ -72,20 +81,24 @@ SYSLINUX_TIMEOUT = "10"
72 with open(qemu.qemurunnerlog) as f: 81 with open(qemu.qemurunnerlog) as f:
73 self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read())) 82 self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read()))
74 83
75 84 # https://bugzilla.yoctoproject.org/show_bug.cgi?id=14963
85 @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
76 def test_boot_recipe_image_vmdk(self): 86 def test_boot_recipe_image_vmdk(self):
77 """Test runqemu recipe-image vmdk""" 87 """Test runqemu recipe-image vmdk"""
78 cmd = "%s %s wic.vmdk" % (self.cmd_common, self.recipe) 88 cmd = "%s %s wic.vmdk" % (self.cmd_common, self.recipe)
79 with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: 89 with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
80 with open(qemu.qemurunnerlog) as f: 90 with open(qemu.qemurunnerlog) as f:
81 self.assertIn('format=vmdk', f.read(), "Failed: %s" % cmd) 91 text_in = 'format=vmdk'
92 self.assertIn(text_in, f.read(), "Failed to find '%s' in '%s' after running '%s'" % (text_in, qemu.qemurunnerlog, cmd))
82 93
94 @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
83 def test_boot_recipe_image_vdi(self): 95 def test_boot_recipe_image_vdi(self):
84 """Test runqemu recipe-image vdi""" 96 """Test runqemu recipe-image vdi"""
85 cmd = "%s %s wic.vdi" % (self.cmd_common, self.recipe) 97 cmd = "%s %s wic.vdi" % (self.cmd_common, self.recipe)
86 with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: 98 with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
87 with open(qemu.qemurunnerlog) as f: 99 with open(qemu.qemurunnerlog) as f:
88 self.assertIn('format=vdi', f.read(), "Failed: %s" % cmd) 100 text_in = 'format=vdi'
101 self.assertIn(text_in, f.read(), "Failed to find '%s' in '%s' after running '%s'" % (text_in, qemu.qemurunnerlog, cmd))
89 102
90 def test_boot_deploy(self): 103 def test_boot_deploy(self):
91 """Test runqemu deploy_dir_image""" 104 """Test runqemu deploy_dir_image"""
@@ -94,7 +107,7 @@ SYSLINUX_TIMEOUT = "10"
94 with open(qemu.qemurunnerlog) as f: 107 with open(qemu.qemurunnerlog) as f:
95 self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read())) 108 self.assertTrue(qemu.runner.logged, "Failed: %s, %s" % (cmd, f.read()))
96 109
97 110 @skipIfNotArch(['i586', 'i686', 'x86_64'])
98 def test_boot_deploy_hddimg(self): 111 def test_boot_deploy_hddimg(self):
99 """Test runqemu deploy_dir_image hddimg""" 112 """Test runqemu deploy_dir_image hddimg"""
100 cmd = "%s %s hddimg" % (self.cmd_common, self.deploy_dir_image) 113 cmd = "%s %s hddimg" % (self.cmd_common, self.deploy_dir_image)
@@ -109,6 +122,7 @@ SYSLINUX_TIMEOUT = "10"
109 with open(qemu.qemurunnerlog) as f: 122 with open(qemu.qemurunnerlog) as f:
110 self.assertIn(' -netdev user', f.read(), "Failed: %s" % cmd) 123 self.assertIn(' -netdev user', f.read(), "Failed: %s" % cmd)
111 124
125 @skipIfNotMachine("qemux86-64", "tests are qemux86-64 specific currently")
112 def test_boot_machine_slirp_qcow2(self): 126 def test_boot_machine_slirp_qcow2(self):
113 """Test runqemu machine slirp qcow2""" 127 """Test runqemu machine slirp qcow2"""
114 cmd = "%s slirp wic.qcow2 %s" % (self.cmd_common, self.machine) 128 cmd = "%s slirp wic.qcow2 %s" % (self.cmd_common, self.machine)
@@ -118,7 +132,7 @@ SYSLINUX_TIMEOUT = "10"
118 132
119 def test_boot_qemu_boot(self): 133 def test_boot_qemu_boot(self):
120 """Test runqemu /path/to/image.qemuboot.conf""" 134 """Test runqemu /path/to/image.qemuboot.conf"""
121 qemuboot_conf = "%s-%s.qemuboot.conf" % (self.recipe, self.machine) 135 qemuboot_conf = "%s.qemuboot.conf" % (self.image_link_name)
122 qemuboot_conf = os.path.join(self.deploy_dir_image, qemuboot_conf) 136 qemuboot_conf = os.path.join(self.deploy_dir_image, qemuboot_conf)
123 if not os.path.exists(qemuboot_conf): 137 if not os.path.exists(qemuboot_conf):
124 self.skipTest("%s not found" % qemuboot_conf) 138 self.skipTest("%s not found" % qemuboot_conf)
@@ -129,7 +143,7 @@ SYSLINUX_TIMEOUT = "10"
129 143
130 def test_boot_rootfs(self): 144 def test_boot_rootfs(self):
131 """Test runqemu /path/to/rootfs.ext4""" 145 """Test runqemu /path/to/rootfs.ext4"""
132 rootfs = "%s-%s.ext4" % (self.recipe, self.machine) 146 rootfs = "%s.ext4" % (self.image_link_name)
133 rootfs = os.path.join(self.deploy_dir_image, rootfs) 147 rootfs = os.path.join(self.deploy_dir_image, rootfs)
134 if not os.path.exists(rootfs): 148 if not os.path.exists(rootfs):
135 self.skipTest("%s not found" % rootfs) 149 self.skipTest("%s not found" % rootfs)
@@ -149,26 +163,27 @@ SYSLINUX_TIMEOUT = "10"
149# bootup various filesystem types, including live image(iso and hddimg) 163# bootup various filesystem types, including live image(iso and hddimg)
150# where live image was not supported on all qemu architecture. 164# where live image was not supported on all qemu architecture.
151@OETestTag("machine") 165@OETestTag("machine")
166@OETestTag("runqemu")
152class QemuTest(OESelftestTestCase): 167class QemuTest(OESelftestTestCase):
153 168
154 @classmethod 169 @classmethod
155 def setUpClass(cls): 170 def setUpClass(cls):
156 super(QemuTest, cls).setUpClass() 171 super(QemuTest, cls).setUpClass()
157 cls.recipe = 'core-image-minimal' 172 cls.recipe = 'core-image-minimal'
158 cls.machine = get_bb_var('MACHINE') 173 cls.machine = get_bb_var('MACHINE')
159 cls.deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') 174 cls.deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE')
175 cls.image_link_name = get_bb_var('IMAGE_LINK_NAME', cls.recipe)
160 cls.cmd_common = "runqemu nographic" 176 cls.cmd_common = "runqemu nographic"
161 cls.qemuboot_conf = "%s-%s.qemuboot.conf" % (cls.recipe, cls.machine) 177 cls.qemuboot_conf = "%s.qemuboot.conf" % (cls.image_link_name)
162 cls.qemuboot_conf = os.path.join(cls.deploy_dir_image, cls.qemuboot_conf) 178 cls.qemuboot_conf = os.path.join(cls.deploy_dir_image, cls.qemuboot_conf)
163 bitbake(cls.recipe) 179 bitbake(cls.recipe)
164 180
165 def _start_qemu_shutdown_check_if_shutdown_succeeded(self, qemu, timeout): 181 def _start_qemu_shutdown_check_if_shutdown_succeeded(self, qemu, timeout):
182 # Allow the runner's LoggingThread instance to exit without errors
183 # (such as the exception "Console connection closed unexpectedly")
184 # as qemu will disappear when we shut it down
185 qemu.runner.allowexit()
166 qemu.run_serial("shutdown -h now") 186 qemu.run_serial("shutdown -h now")
167 # Stop thread will stop the LoggingThread instance used for logging
168 # qemu through serial console, stop thread will prevent this code
169 # from facing exception (Console connection closed unexpectedly)
170 # when qemu was shutdown by the above shutdown command
171 qemu.runner.stop_thread()
172 time_track = 0 187 time_track = 0
173 try: 188 try:
174 while True: 189 while True:
@@ -190,22 +205,12 @@ class QemuTest(OESelftestTestCase):
190 qemu_shutdown_succeeded = self._start_qemu_shutdown_check_if_shutdown_succeeded(qemu, shutdown_timeout) 205 qemu_shutdown_succeeded = self._start_qemu_shutdown_check_if_shutdown_succeeded(qemu, shutdown_timeout)
191 self.assertTrue(qemu_shutdown_succeeded, 'Failed: %s does not shutdown within timeout(%s)' % (self.machine, shutdown_timeout)) 206 self.assertTrue(qemu_shutdown_succeeded, 'Failed: %s does not shutdown within timeout(%s)' % (self.machine, shutdown_timeout))
192 207
193 # Need to have portmap/rpcbind running to allow this test to work and 208 def test_qemu_can_boot_nfs_and_shutdown(self):
194 # current autobuilder setup does not have this. 209 rootfs_tar = "%s.tar.bz2" % (self.image_link_name)
195 def disabled_test_qemu_can_boot_nfs_and_shutdown(self):
196 self.assertExists(self.qemuboot_conf)
197 bitbake('meta-ide-support')
198 rootfs_tar = "%s-%s.tar.bz2" % (self.recipe, self.machine)
199 rootfs_tar = os.path.join(self.deploy_dir_image, rootfs_tar) 210 rootfs_tar = os.path.join(self.deploy_dir_image, rootfs_tar)
200 self.assertExists(rootfs_tar) 211 self.assertExists(rootfs_tar)
201 tmpdir = tempfile.mkdtemp(prefix='qemu_nfs') 212 cmd = "%s %s" % (self.cmd_common, rootfs_tar)
202 tmpdir_nfs = os.path.join(tmpdir, 'nfs')
203 cmd_extract_nfs = 'runqemu-extract-sdk %s %s' % (rootfs_tar, tmpdir_nfs)
204 result = runCmd(cmd_extract_nfs)
205 self.assertEqual(0, result.status, "runqemu-extract-sdk didn't run as expected. %s" % result.output)
206 cmd = "%s nfs %s %s" % (self.cmd_common, self.qemuboot_conf, tmpdir_nfs)
207 shutdown_timeout = 120 213 shutdown_timeout = 120
208 with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: 214 with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu:
209 qemu_shutdown_succeeded = self._start_qemu_shutdown_check_if_shutdown_succeeded(qemu, shutdown_timeout) 215 qemu_shutdown_succeeded = self._start_qemu_shutdown_check_if_shutdown_succeeded(qemu, shutdown_timeout)
210 self.assertTrue(qemu_shutdown_succeeded, 'Failed: %s does not shutdown within timeout(%s)' % (self.machine, shutdown_timeout)) 216 self.assertTrue(qemu_shutdown_succeeded, 'Failed: %s does not shutdown within timeout(%s)' % (self.machine, shutdown_timeout))
211 runCmd('rm -rf %s' % tmpdir)
diff --git a/meta/lib/oeqa/selftest/cases/runtime_test.py b/meta/lib/oeqa/selftest/cases/runtime_test.py
index b20c5b427b..12000aac16 100644
--- a/meta/lib/oeqa/selftest/cases/runtime_test.py
+++ b/meta/lib/oeqa/selftest/cases/runtime_test.py
@@ -1,24 +1,20 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5from oeqa.selftest.case import OESelftestTestCase 7from oeqa.selftest.case import OESelftestTestCase
6from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu 8from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu
7from oeqa.utils.sshcontrol import SSHControl 9from oeqa.core.decorator import OETestTag
8import os 10import os
9import re
10import tempfile 11import tempfile
11import shutil
12import oe.lsb 12import oe.lsb
13from oeqa.core.decorator.data import skipIfNotQemu 13from oeqa.core.decorator.data import skipIfNotQemu, skipIfNotMachine
14 14
15class TestExport(OESelftestTestCase): 15class TestExport(OESelftestTestCase):
16 16
17 @classmethod 17 @OETestTag("runqemu")
18 def tearDownClass(cls):
19 runCmd("rm -rf /tmp/sdk")
20 super(TestExport, cls).tearDownClass()
21
22 def test_testexport_basic(self): 18 def test_testexport_basic(self):
23 """ 19 """
24 Summary: Check basic testexport functionality with only ping test enabled. 20 Summary: Check basic testexport functionality with only ping test enabled.
@@ -29,7 +25,7 @@ class TestExport(OESelftestTestCase):
29 Author: Mariano Lopez <mariano.lopez@intel.com> 25 Author: Mariano Lopez <mariano.lopez@intel.com>
30 """ 26 """
31 27
32 features = 'INHERIT += "testexport"\n' 28 features = 'IMAGE_CLASSES += "testexport"\n'
33 # These aren't the actual IP addresses but testexport class needs something defined 29 # These aren't the actual IP addresses but testexport class needs something defined
34 features += 'TEST_SERVER_IP = "192.168.7.1"\n' 30 features += 'TEST_SERVER_IP = "192.168.7.1"\n'
35 features += 'TEST_TARGET_IP = "192.168.7.1"\n' 31 features += 'TEST_TARGET_IP = "192.168.7.1"\n'
@@ -70,7 +66,7 @@ class TestExport(OESelftestTestCase):
70 Author: Mariano Lopez <mariano.lopez@intel.com> 66 Author: Mariano Lopez <mariano.lopez@intel.com>
71 """ 67 """
72 68
73 features = 'INHERIT += "testexport"\n' 69 features = 'IMAGE_CLASSES += "testexport"\n'
74 # These aren't the actual IP addresses but testexport class needs something defined 70 # These aren't the actual IP addresses but testexport class needs something defined
75 features += 'TEST_SERVER_IP = "192.168.7.1"\n' 71 features += 'TEST_SERVER_IP = "192.168.7.1"\n'
76 features += 'TEST_TARGET_IP = "192.168.7.1"\n' 72 features += 'TEST_TARGET_IP = "192.168.7.1"\n'
@@ -95,21 +91,23 @@ class TestExport(OESelftestTestCase):
95 msg = "Couldn't find SDK tarball: %s" % tarball_path 91 msg = "Couldn't find SDK tarball: %s" % tarball_path
96 self.assertEqual(os.path.isfile(tarball_path), True, msg) 92 self.assertEqual(os.path.isfile(tarball_path), True, msg)
97 93
98 # Extract SDK and run tar from SDK 94 with tempfile.TemporaryDirectory() as tmpdirname:
99 result = runCmd("%s -y -d /tmp/sdk" % tarball_path) 95 # Extract SDK and run tar from SDK
100 self.assertEqual(0, result.status, "Couldn't extract SDK") 96 result = runCmd("%s -y -d %s" % (tarball_path, tmpdirname))
97 self.assertEqual(0, result.status, "Couldn't extract SDK")
101 98
102 env_script = result.output.split()[-1] 99 env_script = result.output.split()[-1]
103 result = runCmd(". %s; which tar" % env_script, shell=True) 100 result = runCmd(". %s; which tar" % env_script, shell=True)
104 self.assertEqual(0, result.status, "Couldn't setup SDK environment") 101 self.assertEqual(0, result.status, "Couldn't setup SDK environment")
105 is_sdk_tar = True if "/tmp/sdk" in result.output else False 102 is_sdk_tar = True if tmpdirname in result.output else False
106 self.assertTrue(is_sdk_tar, "Couldn't setup SDK environment") 103 self.assertTrue(is_sdk_tar, "Couldn't setup SDK environment")
107 104
108 tar_sdk = result.output 105 tar_sdk = result.output
109 result = runCmd("%s --version" % tar_sdk) 106 result = runCmd("%s --version" % tar_sdk)
110 self.assertEqual(0, result.status, "Couldn't run tar from SDK") 107 self.assertEqual(0, result.status, "Couldn't run tar from SDK")
111 108
112 109
110@OETestTag("runqemu")
113class TestImage(OESelftestTestCase): 111class TestImage(OESelftestTestCase):
114 112
115 def test_testimage_install(self): 113 def test_testimage_install(self):
@@ -123,15 +121,30 @@ class TestImage(OESelftestTestCase):
123 if get_bb_var('DISTRO') == 'poky-tiny': 121 if get_bb_var('DISTRO') == 'poky-tiny':
124 self.skipTest('core-image-full-cmdline not buildable for poky-tiny') 122 self.skipTest('core-image-full-cmdline not buildable for poky-tiny')
125 123
126 features = 'INHERIT += "testimage"\n' 124 features = 'IMAGE_CLASSES += "testimage"\n'
127 features += 'IMAGE_INSTALL_append = " libssl"\n' 125 features += 'IMAGE_INSTALL:append = " libssl"\n'
128 features += 'TEST_SUITES = "ping ssh selftest"\n' 126 features += 'TEST_SUITES = "ping ssh selftest"\n'
129 self.write_config(features) 127 self.write_config(features)
130 128
131 # Build core-image-sato and testimage
132 bitbake('core-image-full-cmdline socat') 129 bitbake('core-image-full-cmdline socat')
133 bitbake('-c testimage core-image-full-cmdline') 130 bitbake('-c testimage core-image-full-cmdline')
134 131
132 def test_testimage_slirp(self):
133 """
134 Summary: Check basic testimage functionality with qemu and slirp networking.
135 """
136
137 features = '''
138IMAGE_CLASSES:append = " testimage"
139IMAGE_FEATURES:append = " ssh-server-dropbear"
140IMAGE_ROOTFS_EXTRA_SPACE:append = "${@bb.utils.contains("IMAGE_CLASSES", "testimage", " + 5120", "", d)}"
141TEST_RUNQEMUPARAMS += " slirp"
142'''
143 self.write_config(features)
144
145 bitbake('core-image-minimal')
146 bitbake('-c testimage core-image-minimal')
147
135 def test_testimage_dnf(self): 148 def test_testimage_dnf(self):
136 """ 149 """
137 Summary: Check package feeds functionality for dnf 150 Summary: Check package feeds functionality for dnf
@@ -142,7 +155,7 @@ class TestImage(OESelftestTestCase):
142 if get_bb_var('DISTRO') == 'poky-tiny': 155 if get_bb_var('DISTRO') == 'poky-tiny':
143 self.skipTest('core-image-full-cmdline not buildable for poky-tiny') 156 self.skipTest('core-image-full-cmdline not buildable for poky-tiny')
144 157
145 features = 'INHERIT += "testimage"\n' 158 features = 'IMAGE_CLASSES += "testimage"\n'
146 features += 'TEST_SUITES = "ping ssh dnf_runtime dnf.DnfBasicTest.test_dnf_help"\n' 159 features += 'TEST_SUITES = "ping ssh dnf_runtime dnf.DnfBasicTest.test_dnf_help"\n'
147 # We don't yet know what the server ip and port will be - they will be patched 160 # We don't yet know what the server ip and port will be - they will be patched
148 # in at the start of the on-image test 161 # in at the start of the on-image test
@@ -164,10 +177,49 @@ class TestImage(OESelftestTestCase):
164 features += 'PSEUDO_IGNORE_PATHS .= ",%s"\n' % self.gpg_home 177 features += 'PSEUDO_IGNORE_PATHS .= ",%s"\n' % self.gpg_home
165 self.write_config(features) 178 self.write_config(features)
166 179
180 bitbake('core-image-full-cmdline socat')
181 bitbake('-c testimage core-image-full-cmdline')
182
183 def test_testimage_apt(self):
184 """
185 Summary: Check package feeds functionality for apt
186 Expected: 1. Check that remote package feeds can be accessed
187 Product: oe-core
188 Author: Ferry Toth <fntoth@gmail.com>
189 """
190 if get_bb_var('DISTRO') == 'poky-tiny':
191 self.skipTest('core-image-full-cmdline not buildable for poky-tiny')
192
193 features = 'IMAGE_CLASSES += "testimage"\n'
194 features += 'TEST_SUITES = "ping ssh apt.AptRepoTest.test_apt_install_from_repo"\n'
195 # We don't yet know what the server ip and port will be - they will be patched
196 # in at the start of the on-image test
197 features += 'PACKAGE_FEED_URIS = "http://bogus_ip:bogus_port"\n'
198 features += 'EXTRA_IMAGE_FEATURES += "package-management"\n'
199 features += 'PACKAGE_CLASSES = "package_deb"\n'
200 # We need gnupg on the target to install keys
201 features += 'IMAGE_INSTALL:append:pn-core-image-full-cmdline = " gnupg"\n'
202
203 bitbake('gnupg-native -c addto_recipe_sysroot')
204
205 # Enable package feed signing
206 self.gpg_home = tempfile.mkdtemp(prefix="oeqa-feed-sign-")
207 self.track_for_cleanup(self.gpg_home)
208 signing_key_dir = os.path.join(self.testlayer_path, 'files', 'signing')
209 runCmd('gpgconf --list-dirs --homedir %s; gpg -v --batch --homedir %s --import %s' % (self.gpg_home, self.gpg_home, os.path.join(signing_key_dir, 'key.secret')), native_sysroot=get_bb_var("RECIPE_SYSROOT_NATIVE", "gnupg-native"), shell=True)
210 features += 'INHERIT += "sign_package_feed"\n'
211 features += 'PACKAGE_FEED_GPG_NAME = "testuser"\n'
212 features += 'PACKAGE_FEED_GPG_PASSPHRASE_FILE = "%s"\n' % os.path.join(signing_key_dir, 'key.passphrase')
213 features += 'GPG_PATH = "%s"\n' % self.gpg_home
214 features += 'PSEUDO_IGNORE_PATHS .= ",%s"\n' % self.gpg_home
215 self.write_config(features)
216
167 # Build core-image-sato and testimage 217 # Build core-image-sato and testimage
168 bitbake('core-image-full-cmdline socat') 218 bitbake('core-image-full-cmdline socat')
169 bitbake('-c testimage core-image-full-cmdline') 219 bitbake('-c testimage core-image-full-cmdline')
170 220
221 # https://bugzilla.yoctoproject.org/show_bug.cgi?id=14966
222 @skipIfNotMachine("qemux86-64", "test needs qemux86-64")
171 def test_testimage_virgl_gtk_sdl(self): 223 def test_testimage_virgl_gtk_sdl(self):
172 """ 224 """
173 Summary: Check host-assisted accelerate OpenGL functionality in qemu with gtk and SDL frontends 225 Summary: Check host-assisted accelerate OpenGL functionality in qemu with gtk and SDL frontends
@@ -190,25 +242,26 @@ class TestImage(OESelftestTestCase):
190 242
191 qemu_packageconfig = get_bb_var('PACKAGECONFIG', 'qemu-system-native') 243 qemu_packageconfig = get_bb_var('PACKAGECONFIG', 'qemu-system-native')
192 qemu_distrofeatures = get_bb_var('DISTRO_FEATURES', 'qemu-system-native') 244 qemu_distrofeatures = get_bb_var('DISTRO_FEATURES', 'qemu-system-native')
193 features = 'INHERIT += "testimage"\n' 245 features = 'IMAGE_CLASSES += "testimage"\n'
194 if 'gtk+' not in qemu_packageconfig: 246 if 'gtk+' not in qemu_packageconfig:
195 features += 'PACKAGECONFIG_append_pn-qemu-system-native = " gtk+"\n' 247 features += 'PACKAGECONFIG:append:pn-qemu-system-native = " gtk+"\n'
196 if 'sdl' not in qemu_packageconfig: 248 if 'sdl' not in qemu_packageconfig:
197 features += 'PACKAGECONFIG_append_pn-qemu-system-native = " sdl"\n' 249 features += 'PACKAGECONFIG:append:pn-qemu-system-native = " sdl"\n'
198 if 'opengl' not in qemu_distrofeatures: 250 if 'opengl' not in qemu_distrofeatures:
199 features += 'DISTRO_FEATURES_append = " opengl"\n' 251 features += 'DISTRO_FEATURES:append = " opengl"\n'
200 features += 'TEST_SUITES = "ping ssh virgl"\n' 252 features += 'TEST_SUITES = "ping ssh virgl"\n'
201 features += 'IMAGE_FEATURES_append = " ssh-server-dropbear"\n' 253 features += 'IMAGE_FEATURES:append = " ssh-server-dropbear"\n'
202 features += 'IMAGE_INSTALL_append = " kmscube"\n' 254 features += 'IMAGE_INSTALL:append = " kmscube"\n'
203 features_gtk = features + 'TEST_RUNQEMUPARAMS = "gtk gl"\n' 255 features_gtk = features + 'TEST_RUNQEMUPARAMS += " gtk gl"\n'
204 self.write_config(features_gtk) 256 self.write_config(features_gtk)
205 bitbake('core-image-minimal') 257 bitbake('core-image-minimal')
206 bitbake('-c testimage core-image-minimal') 258 bitbake('-c testimage core-image-minimal')
207 features_sdl = features + 'TEST_RUNQEMUPARAMS = "sdl gl"\n' 259 features_sdl = features + 'TEST_RUNQEMUPARAMS += " sdl gl"\n'
208 self.write_config(features_sdl) 260 self.write_config(features_sdl)
209 bitbake('core-image-minimal') 261 bitbake('core-image-minimal')
210 bitbake('-c testimage core-image-minimal') 262 bitbake('-c testimage core-image-minimal')
211 263
264 @skipIfNotMachine("qemux86-64", "test needs qemux86-64")
212 def test_testimage_virgl_headless(self): 265 def test_testimage_virgl_headless(self):
213 """ 266 """
214 Summary: Check host-assisted accelerate OpenGL functionality in qemu with egl-headless frontend 267 Summary: Check host-assisted accelerate OpenGL functionality in qemu with egl-headless frontend
@@ -218,28 +271,25 @@ class TestImage(OESelftestTestCase):
218 Author: Alexander Kanavin <alex.kanavin@gmail.com> 271 Author: Alexander Kanavin <alex.kanavin@gmail.com>
219 """ 272 """
220 import subprocess, os 273 import subprocess, os
221 try: 274
222 content = os.listdir("/dev/dri") 275 distro = oe.lsb.distro_identifier()
223 if len([i for i in content if i.startswith('render')]) == 0: 276 if distro and (distro in ['debian-9', 'debian-10', 'centos-7', 'centos-8', 'ubuntu-16.04', 'ubuntu-18.04'] or
224 self.skipTest("No render nodes found in /dev/dri: %s" %(content)) 277 distro.startswith('almalinux') or distro.startswith('rocky')):
225 except FileNotFoundError: 278 self.skipTest('virgl headless cannot be tested with %s' %(distro))
226 self.skipTest("/dev/dri directory does not exist; no render nodes available on this machine.") 279
227 try:
228 dripath = subprocess.check_output("pkg-config --variable=dridriverdir dri", shell=True)
229 except subprocess.CalledProcessError as e:
230 self.skipTest("Could not determine the path to dri drivers on the host via pkg-config.\nPlease install Mesa development files (particularly, dri.pc) on the host machine.")
231 qemu_distrofeatures = get_bb_var('DISTRO_FEATURES', 'qemu-system-native') 280 qemu_distrofeatures = get_bb_var('DISTRO_FEATURES', 'qemu-system-native')
232 features = 'INHERIT += "testimage"\n' 281 features = 'IMAGE_CLASSES += "testimage"\n'
233 if 'opengl' not in qemu_distrofeatures: 282 if 'opengl' not in qemu_distrofeatures:
234 features += 'DISTRO_FEATURES_append = " opengl"\n' 283 features += 'DISTRO_FEATURES:append = " opengl"\n'
235 features += 'TEST_SUITES = "ping ssh virgl"\n' 284 features += 'TEST_SUITES = "ping ssh virgl"\n'
236 features += 'IMAGE_FEATURES_append = " ssh-server-dropbear"\n' 285 features += 'IMAGE_FEATURES:append = " ssh-server-dropbear"\n'
237 features += 'IMAGE_INSTALL_append = " kmscube"\n' 286 features += 'IMAGE_INSTALL:append = " kmscube"\n'
238 features += 'TEST_RUNQEMUPARAMS = "egl-headless"\n' 287 features += 'TEST_RUNQEMUPARAMS += " egl-headless"\n'
239 self.write_config(features) 288 self.write_config(features)
240 bitbake('core-image-minimal') 289 bitbake('core-image-minimal')
241 bitbake('-c testimage core-image-minimal') 290 bitbake('-c testimage core-image-minimal')
242 291
292@OETestTag("runqemu")
243class Postinst(OESelftestTestCase): 293class Postinst(OESelftestTestCase):
244 294
245 def init_manager_loop(self, init_manager): 295 def init_manager_loop(self, init_manager):
@@ -260,7 +310,7 @@ class Postinst(OESelftestTestCase):
260 features += 'IMAGE_FEATURES += "package-management empty-root-password"\n' 310 features += 'IMAGE_FEATURES += "package-management empty-root-password"\n'
261 features += 'PACKAGE_CLASSES = "%s"\n' % classes 311 features += 'PACKAGE_CLASSES = "%s"\n' % classes
262 if init_manager == "systemd": 312 if init_manager == "systemd":
263 features += 'DISTRO_FEATURES_append = " systemd"\n' 313 features += 'DISTRO_FEATURES:append = " systemd usrmerge"\n'
264 features += 'VIRTUAL-RUNTIME_init_manager = "systemd"\n' 314 features += 'VIRTUAL-RUNTIME_init_manager = "systemd"\n'
265 features += 'DISTRO_FEATURES_BACKFILL_CONSIDERED = "sysvinit"\n' 315 features += 'DISTRO_FEATURES_BACKFILL_CONSIDERED = "sysvinit"\n'
266 features += 'VIRTUAL-RUNTIME_initscripts = ""\n' 316 features += 'VIRTUAL-RUNTIME_initscripts = ""\n'
@@ -280,7 +330,7 @@ class Postinst(OESelftestTestCase):
280 330
281 331
282 332
283 @skipIfNotQemu('qemuall', 'Test only runs in qemu') 333 @skipIfNotQemu()
284 def test_postinst_rootfs_and_boot_sysvinit(self): 334 def test_postinst_rootfs_and_boot_sysvinit(self):
285 """ 335 """
286 Summary: The purpose of this test case is to verify Post-installation 336 Summary: The purpose of this test case is to verify Post-installation
@@ -301,7 +351,7 @@ class Postinst(OESelftestTestCase):
301 self.init_manager_loop("sysvinit") 351 self.init_manager_loop("sysvinit")
302 352
303 353
304 @skipIfNotQemu('qemuall', 'Test only runs in qemu') 354 @skipIfNotQemu()
305 def test_postinst_rootfs_and_boot_systemd(self): 355 def test_postinst_rootfs_and_boot_systemd(self):
306 """ 356 """
307 Summary: The purpose of this test case is to verify Post-installation 357 Summary: The purpose of this test case is to verify Post-installation
@@ -357,6 +407,7 @@ class Postinst(OESelftestTestCase):
357 self.assertFalse(os.path.isfile(os.path.join(hosttestdir, "rootfs-after-failure")), 407 self.assertFalse(os.path.isfile(os.path.join(hosttestdir, "rootfs-after-failure")),
358 "rootfs-after-failure file was created") 408 "rootfs-after-failure file was created")
359 409
410@OETestTag("runqemu")
360class SystemTap(OESelftestTestCase): 411class SystemTap(OESelftestTestCase):
361 """ 412 """
362 Summary: The purpose of this test case is to verify native crosstap 413 Summary: The purpose of this test case is to verify native crosstap
@@ -377,14 +428,14 @@ TEST_SERVER_IP = "192.168.7.1"
377TEST_TARGET_IP = "192.168.7.2" 428TEST_TARGET_IP = "192.168.7.2"
378 429
379EXTRA_IMAGE_FEATURES += "tools-profile dbg-pkgs" 430EXTRA_IMAGE_FEATURES += "tools-profile dbg-pkgs"
380IMAGE_FEATURES_append = " ssh-server-dropbear" 431IMAGE_FEATURES:append = " ssh-server-dropbear"
381 432
382# enables kernel debug symbols 433# enables kernel debug symbols
383KERNEL_EXTRA_FEATURES_append = " features/debug/debug-kernel.scc" 434KERNEL_EXTRA_FEATURES:append = " features/debug/debug-kernel.scc"
384KERNEL_EXTRA_FEATURES_append = " features/systemtap/systemtap.scc" 435KERNEL_EXTRA_FEATURES:append = " features/systemtap/systemtap.scc"
385 436
386# add systemtap run-time into target image if it is not there yet 437# add systemtap run-time into target image if it is not there yet
387IMAGE_INSTALL_append = " systemtap-runtime" 438IMAGE_INSTALL:append = " systemtap-runtime"
388""" 439"""
389 440
390 def test_crosstap_helloworld(self): 441 def test_crosstap_helloworld(self):
@@ -433,4 +484,3 @@ IMAGE_INSTALL_append = " systemtap-runtime"
433 cmd = "crosstap -r root@192.168.7.2 -s %s/process/ syscalls_by_pid.stp" % systemtap_examples 484 cmd = "crosstap -r root@192.168.7.2 -s %s/process/ syscalls_by_pid.stp" % systemtap_examples
434 result = runCmd(cmd) 485 result = runCmd(cmd)
435 self.assertEqual(0, result.status, 'crosstap syscalls_by_pid returned a non 0 status:%s' % result.output) 486 self.assertEqual(0, result.status, 'crosstap syscalls_by_pid returned a non 0 status:%s' % result.output)
436
diff --git a/meta/lib/oeqa/selftest/cases/rust.py b/meta/lib/oeqa/selftest/cases/rust.py
new file mode 100644
index 0000000000..ad14189c6d
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/rust.py
@@ -0,0 +1,231 @@
1# SPDX-License-Identifier: MIT
2import os
3import subprocess
4import time
5from oeqa.core.decorator import OETestTag
6from oeqa.core.case import OEPTestResultTestCase
7from oeqa.selftest.case import OESelftestTestCase
8from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu, Command
9from oeqa.utils.sshcontrol import SSHControl
10
11def parse_results(filename):
12 tests = {}
13 with open(filename, "r") as f:
14 lines = f.readlines()
15 for line in lines:
16 if "..." in line and "test [" in line:
17 test = line.split("test ")[1].split(" ... ")[0]
18 if "] " in test:
19 test = test.split("] ", 1)[1]
20 result = line.split(" ... ")[1].strip()
21 if result == "ok":
22 result = "PASS"
23 elif result == "failed":
24 result = "FAIL"
25 elif "ignored" in result:
26 result = "SKIPPED"
27 if test in tests:
28 if tests[test] != result:
29 print("Duplicate and mismatching result %s for %s" % (result, test))
30 else:
31 print("Duplicate result %s for %s" % (result, test))
32 else:
33 tests[test] = result
34 return tests
35
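For context, a minimal illustration of the rustc test-log lines that parse_results() above is written against; the sample lines are invented, not taken from a real run:

    # Invented lines in the "test [suite] name ... result" shape handled above.
    sample_lines = [
        "test [ui] tests/ui/hello.rs ... ok",
        "test [codegen] tests/codegen/drop.rs ... failed",
        "test [ui] tests/ui/target-only.rs ... ignored",
    ]
    # Written to a file and passed to parse_results(), these map (per the function above) to:
    #   {'tests/ui/hello.rs': 'PASS',
    #    'tests/codegen/drop.rs': 'FAIL',
    #    'tests/ui/target-only.rs': 'SKIPPED'}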
36# Total time taken for testing is about 2hr 20min, with PARALLEL_MAKE set to 40 parallel jobs.
37@OETestTag("toolchain-system")
38@OETestTag("toolchain-user")
39@OETestTag("runqemu")
40class RustSelfTestSystemEmulated(OESelftestTestCase, OEPTestResultTestCase):
41 def test_rust(self, *args, **kwargs):
42 # Uncomment the following line to disable the Rust oe-selftest:
43 #self.skipTest("The Rust Oe-selftest is disabled.")
44
45 # Skip mips32 target since it is unstable with rust tests
46 machine = get_bb_var('MACHINE')
47 if machine == "qemumips":
48 self.skipTest("The mips32 target is skipped for Rust Oe-selftest.")
49
50 # build remote-test-server before image build
51 recipe = "rust"
52 start_time = time.time()
53 bitbake("{} -c test_compile".format(recipe))
54 builddir = get_bb_var("RUSTSRC", "rust")
55 # build core-image-minimal with required packages
56 default_installed_packages = ["libgcc", "libstdc++", "libatomic", "libgomp"]
57 features = []
58 features.append('IMAGE_FEATURES += "ssh-server-dropbear"')
59 features.append('CORE_IMAGE_EXTRA_INSTALL += "{0}"'.format(" ".join(default_installed_packages)))
60 self.write_config("\n".join(features))
61 bitbake("core-image-minimal")
62
63 # Exclude the test folders that error out while building
64 # TODO: Fix the errors and include them for testing
65 # no-fail-fast: Run all tests regardless of failure.
66 # bless: First runs rustfmt to format the codebase,
67 # then runs tidy checks.
68 exclude_list = [
69 'compiler/rustc',
70 'compiler/rustc_interface/src/tests.rs',
71 'library/panic_abort',
72 'library/panic_unwind',
73 'library/test/src/stats/tests.rs',
74 'src/bootstrap/builder/tests.rs',
75 'src/doc/rustc',
76 'src/doc/rustdoc',
77 'src/doc/unstable-book',
78 'src/librustdoc',
79 'src/rustdoc-json-types',
80 'src/tools/compiletest/src/common.rs',
81 'src/tools/lint-docs',
82 'src/tools/rust-analyzer',
83 'src/tools/rustdoc-themes',
84 'src/tools/tidy',
85 'tests/assembly/asm/aarch64-outline-atomics.rs',
86 'tests/codegen/abi-main-signature-32bit-c-int.rs',
87 'tests/codegen/abi-repr-ext.rs',
88 'tests/codegen/abi-x86-interrupt.rs',
89 'tests/codegen/branch-protection.rs',
90 'tests/codegen/catch-unwind.rs',
91 'tests/codegen/cf-protection.rs',
92 'tests/codegen/enum-bounds-check-derived-idx.rs',
93 'tests/codegen/force-unwind-tables.rs',
94 'tests/codegen/intrinsic-no-unnamed-attr.rs',
95 'tests/codegen/issues/issue-103840.rs',
96 'tests/codegen/issues/issue-47278.rs',
97 'tests/codegen/issues/issue-73827-bounds-check-index-in-subexpr.rs',
98 'tests/codegen/lifetime_start_end.rs',
99 'tests/codegen/local-generics-in-exe-internalized.rs',
100 'tests/codegen/match-unoptimized.rs',
101 'tests/codegen/noalias-rwlockreadguard.rs',
102 'tests/codegen/non-terminate/nonempty-infinite-loop.rs',
103 'tests/codegen/noreturn-uninhabited.rs',
104 'tests/codegen/repr-transparent-aggregates-3.rs',
105 'tests/codegen/riscv-abi/call-llvm-intrinsics.rs',
106 'tests/codegen/riscv-abi/riscv64-lp64f-lp64d-abi.rs',
107 'tests/codegen/riscv-abi/riscv64-lp64d-abi.rs',
108 'tests/codegen/sse42-implies-crc32.rs',
109 'tests/codegen/thread-local.rs',
110 'tests/codegen/uninit-consts.rs',
111 'tests/pretty/raw-str-nonexpr.rs',
112 'tests/run-make',
113 'tests/run-make-fulldeps',
114 'tests/rustdoc',
115 'tests/rustdoc-json',
116 'tests/rustdoc-js-std',
117 'tests/rustdoc-ui/cfg-test.rs',
118 'tests/rustdoc-ui/check-cfg-test.rs',
119 'tests/rustdoc-ui/display-output.rs',
120 'tests/rustdoc-ui/doc-comment-multi-line-attr.rs',
121 'tests/rustdoc-ui/doc-comment-multi-line-cfg-attr.rs',
122 'tests/rustdoc-ui/doc-test-doctest-feature.rs',
123 'tests/rustdoc-ui/doctest-multiline-crate-attribute.rs',
124 'tests/rustdoc-ui/doctest-output.rs',
125 'tests/rustdoc-ui/doc-test-rustdoc-feature.rs',
126 'tests/rustdoc-ui/failed-doctest-compile-fail.rs',
127 'tests/rustdoc-ui/issue-80992.rs',
128 'tests/rustdoc-ui/issue-91134.rs',
129 'tests/rustdoc-ui/nocapture-fail.rs',
130 'tests/rustdoc-ui/nocapture.rs',
131 'tests/rustdoc-ui/no-run-flag.rs',
132 'tests/rustdoc-ui/run-directory.rs',
133 'tests/rustdoc-ui/test-no_std.rs',
134 'tests/rustdoc-ui/test-type.rs',
135 'tests/rustdoc/unit-return.rs',
136 'tests/ui/abi/stack-probes-lto.rs',
137 'tests/ui/abi/stack-probes.rs',
138 'tests/ui/array-slice-vec/subslice-patterns-const-eval-match.rs',
139 'tests/ui/asm/x86_64/sym.rs',
140 'tests/ui/associated-type-bounds/fn-apit.rs',
141 'tests/ui/associated-type-bounds/fn-dyn-apit.rs',
142 'tests/ui/associated-type-bounds/fn-wrap-apit.rs',
143 'tests/ui/debuginfo/debuginfo-emit-llvm-ir-and-split-debuginfo.rs',
144 'tests/ui/drop/dynamic-drop.rs',
145 'tests/ui/empty_global_asm.rs',
146 'tests/ui/functions-closures/fn-help-with-err.rs',
147 'tests/ui/linkage-attr/issue-10755.rs',
148 'tests/ui/macros/restricted-shadowing-legacy.rs',
149 'tests/ui/process/nofile-limit.rs',
150 'tests/ui/process/process-panic-after-fork.rs',
151 'tests/ui/process/process-sigpipe.rs',
152 'tests/ui/simd/target-feature-mixup.rs',
153 'tests/ui/structs-enums/multiple-reprs.rs',
154 'src/tools/jsondoclint',
155 'src/tools/replace-version-placeholder',
156 'tests/codegen/abi-efiapi.rs',
157 'tests/codegen/abi-sysv64.rs',
158 'tests/codegen/align-byval.rs',
159 'tests/codegen/align-fn.rs',
160 'tests/codegen/asm-powerpc-clobbers.rs',
161 'tests/codegen/async-fn-debug-awaitee-field.rs',
162 'tests/codegen/binary-search-index-no-bound-check.rs',
163 'tests/codegen/call-metadata.rs',
164 'tests/codegen/debug-column.rs',
165 'tests/codegen/debug-limited.rs',
166 'tests/codegen/debuginfo-generic-closure-env-names.rs',
167 'tests/codegen/drop.rs',
168 'tests/codegen/dst-vtable-align-nonzero.rs',
169 'tests/codegen/enable-lto-unit-splitting.rs',
170 'tests/codegen/enum/enum-u128.rs',
171 'tests/codegen/fn-impl-trait-self.rs',
172 'tests/codegen/inherit_overflow.rs',
173 'tests/codegen/inline-function-args-debug-info.rs',
174 'tests/codegen/intrinsics/mask.rs',
175 'tests/codegen/intrinsics/transmute-niched.rs',
176 'tests/codegen/issues/issue-73258.rs',
177 'tests/codegen/issues/issue-75546.rs',
178 'tests/codegen/issues/issue-77812.rs',
179 'tests/codegen/issues/issue-98156-const-arg-temp-lifetime.rs',
180 'tests/codegen/llvm-ident.rs',
181 'tests/codegen/mainsubprogram.rs',
182 'tests/codegen/move-operands.rs',
183 'tests/codegen/repr/transparent-mips64.rs',
184 'tests/mir-opt/',
185 'tests/rustdoc-json',
186 'tests/rustdoc-ui/doc-test-rustdoc-feature.rs',
187 'tests/rustdoc-ui/no-run-flag.rs',
188 'tests/ui-fulldeps/',
189 'tests/ui/numbers-arithmetic/u128.rs'
190 ]
191
192 exclude_fail_tests = " ".join([" --exclude " + item for item in exclude_list])
193        # Combine exclude_fail_tests with the other test arguments
194 testargs = exclude_fail_tests + " --doc --no-fail-fast --bless"
195
196        # Wrap the execution with a qemu instance.
197        # The guest is given 512 MB of RAM (qemuparams "-m 512") for running the test suite.
198 with runqemu("core-image-minimal", runqemuparams = "nographic", qemuparams = "-m 512") as qemu:
199 # Copy remote-test-server to image through scp
200 host_sys = get_bb_var("RUST_BUILD_SYS", "rust")
201 ssh = SSHControl(ip=qemu.ip, logfile=qemu.sshlog, user="root")
202 ssh.copy_to(builddir + "/build/" + host_sys + "/stage1-tools-bin/remote-test-server","~/")
203 # Execute remote-test-server on image through background ssh
204 command = '~/remote-test-server --bind 0.0.0.0:12345 -v'
205 sshrun=subprocess.Popen(("ssh", '-o', 'UserKnownHostsFile=/dev/null', '-o', 'StrictHostKeyChecking=no', '-f', "root@%s" % qemu.ip, command), shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
206 # Get the values of variables.
207 tcpath = get_bb_var("TARGET_SYS", "rust")
208 targetsys = get_bb_var("RUST_TARGET_SYS", "rust")
209 rustlibpath = get_bb_var("WORKDIR", "rust")
210 tmpdir = get_bb_var("TMPDIR", "rust")
211
212 # Set path for target-poky-linux-gcc, RUST_TARGET_PATH and hosttools.
213 cmd = " export PATH=%s/recipe-sysroot-native/usr/bin:$PATH;" % rustlibpath
214 cmd = cmd + " export TARGET_VENDOR=\"-poky\";"
215 cmd = cmd + " export PATH=%s/recipe-sysroot-native/usr/bin/%s:%s/hosttools:$PATH;" % (rustlibpath, tcpath, tmpdir)
216 cmd = cmd + " export RUST_TARGET_PATH=%s/rust-targets;" % rustlibpath
217 # Trigger testing.
218 cmd = cmd + " export TEST_DEVICE_ADDR=\"%s:12345\";" % qemu.ip
219 cmd = cmd + " cd %s; python3 src/bootstrap/bootstrap.py test %s --target %s" % (builddir, testargs, targetsys)
220 retval = runCmd(cmd)
221 end_time = time.time()
222
223 resultlog = rustlibpath + "/results-log.txt"
224 with open(resultlog, "w") as f:
225 f.write(retval.output)
226
227 ptestsuite = "rust"
228 self.ptest_section(ptestsuite, duration = int(end_time - start_time), logfile=resultlog)
229 test_results = parse_results(resultlog)
230 for test in test_results:
231 self.ptest_result(ptestsuite, test, test_results[test])
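
For readers following the new Rust selftest above, here is a minimal, self-contained sketch (not part of the patch) of how an exclusion list is turned into repeated "--exclude" arguments and combined with the remaining bootstrap test flags. The shortened list below reuses two entries from the real list; the printed command is illustrative only.

# Illustrative sketch only: composing the bootstrap test arguments from an
# exclusion list, mirroring the approach used by the selftest above.
exclude_list = [
    'src/tools/tidy',   # shortened subset of the exclusion list above
    'tests/run-make',
]
exclude_fail_tests = " ".join("--exclude " + item for item in exclude_list)
testargs = exclude_fail_tests + " --doc --no-fail-fast --bless"
print(testargs)
# --exclude src/tools/tidy --exclude tests/run-make --doc --no-fail-fast --bless
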
diff --git a/meta/lib/oeqa/selftest/cases/selftest.py b/meta/lib/oeqa/selftest/cases/selftest.py
index af080dcf03..a80a8651a5 100644
--- a/meta/lib/oeqa/selftest/cases/selftest.py
+++ b/meta/lib/oeqa/selftest/cases/selftest.py
@@ -1,9 +1,10 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import importlib 7import importlib
6from oeqa.utils.commands import runCmd
7import oeqa.selftest 8import oeqa.selftest
8from oeqa.selftest.case import OESelftestTestCase 9from oeqa.selftest.case import OESelftestTestCase
9 10
diff --git a/meta/lib/oeqa/selftest/cases/signing.py b/meta/lib/oeqa/selftest/cases/signing.py
index a28c7eb19a..18cce0ba25 100644
--- a/meta/lib/oeqa/selftest/cases/signing.py
+++ b/meta/lib/oeqa/selftest/cases/signing.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -145,7 +147,7 @@ class Signing(OESelftestTestCase):
145 feature += 'GPG_PATH = "%s"\n' % self.gpg_dir 147 feature += 'GPG_PATH = "%s"\n' % self.gpg_dir
146 feature += 'SSTATE_DIR = "%s"\n' % sstatedir 148 feature += 'SSTATE_DIR = "%s"\n' % sstatedir
147 # Any mirror might have partial sstate without .sig files, triggering failures 149 # Any mirror might have partial sstate without .sig files, triggering failures
148 feature += 'SSTATE_MIRRORS_forcevariable = ""\n' 150 feature += 'SSTATE_MIRRORS:forcevariable = ""\n'
149 151
150 self.write_config(feature) 152 self.write_config(feature)
151 153
@@ -159,13 +161,13 @@ class Signing(OESelftestTestCase):
159 bitbake('-c clean %s' % test_recipe) 161 bitbake('-c clean %s' % test_recipe)
160 bitbake('-c populate_lic %s' % test_recipe) 162 bitbake('-c populate_lic %s' % test_recipe)
161 163
162 recipe_sig = glob.glob(sstatedir + '/*/*/*:ed:*_populate_lic.tgz.sig') 164 recipe_sig = glob.glob(sstatedir + '/*/*/*:ed:*_populate_lic.tar.zst.sig')
163 recipe_tgz = glob.glob(sstatedir + '/*/*/*:ed:*_populate_lic.tgz') 165 recipe_archive = glob.glob(sstatedir + '/*/*/*:ed:*_populate_lic.tar.zst')
164 166
165 self.assertEqual(len(recipe_sig), 1, 'Failed to find .sig file.') 167 self.assertEqual(len(recipe_sig), 1, 'Failed to find .sig file.')
166 self.assertEqual(len(recipe_tgz), 1, 'Failed to find .tgz file.') 168 self.assertEqual(len(recipe_archive), 1, 'Failed to find .tar.zst file.')
167 169
168 ret = runCmd('gpg --homedir %s --verify %s %s' % (self.gpg_dir, recipe_sig[0], recipe_tgz[0])) 170 ret = runCmd('gpg --homedir %s --verify %s %s' % (self.gpg_dir, recipe_sig[0], recipe_archive[0]))
169 # gpg: Signature made Thu 22 Oct 2015 01:45:09 PM EEST using RSA key ID 61EEFB30 171 # gpg: Signature made Thu 22 Oct 2015 01:45:09 PM EEST using RSA key ID 61EEFB30
170 # gpg: Good signature from "testuser (nocomment) <testuser@email.com>" 172 # gpg: Good signature from "testuser (nocomment) <testuser@email.com>"
171 self.assertIn('gpg: Good signature from', ret.output, 'Package signed incorrectly.') 173 self.assertIn('gpg: Good signature from', ret.output, 'Package signed incorrectly.')
@@ -189,7 +191,7 @@ class LockedSignatures(OESelftestTestCase):
189 191
190 bitbake(test_recipe) 192 bitbake(test_recipe)
191 # Generate locked sigs include file 193 # Generate locked sigs include file
192 bitbake('-S none %s' % test_recipe) 194 bitbake('-S lockedsigs %s' % test_recipe)
193 195
194 feature = 'require %s\n' % locked_sigs_file 196 feature = 'require %s\n' % locked_sigs_file
195 feature += 'SIGGEN_LOCKEDSIGS_TASKSIG_CHECK = "warn"\n' 197 feature += 'SIGGEN_LOCKEDSIGS_TASKSIG_CHECK = "warn"\n'
@@ -206,7 +208,7 @@ class LockedSignatures(OESelftestTestCase):
206 # Use uuid so hash equivalance server isn't triggered 208 # Use uuid so hash equivalance server isn't triggered
207 recipe_append_file = test_recipe + '_' + get_bb_var('PV', test_recipe) + '.bbappend' 209 recipe_append_file = test_recipe + '_' + get_bb_var('PV', test_recipe) + '.bbappend'
208 recipe_append_path = os.path.join(templayerdir, 'recipes-test', test_recipe, recipe_append_file) 210 recipe_append_path = os.path.join(templayerdir, 'recipes-test', test_recipe, recipe_append_file)
209 feature = 'SUMMARY_${PN} = "test locked signature%s"\n' % uuid.uuid4() 211 feature = 'SUMMARY:${PN} = "test locked signature%s"\n' % uuid.uuid4()
210 212
211 os.mkdir(os.path.join(templayerdir, 'recipes-test')) 213 os.mkdir(os.path.join(templayerdir, 'recipes-test'))
212 os.mkdir(os.path.join(templayerdir, 'recipes-test', test_recipe)) 214 os.mkdir(os.path.join(templayerdir, 'recipes-test', test_recipe))
diff --git a/meta/lib/oeqa/selftest/cases/spdx.py b/meta/lib/oeqa/selftest/cases/spdx.py
new file mode 100644
index 0000000000..05fc4e390b
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/spdx.py
@@ -0,0 +1,54 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import json
8import os
9from oeqa.selftest.case import OESelftestTestCase
10from oeqa.utils.commands import bitbake, get_bb_var, runCmd
11
12class SPDXCheck(OESelftestTestCase):
13
14 @classmethod
15 def setUpClass(cls):
16 super(SPDXCheck, cls).setUpClass()
17 bitbake("python3-spdx-tools-native")
18 bitbake("-c addto_recipe_sysroot python3-spdx-tools-native")
19
20 def check_recipe_spdx(self, high_level_dir, spdx_file, target_name):
21 config = """
22INHERIT += "create-spdx"
23"""
24 self.write_config(config)
25
26 deploy_dir = get_bb_var("DEPLOY_DIR")
27 machine_var = get_bb_var("MACHINE")
28 # qemux86-64 creates the directory qemux86_64
29 machine_dir = machine_var.replace("-", "_")
30
31 full_file_path = os.path.join(deploy_dir, "spdx", machine_dir, high_level_dir, spdx_file)
32
33 try:
34 os.remove(full_file_path)
35 except FileNotFoundError:
36 pass
37
38 bitbake("%s -c create_spdx" % target_name)
39
40 def check_spdx_json(filename):
41 with open(filename) as f:
42 report = json.load(f)
43 self.assertNotEqual(report, None)
44 self.assertNotEqual(report["SPDXID"], None)
45
46 python = os.path.join(get_bb_var('STAGING_BINDIR', 'python3-spdx-tools-native'), 'nativepython3')
47 validator = os.path.join(get_bb_var('STAGING_BINDIR', 'python3-spdx-tools-native'), 'pyspdxtools')
48 result = runCmd("{} {} -i {}".format(python, validator, filename))
49
50 self.assertExists(full_file_path)
51 result = check_spdx_json(full_file_path)
52
53 def test_spdx_base_files(self):
54 self.check_recipe_spdx("packages", "base-files.spdx.json", "base-files")
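
For context, a minimal sketch (not part of the patch) of the JSON sanity check performed by check_spdx_json above; only the two assertions from the test are reproduced, and the example path at the end is hypothetical, built from DEPLOY_DIR/spdx/<machine>/packages as the test does.

# Minimal sketch: parse an SPDX document and check it has an SPDXID,
# without the pyspdxtools validation step the selftest also runs.
import json

def check_spdx_json(filename):
    with open(filename) as f:
        report = json.load(f)
    # The test above only asserts that the document parsed and carries an SPDXID.
    assert report is not None
    assert report.get("SPDXID") is not None

# Example call (hypothetical path):
# check_spdx_json("tmp/deploy/spdx/qemux86_64/packages/base-files.spdx.json")
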
diff --git a/meta/lib/oeqa/selftest/cases/sstate.py b/meta/lib/oeqa/selftest/cases/sstate.py
deleted file mode 100644
index 80ce9e353c..0000000000
--- a/meta/lib/oeqa/selftest/cases/sstate.py
+++ /dev/null
@@ -1,67 +0,0 @@
1#
2# SPDX-License-Identifier: MIT
3#
4
5import datetime
6import unittest
7import os
8import re
9import shutil
10
11import oeqa.utils.ftools as ftools
12from oeqa.selftest.case import OESelftestTestCase
13from oeqa.utils.commands import runCmd, bitbake, get_bb_vars, get_test_layer
14
15
16class SStateBase(OESelftestTestCase):
17
18 def setUpLocal(self):
19 super(SStateBase, self).setUpLocal()
20 self.temp_sstate_location = None
21 needed_vars = ['SSTATE_DIR', 'NATIVELSBSTRING', 'TCLIBC', 'TUNE_ARCH',
22 'TOPDIR', 'TARGET_VENDOR', 'TARGET_OS']
23 bb_vars = get_bb_vars(needed_vars)
24 self.sstate_path = bb_vars['SSTATE_DIR']
25 self.hostdistro = bb_vars['NATIVELSBSTRING']
26 self.tclibc = bb_vars['TCLIBC']
27 self.tune_arch = bb_vars['TUNE_ARCH']
28 self.topdir = bb_vars['TOPDIR']
29 self.target_vendor = bb_vars['TARGET_VENDOR']
30 self.target_os = bb_vars['TARGET_OS']
31 self.distro_specific_sstate = os.path.join(self.sstate_path, self.hostdistro)
32
33 # Creates a special sstate configuration with the option to add sstate mirrors
34 def config_sstate(self, temp_sstate_location=False, add_local_mirrors=[]):
35 self.temp_sstate_location = temp_sstate_location
36
37 if self.temp_sstate_location:
38 temp_sstate_path = os.path.join(self.builddir, "temp_sstate_%s" % datetime.datetime.now().strftime('%Y%m%d%H%M%S'))
39 config_temp_sstate = "SSTATE_DIR = \"%s\"" % temp_sstate_path
40 self.append_config(config_temp_sstate)
41 self.track_for_cleanup(temp_sstate_path)
42 bb_vars = get_bb_vars(['SSTATE_DIR', 'NATIVELSBSTRING'])
43 self.sstate_path = bb_vars['SSTATE_DIR']
44 self.hostdistro = bb_vars['NATIVELSBSTRING']
45 self.distro_specific_sstate = os.path.join(self.sstate_path, self.hostdistro)
46
47 if add_local_mirrors:
48 config_set_sstate_if_not_set = 'SSTATE_MIRRORS ?= ""'
49 self.append_config(config_set_sstate_if_not_set)
50 for local_mirror in add_local_mirrors:
51 self.assertFalse(os.path.join(local_mirror) == os.path.join(self.sstate_path), msg='Cannot add the current sstate path as a sstate mirror')
52 config_sstate_mirror = "SSTATE_MIRRORS += \"file://.* file:///%s/PATH\"" % local_mirror
53 self.append_config(config_sstate_mirror)
54
55 # Returns a list containing sstate files
56 def search_sstate(self, filename_regex, distro_specific=True, distro_nonspecific=True):
57 result = []
58 for root, dirs, files in os.walk(self.sstate_path):
59 if distro_specific and re.search(r"%s/%s/[a-z0-9]{2}/[a-z0-9]{2}$" % (self.sstate_path, self.hostdistro), root):
60 for f in files:
61 if re.search(filename_regex, f):
62 result.append(f)
63 if distro_nonspecific and re.search(r"%s/[a-z0-9]{2}/[a-z0-9]{2}$" % self.sstate_path, root):
64 for f in files:
65 if re.search(filename_regex, f):
66 result.append(f)
67 return result
diff --git a/meta/lib/oeqa/selftest/cases/sstatetests.py b/meta/lib/oeqa/selftest/cases/sstatetests.py
index c46e8ba489..86d6cd7464 100644
--- a/meta/lib/oeqa/selftest/cases/sstatetests.py
+++ b/meta/lib/oeqa/selftest/cases/sstatetests.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -7,54 +9,77 @@ import shutil
7import glob 9import glob
8import subprocess 10import subprocess
9import tempfile 11import tempfile
12import datetime
13import re
10 14
15from oeqa.utils.commands import runCmd, bitbake, get_bb_var, create_temp_layer, get_bb_vars
11from oeqa.selftest.case import OESelftestTestCase 16from oeqa.selftest.case import OESelftestTestCase
12from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_test_layer, create_temp_layer 17from oeqa.core.decorator import OETestTag
13from oeqa.selftest.cases.sstate import SStateBase
14 18
19import oe
15import bb.siggen 20import bb.siggen
16 21
17class SStateTests(SStateBase): 22# Set to True to preserve stamp files after test execution for debugging failures
18 def test_autorev_sstate_works(self): 23keep_temp_files = False
19 # Test that a git repository which changes is correctly handled by SRCREV = ${AUTOREV} 24
20 # when PV does not contain SRCPV 25class SStateBase(OESelftestTestCase):
21 26
22 tempdir = tempfile.mkdtemp(prefix='sstate_autorev') 27 def setUpLocal(self):
23 tempdldir = tempfile.mkdtemp(prefix='sstate_autorev_dldir') 28 super(SStateBase, self).setUpLocal()
24 self.track_for_cleanup(tempdir) 29 self.temp_sstate_location = None
25 self.track_for_cleanup(tempdldir) 30 needed_vars = ['SSTATE_DIR', 'NATIVELSBSTRING', 'TCLIBC', 'TUNE_ARCH',
26 create_temp_layer(tempdir, 'selftestrecipetool') 31 'TOPDIR', 'TARGET_VENDOR', 'TARGET_OS']
27 self.add_command_to_tearDown('bitbake-layers remove-layer %s' % tempdir) 32 bb_vars = get_bb_vars(needed_vars)
28 self.append_config("DL_DIR = \"%s\"" % tempdldir) 33 self.sstate_path = bb_vars['SSTATE_DIR']
29 runCmd('bitbake-layers add-layer %s' % tempdir) 34 self.hostdistro = bb_vars['NATIVELSBSTRING']
30 35 self.tclibc = bb_vars['TCLIBC']
31 # Use dbus-wait as a local git repo we can add a commit between two builds in 36 self.tune_arch = bb_vars['TUNE_ARCH']
32 pn = 'dbus-wait' 37 self.topdir = bb_vars['TOPDIR']
33 srcrev = '6cc6077a36fe2648a5f993fe7c16c9632f946517' 38 self.target_vendor = bb_vars['TARGET_VENDOR']
34 url = 'git://git.yoctoproject.org/dbus-wait' 39 self.target_os = bb_vars['TARGET_OS']
35 result = runCmd('git clone %s noname' % url, cwd=tempdir) 40 self.distro_specific_sstate = os.path.join(self.sstate_path, self.hostdistro)
36 srcdir = os.path.join(tempdir, 'noname') 41
37 result = runCmd('git reset --hard %s' % srcrev, cwd=srcdir) 42 def track_for_cleanup(self, path):
38 self.assertTrue(os.path.isfile(os.path.join(srcdir, 'configure.ac')), 'Unable to find configure script in source directory') 43 if not keep_temp_files:
39 44 super().track_for_cleanup(path)
40 recipefile = os.path.join(tempdir, "recipes-test", "dbus-wait-test", 'dbus-wait-test_git.bb') 45
41 os.makedirs(os.path.dirname(recipefile)) 46 # Creates a special sstate configuration with the option to add sstate mirrors
42 srcuri = 'git://' + srcdir + ';protocol=file' 47 def config_sstate(self, temp_sstate_location=False, add_local_mirrors=[]):
43 result = runCmd(['recipetool', 'create', '-o', recipefile, srcuri]) 48 self.temp_sstate_location = temp_sstate_location
44 self.assertTrue(os.path.isfile(recipefile), 'recipetool did not create recipe file; output:\n%s' % result.output) 49
45 50 if self.temp_sstate_location:
46 with open(recipefile, 'a') as f: 51 temp_sstate_path = os.path.join(self.builddir, "temp_sstate_%s" % datetime.datetime.now().strftime('%Y%m%d%H%M%S'))
47 f.write('SRCREV = "${AUTOREV}"\n') 52 config_temp_sstate = "SSTATE_DIR = \"%s\"" % temp_sstate_path
48 f.write('PV = "1.0"\n') 53 self.append_config(config_temp_sstate)
49 54 self.track_for_cleanup(temp_sstate_path)
50 bitbake("dbus-wait-test -c fetch") 55 bb_vars = get_bb_vars(['SSTATE_DIR', 'NATIVELSBSTRING'])
51 with open(os.path.join(srcdir, "bar.txt"), "w") as f: 56 self.sstate_path = bb_vars['SSTATE_DIR']
52 f.write("foo") 57 self.hostdistro = bb_vars['NATIVELSBSTRING']
53 result = runCmd('git add bar.txt; git commit -asm "add bar"', cwd=srcdir) 58 self.distro_specific_sstate = os.path.join(self.sstate_path, self.hostdistro)
54 bitbake("dbus-wait-test -c unpack") 59
55 60 if add_local_mirrors:
56 61 config_set_sstate_if_not_set = 'SSTATE_MIRRORS ?= ""'
57 # Test sstate files creation and their location 62 self.append_config(config_set_sstate_if_not_set)
63 for local_mirror in add_local_mirrors:
64 self.assertFalse(os.path.join(local_mirror) == os.path.join(self.sstate_path), msg='Cannot add the current sstate path as a sstate mirror')
65 config_sstate_mirror = "SSTATE_MIRRORS += \"file://.* file:///%s/PATH\"" % local_mirror
66 self.append_config(config_sstate_mirror)
67
68 # Returns a list containing sstate files
69 def search_sstate(self, filename_regex, distro_specific=True, distro_nonspecific=True):
70 result = []
71 for root, dirs, files in os.walk(self.sstate_path):
72 if distro_specific and re.search(r"%s/%s/[a-z0-9]{2}/[a-z0-9]{2}$" % (self.sstate_path, self.hostdistro), root):
73 for f in files:
74 if re.search(filename_regex, f):
75 result.append(f)
76 if distro_nonspecific and re.search(r"%s/[a-z0-9]{2}/[a-z0-9]{2}$" % self.sstate_path, root):
77 for f in files:
78 if re.search(filename_regex, f):
79 result.append(f)
80 return result
81
82 # Test sstate files creation and their location and directory perms
58 def run_test_sstate_creation(self, targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True, should_pass=True): 83 def run_test_sstate_creation(self, targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True, should_pass=True):
59 self.config_sstate(temp_sstate_location, [self.sstate_path]) 84 self.config_sstate(temp_sstate_location, [self.sstate_path])
60 85
@@ -63,12 +88,25 @@ class SStateTests(SStateBase):
63 else: 88 else:
64 bitbake(['-ccleansstate'] + targets) 89 bitbake(['-ccleansstate'] + targets)
65 90
 91        # We need to test that the env umask does not affect sstate directory creation
 92        # So, first, we'll get the current umask and set it to something we know is incorrect
 93        # See sstate_task_postfunc for the correct umask, os.umask(0o002)
94 import os
95 def current_umask():
96 current_umask = os.umask(0)
97 os.umask(current_umask)
98 return current_umask
99
100 orig_umask = current_umask()
101 # Set it to a umask we know will be 'wrong'
102 os.umask(0o022)
103
66 bitbake(targets) 104 bitbake(targets)
67 file_tracker = [] 105 file_tracker = []
68 results = self.search_sstate('|'.join(map(str, targets)), distro_specific, distro_nonspecific) 106 results = self.search_sstate('|'.join(map(str, targets)), distro_specific, distro_nonspecific)
69 if distro_nonspecific: 107 if distro_nonspecific:
70 for r in results: 108 for r in results:
71 if r.endswith(("_populate_lic.tgz", "_populate_lic.tgz.siginfo", "_fetch.tgz.siginfo", "_unpack.tgz.siginfo", "_patch.tgz.siginfo")): 109 if r.endswith(("_populate_lic.tar.zst", "_populate_lic.tar.zst.siginfo", "_fetch.tar.zst.siginfo", "_unpack.tar.zst.siginfo", "_patch.tar.zst.siginfo")):
72 continue 110 continue
73 file_tracker.append(r) 111 file_tracker.append(r)
74 else: 112 else:
@@ -79,17 +117,18 @@ class SStateTests(SStateBase):
79 else: 117 else:
80 self.assertTrue(not file_tracker , msg="Found sstate files in the wrong place for: %s (found %s)" % (', '.join(map(str, targets)), str(file_tracker))) 118 self.assertTrue(not file_tracker , msg="Found sstate files in the wrong place for: %s (found %s)" % (', '.join(map(str, targets)), str(file_tracker)))
81 119
82 def test_sstate_creation_distro_specific_pass(self): 120 # Now we'll walk the tree to check the mode and see if things are incorrect.
83 self.run_test_sstate_creation(['binutils-cross-'+ self.tune_arch, 'binutils-native'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True) 121 badperms = []
84 122 for root, dirs, files in os.walk(self.sstate_path):
85 def test_sstate_creation_distro_specific_fail(self): 123 for directory in dirs:
86 self.run_test_sstate_creation(['binutils-cross-'+ self.tune_arch, 'binutils-native'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True, should_pass=False) 124 if (os.stat(os.path.join(root, directory)).st_mode & 0o777) != 0o775:
125 badperms.append(os.path.join(root, directory))
87 126
88 def test_sstate_creation_distro_nonspecific_pass(self): 127 # Return to original umask
89 self.run_test_sstate_creation(['linux-libc-headers'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True) 128 os.umask(orig_umask)
90 129
91 def test_sstate_creation_distro_nonspecific_fail(self): 130 if should_pass:
92 self.run_test_sstate_creation(['linux-libc-headers'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True, should_pass=False) 131 self.assertTrue(badperms , msg="Found sstate directories with the wrong permissions: %s (found %s)" % (', '.join(map(str, targets)), str(badperms)))
93 132
94 # Test the sstate files deletion part of the do_cleansstate task 133 # Test the sstate files deletion part of the do_cleansstate task
95 def run_test_cleansstate_task(self, targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True): 134 def run_test_cleansstate_task(self, targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True):
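
As an aside on the umask handling added in the hunk above: a minimal sketch (not from the patch) of the save/set/restore pattern it relies on. The current_umask helper is the same one the test defines; the build step is elided, and the expected directory mode of 0o775 comes from sstate_task_postfunc as noted in the comment above.

# Sketch of the umask save/set/restore pattern used by the test above.
import os

def current_umask():
    # Read the process umask without changing it (same helper as in the test).
    value = os.umask(0)
    os.umask(value)
    return value

orig_umask = current_umask()
os.umask(0o022)                  # deliberately "wrong" umask, as in the test
try:
    # ... the real test runs bitbake here, then walks SSTATE_DIR and collects
    # any directory whose (mode & 0o777) != 0o775 into badperms ...
    pass
finally:
    os.umask(orig_umask)         # always restore the original umask
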
@@ -98,29 +137,15 @@ class SStateTests(SStateBase):
98 bitbake(['-ccleansstate'] + targets) 137 bitbake(['-ccleansstate'] + targets)
99 138
100 bitbake(targets) 139 bitbake(targets)
101 tgz_created = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific, distro_nonspecific) 140 archives_created = self.search_sstate('|'.join(map(str, [s + r'.*?\.tar.zst$' for s in targets])), distro_specific, distro_nonspecific)
102 self.assertTrue(tgz_created, msg="Could not find sstate .tgz files for: %s (%s)" % (', '.join(map(str, targets)), str(tgz_created))) 141 self.assertTrue(archives_created, msg="Could not find sstate .tar.zst files for: %s (%s)" % (', '.join(map(str, targets)), str(archives_created)))
103 142
104 siginfo_created = self.search_sstate('|'.join(map(str, [s + r'.*?\.siginfo$' for s in targets])), distro_specific, distro_nonspecific) 143 siginfo_created = self.search_sstate('|'.join(map(str, [s + r'.*?\.siginfo$' for s in targets])), distro_specific, distro_nonspecific)
105 self.assertTrue(siginfo_created, msg="Could not find sstate .siginfo files for: %s (%s)" % (', '.join(map(str, targets)), str(siginfo_created))) 144 self.assertTrue(siginfo_created, msg="Could not find sstate .siginfo files for: %s (%s)" % (', '.join(map(str, targets)), str(siginfo_created)))
106 145
107 bitbake(['-ccleansstate'] + targets) 146 bitbake(['-ccleansstate'] + targets)
108 tgz_removed = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific, distro_nonspecific) 147 archives_removed = self.search_sstate('|'.join(map(str, [s + r'.*?\.tar.zst$' for s in targets])), distro_specific, distro_nonspecific)
109 self.assertTrue(not tgz_removed, msg="do_cleansstate didn't remove .tgz sstate files for: %s (%s)" % (', '.join(map(str, targets)), str(tgz_removed))) 148 self.assertTrue(not archives_removed, msg="do_cleansstate didn't remove .tar.zst sstate files for: %s (%s)" % (', '.join(map(str, targets)), str(archives_removed)))
110
111 def test_cleansstate_task_distro_specific_nonspecific(self):
112 targets = ['binutils-cross-'+ self.tune_arch, 'binutils-native']
113 targets.append('linux-libc-headers')
114 self.run_test_cleansstate_task(targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True)
115
116 def test_cleansstate_task_distro_nonspecific(self):
117 self.run_test_cleansstate_task(['linux-libc-headers'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True)
118
119 def test_cleansstate_task_distro_specific(self):
120 targets = ['binutils-cross-'+ self.tune_arch, 'binutils-native']
121 targets.append('linux-libc-headers')
122 self.run_test_cleansstate_task(targets, distro_specific=True, distro_nonspecific=False, temp_sstate_location=True)
123
124 149
125 # Test rebuilding of distro-specific sstate files 150 # Test rebuilding of distro-specific sstate files
126 def run_test_rebuild_distro_specific_sstate(self, targets, temp_sstate_location=True): 151 def run_test_rebuild_distro_specific_sstate(self, targets, temp_sstate_location=True):
@@ -129,15 +154,15 @@ class SStateTests(SStateBase):
129 bitbake(['-ccleansstate'] + targets) 154 bitbake(['-ccleansstate'] + targets)
130 155
131 bitbake(targets) 156 bitbake(targets)
132 results = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific=False, distro_nonspecific=True) 157 results = self.search_sstate('|'.join(map(str, [s + r'.*?\.tar.zst$' for s in targets])), distro_specific=False, distro_nonspecific=True)
133 filtered_results = [] 158 filtered_results = []
134 for r in results: 159 for r in results:
135 if r.endswith(("_populate_lic.tgz", "_populate_lic.tgz.siginfo")): 160 if r.endswith(("_populate_lic.tar.zst", "_populate_lic.tar.zst.siginfo")):
136 continue 161 continue
137 filtered_results.append(r) 162 filtered_results.append(r)
138 self.assertTrue(filtered_results == [], msg="Found distro non-specific sstate for: %s (%s)" % (', '.join(map(str, targets)), str(filtered_results))) 163 self.assertTrue(filtered_results == [], msg="Found distro non-specific sstate for: %s (%s)" % (', '.join(map(str, targets)), str(filtered_results)))
139 file_tracker_1 = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific=True, distro_nonspecific=False) 164 file_tracker_1 = self.search_sstate('|'.join(map(str, [s + r'.*?\.tar.zst$' for s in targets])), distro_specific=True, distro_nonspecific=False)
140 self.assertTrue(len(file_tracker_1) >= len(targets), msg = "Not all sstate files ware created for: %s" % ', '.join(map(str, targets))) 165 self.assertTrue(len(file_tracker_1) >= len(targets), msg = "Not all sstate files were created for: %s" % ', '.join(map(str, targets)))
141 166
142 self.track_for_cleanup(self.distro_specific_sstate + "_old") 167 self.track_for_cleanup(self.distro_specific_sstate + "_old")
143 shutil.copytree(self.distro_specific_sstate, self.distro_specific_sstate + "_old") 168 shutil.copytree(self.distro_specific_sstate, self.distro_specific_sstate + "_old")
@@ -145,15 +170,114 @@ class SStateTests(SStateBase):
145 170
146 bitbake(['-cclean'] + targets) 171 bitbake(['-cclean'] + targets)
147 bitbake(targets) 172 bitbake(targets)
148 file_tracker_2 = self.search_sstate('|'.join(map(str, [s + r'.*?\.tgz$' for s in targets])), distro_specific=True, distro_nonspecific=False) 173 file_tracker_2 = self.search_sstate('|'.join(map(str, [s + r'.*?\.tar.zst$' for s in targets])), distro_specific=True, distro_nonspecific=False)
149 self.assertTrue(len(file_tracker_2) >= len(targets), msg = "Not all sstate files ware created for: %s" % ', '.join(map(str, targets))) 174 self.assertTrue(len(file_tracker_2) >= len(targets), msg = "Not all sstate files were created for: %s" % ', '.join(map(str, targets)))
150 175
151 not_recreated = [x for x in file_tracker_1 if x not in file_tracker_2] 176 not_recreated = [x for x in file_tracker_1 if x not in file_tracker_2]
152 self.assertTrue(not_recreated == [], msg="The following sstate files ware not recreated: %s" % ', '.join(map(str, not_recreated))) 177 self.assertTrue(not_recreated == [], msg="The following sstate files were not recreated: %s" % ', '.join(map(str, not_recreated)))
153 178
154 created_once = [x for x in file_tracker_2 if x not in file_tracker_1] 179 created_once = [x for x in file_tracker_2 if x not in file_tracker_1]
155 self.assertTrue(created_once == [], msg="The following sstate files ware created only in the second run: %s" % ', '.join(map(str, created_once))) 180 self.assertTrue(created_once == [], msg="The following sstate files were created only in the second run: %s" % ', '.join(map(str, created_once)))
181
182 def sstate_common_samesigs(self, configA, configB, allarch=False):
183
184 self.write_config(configA)
185 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
186 bitbake("world meta-toolchain -S none")
187 self.write_config(configB)
188 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
189 bitbake("world meta-toolchain -S none")
190
191 def get_files(d, result):
192 for root, dirs, files in os.walk(d):
193 for name in files:
194 if "meta-environment" in root or "cross-canadian" in root:
195 continue
196 if "do_build" not in name:
197 # 1.4.1+gitAUTOINC+302fca9f4c-r0.do_package_write_ipk.sigdata.f3a2a38697da743f0dbed8b56aafcf79
198 (_, task, _, shash) = name.rsplit(".", 3)
199 result[os.path.join(os.path.basename(root), task)] = shash
200
201 files1 = {}
202 files2 = {}
203 subdirs = sorted(glob.glob(self.topdir + "/tmp-sstatesamehash/stamps/*-nativesdk*-linux"))
204 if allarch:
205 subdirs.extend(sorted(glob.glob(self.topdir + "/tmp-sstatesamehash/stamps/all-*-linux")))
206
207 for subdir in subdirs:
208 nativesdkdir = os.path.basename(subdir)
209 get_files(self.topdir + "/tmp-sstatesamehash/stamps/" + nativesdkdir, files1)
210 get_files(self.topdir + "/tmp-sstatesamehash2/stamps/" + nativesdkdir, files2)
211
212 self.maxDiff = None
213 self.assertEqual(files1, files2)
214
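
A brief illustration (not part of the patch) of how get_files() inside sstate_common_samesigs above extracts the task name and signature hash from a stamp file name; the example name is the one quoted in the comment above.

# Parse a stamp file name into (task, signature hash) via rsplit, as above.
name = "1.4.1+gitAUTOINC+302fca9f4c-r0.do_package_write_ipk.sigdata.f3a2a38697da743f0dbed8b56aafcf79"
(_, task, _, shash) = name.rsplit(".", 3)
assert task == "do_package_write_ipk"
assert shash == "f3a2a38697da743f0dbed8b56aafcf79"
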
215class SStateTests(SStateBase):
216 def test_autorev_sstate_works(self):
217 # Test that a git repository which changes is correctly handled by SRCREV = ${AUTOREV}
156 218
219 tempdir = tempfile.mkdtemp(prefix='sstate_autorev')
220 tempdldir = tempfile.mkdtemp(prefix='sstate_autorev_dldir')
221 self.track_for_cleanup(tempdir)
222 self.track_for_cleanup(tempdldir)
223 create_temp_layer(tempdir, 'selftestrecipetool')
224 self.add_command_to_tearDown('bitbake-layers remove-layer %s' % tempdir)
225 self.append_config("DL_DIR = \"%s\"" % tempdldir)
226 runCmd('bitbake-layers add-layer %s' % tempdir)
227
228 # Use dbus-wait as a local git repo we can add a commit between two builds in
229 pn = 'dbus-wait'
230 srcrev = '6cc6077a36fe2648a5f993fe7c16c9632f946517'
231 url = 'git://git.yoctoproject.org/dbus-wait'
232 result = runCmd('git clone %s noname' % url, cwd=tempdir)
233 srcdir = os.path.join(tempdir, 'noname')
234 result = runCmd('git reset --hard %s' % srcrev, cwd=srcdir)
235 self.assertTrue(os.path.isfile(os.path.join(srcdir, 'configure.ac')), 'Unable to find configure script in source directory')
236
237 recipefile = os.path.join(tempdir, "recipes-test", "dbus-wait-test", 'dbus-wait-test_git.bb')
238 os.makedirs(os.path.dirname(recipefile))
239 srcuri = 'git://' + srcdir + ';protocol=file;branch=master'
240 result = runCmd(['recipetool', 'create', '-o', recipefile, srcuri])
241 self.assertTrue(os.path.isfile(recipefile), 'recipetool did not create recipe file; output:\n%s' % result.output)
242
243 with open(recipefile, 'a') as f:
244 f.write('SRCREV = "${AUTOREV}"\n')
245 f.write('PV = "1.0"\n')
246
247 bitbake("dbus-wait-test -c fetch")
248 with open(os.path.join(srcdir, "bar.txt"), "w") as f:
249 f.write("foo")
250 result = runCmd('git add bar.txt; git commit -asm "add bar"', cwd=srcdir)
251 bitbake("dbus-wait-test -c unpack")
252
253class SStateCreation(SStateBase):
254 def test_sstate_creation_distro_specific_pass(self):
255 self.run_test_sstate_creation(['binutils-cross-'+ self.tune_arch, 'binutils-native'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True)
256
257 def test_sstate_creation_distro_specific_fail(self):
258 self.run_test_sstate_creation(['binutils-cross-'+ self.tune_arch, 'binutils-native'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True, should_pass=False)
259
260 def test_sstate_creation_distro_nonspecific_pass(self):
261 self.run_test_sstate_creation(['linux-libc-headers'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True)
262
263 def test_sstate_creation_distro_nonspecific_fail(self):
264 self.run_test_sstate_creation(['linux-libc-headers'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True, should_pass=False)
265
266class SStateCleanup(SStateBase):
267 def test_cleansstate_task_distro_specific_nonspecific(self):
268 targets = ['binutils-cross-'+ self.tune_arch, 'binutils-native']
269 targets.append('linux-libc-headers')
270 self.run_test_cleansstate_task(targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True)
271
272 def test_cleansstate_task_distro_nonspecific(self):
273 self.run_test_cleansstate_task(['linux-libc-headers'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True)
274
275 def test_cleansstate_task_distro_specific(self):
276 targets = ['binutils-cross-'+ self.tune_arch, 'binutils-native']
277 targets.append('linux-libc-headers')
278 self.run_test_cleansstate_task(targets, distro_specific=True, distro_nonspecific=False, temp_sstate_location=True)
279
280class SStateDistroTests(SStateBase):
157 def test_rebuild_distro_specific_sstate_cross_native_targets(self): 281 def test_rebuild_distro_specific_sstate_cross_native_targets(self):
158 self.run_test_rebuild_distro_specific_sstate(['binutils-cross-' + self.tune_arch, 'binutils-native'], temp_sstate_location=True) 282 self.run_test_rebuild_distro_specific_sstate(['binutils-cross-' + self.tune_arch, 'binutils-native'], temp_sstate_location=True)
159 283
@@ -163,48 +287,48 @@ class SStateTests(SStateBase):
163 def test_rebuild_distro_specific_sstate_native_target(self): 287 def test_rebuild_distro_specific_sstate_native_target(self):
164 self.run_test_rebuild_distro_specific_sstate(['binutils-native'], temp_sstate_location=True) 288 self.run_test_rebuild_distro_specific_sstate(['binutils-native'], temp_sstate_location=True)
165 289
166 290class SStateCacheManagement(SStateBase):
167 # Test the sstate-cache-management script. Each element in the global_config list is used with the corresponding element in the target_config list 291 # Test the sstate-cache-management script. Each element in the global_config list is used with the corresponding element in the target_config list
168 # global_config elements are expected to not generate any sstate files that would be removed by sstate-cache-management.sh (such as changing the value of MACHINE) 292 # global_config elements are expected to not generate any sstate files that would be removed by sstate-cache-management.py (such as changing the value of MACHINE)
169 def run_test_sstate_cache_management_script(self, target, global_config=[''], target_config=[''], ignore_patterns=[]): 293 def run_test_sstate_cache_management_script(self, target, global_config=[''], target_config=[''], ignore_patterns=[]):
170 self.assertTrue(global_config) 294 self.assertTrue(global_config)
171 self.assertTrue(target_config) 295 self.assertTrue(target_config)
172 self.assertTrue(len(global_config) == len(target_config), msg='Lists global_config and target_config should have the same number of elements') 296 self.assertTrue(len(global_config) == len(target_config), msg='Lists global_config and target_config should have the same number of elements')
173 self.config_sstate(temp_sstate_location=True, add_local_mirrors=[self.sstate_path])
174 297
175 # If buildhistory is enabled, we need to disable version-going-backwards 298 for idx in range(len(target_config)):
176 # QA checks for this test. It may report errors otherwise. 299 self.append_config(global_config[idx])
177 self.append_config('ERROR_QA_remove = "version-going-backwards"') 300 self.append_recipeinc(target, target_config[idx])
301 bitbake(target)
302 self.remove_config(global_config[idx])
303 self.remove_recipeinc(target, target_config[idx])
304
305 self.config_sstate(temp_sstate_location=True, add_local_mirrors=[self.sstate_path])
178 306
179 # For not this only checks if random sstate tasks are handled correctly as a group. 307 # For now this only checks if random sstate tasks are handled correctly as a group.
180 # In the future we should add control over what tasks we check for. 308 # In the future we should add control over what tasks we check for.
181 309
182 sstate_archs_list = []
183 expected_remaining_sstate = [] 310 expected_remaining_sstate = []
184 for idx in range(len(target_config)): 311 for idx in range(len(target_config)):
185 self.append_config(global_config[idx]) 312 self.append_config(global_config[idx])
186 self.append_recipeinc(target, target_config[idx]) 313 self.append_recipeinc(target, target_config[idx])
187 sstate_arch = get_bb_var('SSTATE_PKGARCH', target)
188 if not sstate_arch in sstate_archs_list:
189 sstate_archs_list.append(sstate_arch)
190 if target_config[idx] == target_config[-1]: 314 if target_config[idx] == target_config[-1]:
191 target_sstate_before_build = self.search_sstate(target + r'.*?\.tgz$') 315 target_sstate_before_build = self.search_sstate(target + r'.*?\.tar.zst$')
192 bitbake("-cclean %s" % target) 316 bitbake("-cclean %s" % target)
193 result = bitbake(target, ignore_status=True) 317 result = bitbake(target, ignore_status=True)
194 if target_config[idx] == target_config[-1]: 318 if target_config[idx] == target_config[-1]:
195 target_sstate_after_build = self.search_sstate(target + r'.*?\.tgz$') 319 target_sstate_after_build = self.search_sstate(target + r'.*?\.tar.zst$')
196 expected_remaining_sstate += [x for x in target_sstate_after_build if x not in target_sstate_before_build if not any(pattern in x for pattern in ignore_patterns)] 320 expected_remaining_sstate += [x for x in target_sstate_after_build if x not in target_sstate_before_build if not any(pattern in x for pattern in ignore_patterns)]
197 self.remove_config(global_config[idx]) 321 self.remove_config(global_config[idx])
198 self.remove_recipeinc(target, target_config[idx]) 322 self.remove_recipeinc(target, target_config[idx])
199 self.assertEqual(result.status, 0, msg = "build of %s failed with %s" % (target, result.output)) 323 self.assertEqual(result.status, 0, msg = "build of %s failed with %s" % (target, result.output))
200 324
201 runCmd("sstate-cache-management.sh -y --cache-dir=%s --remove-duplicated --extra-archs=%s" % (self.sstate_path, ','.join(map(str, sstate_archs_list)))) 325 runCmd("sstate-cache-management.py -y --cache-dir=%s --remove-duplicated" % (self.sstate_path))
202 actual_remaining_sstate = [x for x in self.search_sstate(target + r'.*?\.tgz$') if not any(pattern in x for pattern in ignore_patterns)] 326 actual_remaining_sstate = [x for x in self.search_sstate(target + r'.*?\.tar.zst$') if not any(pattern in x for pattern in ignore_patterns)]
203 327
204 actual_not_expected = [x for x in actual_remaining_sstate if x not in expected_remaining_sstate] 328 actual_not_expected = [x for x in actual_remaining_sstate if x not in expected_remaining_sstate]
205 self.assertFalse(actual_not_expected, msg="Files should have been removed but ware not: %s" % ', '.join(map(str, actual_not_expected))) 329 self.assertFalse(actual_not_expected, msg="Files should have been removed but were not: %s" % ', '.join(map(str, actual_not_expected)))
206 expected_not_actual = [x for x in expected_remaining_sstate if x not in actual_remaining_sstate] 330 expected_not_actual = [x for x in expected_remaining_sstate if x not in actual_remaining_sstate]
207 self.assertFalse(expected_not_actual, msg="Extra files ware removed: %s" ', '.join(map(str, expected_not_actual))) 331 self.assertFalse(expected_not_actual, msg="Extra files were removed: %s" ', '.join(map(str, expected_not_actual)))
208 332
209 def test_sstate_cache_management_script_using_pr_1(self): 333 def test_sstate_cache_management_script_using_pr_1(self):
210 global_config = [] 334 global_config = []
@@ -242,6 +366,7 @@ class SStateTests(SStateBase):
242 target_config.append('') 366 target_config.append('')
243 self.run_test_sstate_cache_management_script('m4', global_config, target_config, ignore_patterns=['populate_lic']) 367 self.run_test_sstate_cache_management_script('m4', global_config, target_config, ignore_patterns=['populate_lic'])
244 368
369class SStateHashSameSigs(SStateBase):
245 def test_sstate_32_64_same_hash(self): 370 def test_sstate_32_64_same_hash(self):
246 """ 371 """
247 The sstate checksums for both native and target should not vary whether 372 The sstate checksums for both native and target should not vary whether
@@ -261,7 +386,7 @@ PACKAGE_CLASSES = "package_rpm package_ipk package_deb"
261BB_SIGNATURE_HANDLER = "OEBasicHash" 386BB_SIGNATURE_HANDLER = "OEBasicHash"
262""") 387""")
263 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash") 388 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
264 bitbake("core-image-sato -S none") 389 bitbake("core-image-weston -S none")
265 self.write_config(""" 390 self.write_config("""
266MACHINE = "qemux86" 391MACHINE = "qemux86"
267TMPDIR = "${TOPDIR}/tmp-sstatesamehash2" 392TMPDIR = "${TOPDIR}/tmp-sstatesamehash2"
@@ -273,12 +398,12 @@ PACKAGE_CLASSES = "package_rpm package_ipk package_deb"
273BB_SIGNATURE_HANDLER = "OEBasicHash" 398BB_SIGNATURE_HANDLER = "OEBasicHash"
274""") 399""")
275 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2") 400 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
276 bitbake("core-image-sato -S none") 401 bitbake("core-image-weston -S none")
277 402
278 def get_files(d): 403 def get_files(d):
279 f = [] 404 f = []
280 for root, dirs, files in os.walk(d): 405 for root, dirs, files in os.walk(d):
281 if "core-image-sato" in root: 406 if "core-image-weston" in root:
282 # SDKMACHINE changing will change 407 # SDKMACHINE changing will change
283 # do_rootfs/do_testimage/do_build stamps of images which 408 # do_rootfs/do_testimage/do_build stamps of images which
284 # is safe to ignore. 409 # is safe to ignore.
@@ -306,7 +431,7 @@ NATIVELSBSTRING = \"DistroA\"
306BB_SIGNATURE_HANDLER = "OEBasicHash" 431BB_SIGNATURE_HANDLER = "OEBasicHash"
307""") 432""")
308 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash") 433 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
309 bitbake("core-image-sato -S none") 434 bitbake("core-image-weston -S none")
310 self.write_config(""" 435 self.write_config("""
311TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" 436TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
312TCLIBCAPPEND = \"\" 437TCLIBCAPPEND = \"\"
@@ -314,7 +439,7 @@ NATIVELSBSTRING = \"DistroB\"
314BB_SIGNATURE_HANDLER = "OEBasicHash" 439BB_SIGNATURE_HANDLER = "OEBasicHash"
315""") 440""")
316 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2") 441 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
317 bitbake("core-image-sato -S none") 442 bitbake("core-image-weston -S none")
318 443
319 def get_files(d): 444 def get_files(d):
320 f = [] 445 f = []
@@ -327,6 +452,7 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
327 self.maxDiff = None 452 self.maxDiff = None
328 self.assertCountEqual(files1, files2) 453 self.assertCountEqual(files1, files2)
329 454
455class SStateHashSameSigs2(SStateBase):
330 def test_sstate_allarch_samesigs(self): 456 def test_sstate_allarch_samesigs(self):
331 """ 457 """
332 The sstate checksums of allarch packages should be independent of whichever 458 The sstate checksums of allarch packages should be independent of whichever
@@ -341,13 +467,15 @@ TCLIBCAPPEND = \"\"
341MACHINE = \"qemux86-64\" 467MACHINE = \"qemux86-64\"
342BB_SIGNATURE_HANDLER = "OEBasicHash" 468BB_SIGNATURE_HANDLER = "OEBasicHash"
343""" 469"""
470        # OLDEST_KERNEL is arch-specific, so set it to a different value here for testing
344 configB = """ 471 configB = """
345TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" 472TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\"
346TCLIBCAPPEND = \"\" 473TCLIBCAPPEND = \"\"
347MACHINE = \"qemuarm\" 474MACHINE = \"qemuarm\"
475OLDEST_KERNEL = \"3.3.0\"
348BB_SIGNATURE_HANDLER = "OEBasicHash" 476BB_SIGNATURE_HANDLER = "OEBasicHash"
349""" 477"""
350 self.sstate_allarch_samesigs(configA, configB) 478 self.sstate_common_samesigs(configA, configB, allarch=True)
351 479
352 def test_sstate_nativesdk_samesigs_multilib(self): 480 def test_sstate_nativesdk_samesigs_multilib(self):
353 """ 481 """
@@ -360,7 +488,7 @@ TCLIBCAPPEND = \"\"
360MACHINE = \"qemux86-64\" 488MACHINE = \"qemux86-64\"
361require conf/multilib.conf 489require conf/multilib.conf
362MULTILIBS = \"multilib:lib32\" 490MULTILIBS = \"multilib:lib32\"
363DEFAULTTUNE_virtclass-multilib-lib32 = \"x86\" 491DEFAULTTUNE:virtclass-multilib-lib32 = \"x86\"
364BB_SIGNATURE_HANDLER = "OEBasicHash" 492BB_SIGNATURE_HANDLER = "OEBasicHash"
365""" 493"""
366 configB = """ 494 configB = """
@@ -371,36 +499,9 @@ require conf/multilib.conf
371MULTILIBS = \"\" 499MULTILIBS = \"\"
372BB_SIGNATURE_HANDLER = "OEBasicHash" 500BB_SIGNATURE_HANDLER = "OEBasicHash"
373""" 501"""
374 self.sstate_allarch_samesigs(configA, configB) 502 self.sstate_common_samesigs(configA, configB)
375
376 def sstate_allarch_samesigs(self, configA, configB):
377
378 self.write_config(configA)
379 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
380 bitbake("world meta-toolchain -S none")
381 self.write_config(configB)
382 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
383 bitbake("world meta-toolchain -S none")
384
385 def get_files(d):
386 f = {}
387 for root, dirs, files in os.walk(d):
388 for name in files:
389 if "meta-environment" in root or "cross-canadian" in root:
390 continue
391 if "do_build" not in name:
392 # 1.4.1+gitAUTOINC+302fca9f4c-r0.do_package_write_ipk.sigdata.f3a2a38697da743f0dbed8b56aafcf79
393 (_, task, _, shash) = name.rsplit(".", 3)
394 f[os.path.join(os.path.basename(root), task)] = shash
395 return f
396
397 nativesdkdir = os.path.basename(glob.glob(self.topdir + "/tmp-sstatesamehash/stamps/*-nativesdk*-linux")[0])
398
399 files1 = get_files(self.topdir + "/tmp-sstatesamehash/stamps/" + nativesdkdir)
400 files2 = get_files(self.topdir + "/tmp-sstatesamehash2/stamps/" + nativesdkdir)
401 self.maxDiff = None
402 self.assertEqual(files1, files2)
403 503
504class SStateHashSameSigs3(SStateBase):
404 def test_sstate_sametune_samesigs(self): 505 def test_sstate_sametune_samesigs(self):
405 """ 506 """
406 The sstate checksums of two identical machines (using the same tune) should be the 507 The sstate checksums of two identical machines (using the same tune) should be the
@@ -414,7 +515,7 @@ TCLIBCAPPEND = \"\"
414MACHINE = \"qemux86\" 515MACHINE = \"qemux86\"
415require conf/multilib.conf 516require conf/multilib.conf
416MULTILIBS = "multilib:lib32" 517MULTILIBS = "multilib:lib32"
417DEFAULTTUNE_virtclass-multilib-lib32 = "x86" 518DEFAULTTUNE:virtclass-multilib-lib32 = "x86"
418BB_SIGNATURE_HANDLER = "OEBasicHash" 519BB_SIGNATURE_HANDLER = "OEBasicHash"
419""") 520""")
420 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash") 521 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
@@ -425,7 +526,7 @@ TCLIBCAPPEND = \"\"
425MACHINE = \"qemux86copy\" 526MACHINE = \"qemux86copy\"
426require conf/multilib.conf 527require conf/multilib.conf
427MULTILIBS = "multilib:lib32" 528MULTILIBS = "multilib:lib32"
428DEFAULTTUNE_virtclass-multilib-lib32 = "x86" 529DEFAULTTUNE:virtclass-multilib-lib32 = "x86"
429BB_SIGNATURE_HANDLER = "OEBasicHash" 530BB_SIGNATURE_HANDLER = "OEBasicHash"
430""") 531""")
431 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2") 532 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
@@ -435,7 +536,7 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
435 f = [] 536 f = []
436 for root, dirs, files in os.walk(d): 537 for root, dirs, files in os.walk(d):
437 for name in files: 538 for name in files:
438 if "meta-environment" in root or "cross-canadian" in root: 539 if "meta-environment" in root or "cross-canadian" in root or 'meta-ide-support' in root:
439 continue 540 continue
440 if "qemux86copy-" in root or "qemux86-" in root: 541 if "qemux86copy-" in root or "qemux86-" in root:
441 continue 542 continue
@@ -462,7 +563,7 @@ TCLIBCAPPEND = \"\"
462MACHINE = \"qemux86\" 563MACHINE = \"qemux86\"
463require conf/multilib.conf 564require conf/multilib.conf
464MULTILIBS = "multilib:lib32" 565MULTILIBS = "multilib:lib32"
465DEFAULTTUNE_virtclass-multilib-lib32 = "x86" 566DEFAULTTUNE:virtclass-multilib-lib32 = "x86"
466BB_SIGNATURE_HANDLER = "OEBasicHash" 567BB_SIGNATURE_HANDLER = "OEBasicHash"
467""") 568""")
468 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash") 569 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
@@ -488,7 +589,7 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
488 self.maxDiff = None 589 self.maxDiff = None
489 self.assertCountEqual(files1, files2) 590 self.assertCountEqual(files1, files2)
490 591
491 592class SStateHashSameSigs4(SStateBase):
492 def test_sstate_noop_samesigs(self): 593 def test_sstate_noop_samesigs(self):
493 """ 594 """
494 The sstate checksums of two builds with these variables changed or 595 The sstate checksums of two builds with these variables changed or
@@ -503,7 +604,7 @@ PARALLEL_MAKE = "-j 1"
503DL_DIR = "${TOPDIR}/download1" 604DL_DIR = "${TOPDIR}/download1"
504TIME = "111111" 605TIME = "111111"
505DATE = "20161111" 606DATE = "20161111"
506INHERIT_remove = "buildstats-summary buildhistory uninative" 607INHERIT:remove = "buildstats-summary buildhistory uninative"
507http_proxy = "" 608http_proxy = ""
508BB_SIGNATURE_HANDLER = "OEBasicHash" 609BB_SIGNATURE_HANDLER = "OEBasicHash"
509""") 610""")
@@ -519,7 +620,7 @@ DL_DIR = "${TOPDIR}/download2"
519TIME = "222222" 620TIME = "222222"
520DATE = "20161212" 621DATE = "20161212"
521# Always remove uninative as we're changing proxies 622# Always remove uninative as we're changing proxies
522INHERIT_remove = "uninative" 623INHERIT:remove = "uninative"
523INHERIT += "buildstats-summary buildhistory" 624INHERIT += "buildstats-summary buildhistory"
524http_proxy = "http://example.com/" 625http_proxy = "http://example.com/"
525BB_SIGNATURE_HANDLER = "OEBasicHash" 626BB_SIGNATURE_HANDLER = "OEBasicHash"
@@ -573,3 +674,335 @@ BB_SIGNATURE_HANDLER = "OEBasicHash"
573 compare_sigfiles(rest, files1, files2, compare=False) 674 compare_sigfiles(rest, files1, files2, compare=False)
574 675
575 self.fail("sstate hashes not identical.") 676 self.fail("sstate hashes not identical.")
677
678 def test_sstate_movelayer_samesigs(self):
679 """
680 The sstate checksums of two builds with the same oe-core layer in two
681 different locations should be the same.
682 """
683 core_layer = os.path.join(
684 self.tc.td["COREBASE"], 'meta')
685 copy_layer_1 = self.topdir + "/meta-copy1/meta"
686 copy_layer_2 = self.topdir + "/meta-copy2/meta"
687
688 oe.path.copytree(core_layer, copy_layer_1)
689 os.symlink(os.path.dirname(core_layer) + "/scripts", self.topdir + "/meta-copy1/scripts")
690 self.write_config("""
691TMPDIR = "${TOPDIR}/tmp-sstatesamehash"
692""")
693 bblayers_conf = 'BBLAYERS += "%s"\nBBLAYERS:remove = "%s"' % (copy_layer_1, core_layer)
694 self.write_bblayers_config(bblayers_conf)
695 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash")
696 bitbake("bash -S none")
697
698 oe.path.copytree(core_layer, copy_layer_2)
699 os.symlink(os.path.dirname(core_layer) + "/scripts", self.topdir + "/meta-copy2/scripts")
700 self.write_config("""
701TMPDIR = "${TOPDIR}/tmp-sstatesamehash2"
702""")
703 bblayers_conf = 'BBLAYERS += "%s"\nBBLAYERS:remove = "%s"' % (copy_layer_2, core_layer)
704 self.write_bblayers_config(bblayers_conf)
705 self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2")
706 bitbake("bash -S none")
707
708 def get_files(d):
709 f = []
710 for root, dirs, files in os.walk(d):
711 for name in files:
712 f.append(os.path.join(root, name))
713 return f
714 files1 = get_files(self.topdir + "/tmp-sstatesamehash/stamps")
715 files2 = get_files(self.topdir + "/tmp-sstatesamehash2/stamps")
716 files2 = [x.replace("tmp-sstatesamehash2", "tmp-sstatesamehash") for x in files2]
717 self.maxDiff = None
718 self.assertCountEqual(files1, files2)
719
720class SStateFindSiginfo(SStateBase):
721 def test_sstate_compare_sigfiles_and_find_siginfo(self):
722 """
723        Test the functionality of find_siginfo: the basic function and its use as a callback in compare_sigfiles
724 """
725 self.write_config("""
726TMPDIR = \"${TOPDIR}/tmp-sstates-findsiginfo\"
727TCLIBCAPPEND = \"\"
728MACHINE = \"qemux86-64\"
729require conf/multilib.conf
730MULTILIBS = "multilib:lib32"
731DEFAULTTUNE:virtclass-multilib-lib32 = "x86"
732BB_SIGNATURE_HANDLER = "OEBasicHash"
733""")
734 self.track_for_cleanup(self.topdir + "/tmp-sstates-findsiginfo")
735
736 pns = ["binutils", "binutils-native", "lib32-binutils"]
737 target_configs = [
738"""
739TMPVAL1 = "tmpval1"
740TMPVAL2 = "tmpval2"
741do_tmptask1() {
742 echo ${TMPVAL1}
743}
744do_tmptask2() {
745 echo ${TMPVAL2}
746}
747addtask do_tmptask1
748addtask tmptask2 before do_tmptask1
749""",
750"""
751TMPVAL3 = "tmpval3"
752TMPVAL4 = "tmpval4"
753do_tmptask1() {
754 echo ${TMPVAL3}
755}
756do_tmptask2() {
757 echo ${TMPVAL4}
758}
759addtask do_tmptask1
760addtask tmptask2 before do_tmptask1
761"""
762 ]
763
764 for target_config in target_configs:
765 self.write_recipeinc("binutils", target_config)
766 for pn in pns:
767 bitbake("%s -c do_tmptask1 -S none" % pn)
768 self.delete_recipeinc("binutils")
769
770 with bb.tinfoil.Tinfoil() as tinfoil:
771 tinfoil.prepare(config_only=True)
772
773 def find_siginfo(pn, taskname, sigs=None):
774 result = None
775 command_complete = False
776 tinfoil.set_event_mask(["bb.event.FindSigInfoResult",
777 "bb.command.CommandCompleted"])
778 ret = tinfoil.run_command("findSigInfo", pn, taskname, sigs)
779 if ret:
780 while result is None or not command_complete:
781 event = tinfoil.wait_event(1)
782 if event:
783 if isinstance(event, bb.command.CommandCompleted):
784 command_complete = True
785 elif isinstance(event, bb.event.FindSigInfoResult):
786 result = event.result
787 return result
788
789 def recursecb(key, hash1, hash2):
790 nonlocal recursecb_count
791 recursecb_count += 1
792 hashes = [hash1, hash2]
793 hashfiles = find_siginfo(key, None, hashes)
794 self.assertCountEqual(hashes, hashfiles)
795 bb.siggen.compare_sigfiles(hashfiles[hash1]['path'], hashfiles[hash2]['path'], recursecb)
796
797 for pn in pns:
798 recursecb_count = 0
799 matches = find_siginfo(pn, "do_tmptask1")
800 self.assertGreaterEqual(len(matches), 2)
801 latesthashes = sorted(matches.keys(), key=lambda h: matches[h]['time'])[-2:]
802 bb.siggen.compare_sigfiles(matches[latesthashes[-2]]['path'], matches[latesthashes[-1]]['path'], recursecb)
803 self.assertEqual(recursecb_count,1)
804
805class SStatePrintdiff(SStateBase):
806 def run_test_printdiff_changerecipe(self, target, change_recipe, change_bbtask, change_content, expected_sametmp_output, expected_difftmp_output):
807 import time
808 self.write_config("""
809TMPDIR = "${{TOPDIR}}/tmp-sstateprintdiff-sametmp-{}"
810""".format(time.time()))
811 # Use --runall build to ensure any indirect sstate is created, e.g. tzcode-native on both x86 and
812 # aarch64 hosts, since only allarch target recipes depend upon it and it may not be built otherwise.
813 # Running "bitbake -c cleansstate tzcode-native" would, for example, cause some of these tests to error.
814 bitbake("--runall build --runall deploy_source_date_epoch {}".format(target))
815 bitbake("-S none {}".format(target))
816 bitbake(change_bbtask)
817 self.write_recipeinc(change_recipe, change_content)
818 result_sametmp = bitbake("-S printdiff {}".format(target))
819
820 self.write_config("""
821TMPDIR = "${{TOPDIR}}/tmp-sstateprintdiff-difftmp-{}"
822""".format(time.time()))
823 result_difftmp = bitbake("-S printdiff {}".format(target))
824
825 self.delete_recipeinc(change_recipe)
826 for item in expected_sametmp_output:
827 self.assertIn(item, result_sametmp.output, msg = "Item {} not found in output:\n{}".format(item, result_sametmp.output))
828 for item in expected_difftmp_output:
829 self.assertIn(item, result_difftmp.output, msg = "Item {} not found in output:\n{}".format(item, result_difftmp.output))
830
831 def run_test_printdiff_changeconfig(self, target, change_bbtasks, change_content, expected_sametmp_output, expected_difftmp_output):
832 import time
833 self.write_config("""
834TMPDIR = "${{TOPDIR}}/tmp-sstateprintdiff-sametmp-{}"
835""".format(time.time()))
836 bitbake("--runall build --runall deploy_source_date_epoch {}".format(target))
837 bitbake("-S none {}".format(target))
838 bitbake(" ".join(change_bbtasks))
839 self.append_config(change_content)
840 result_sametmp = bitbake("-S printdiff {}".format(target))
841
842 self.write_config("""
843TMPDIR = "${{TOPDIR}}/tmp-sstateprintdiff-difftmp-{}"
844""".format(time.time()))
845 self.append_config(change_content)
846 result_difftmp = bitbake("-S printdiff {}".format(target))
847
848 for item in expected_sametmp_output:
849 self.assertIn(item, result_sametmp.output, msg = "Item {} not found in output:\n{}".format(item, result_sametmp.output))
850 for item in expected_difftmp_output:
851 self.assertIn(item, result_difftmp.output, msg = "Item {} not found in output:\n{}".format(item, result_difftmp.output))
852
853
854 # Check if printdiff walks the full dependency chain from the image target to where the change is in a specific recipe
855 def test_image_minimal_vs_perlcross(self):
856 expected_output = ("Task perlcross-native:do_install couldn't be used from the cache because:",
857"We need hash",
858"most recent matching task was")
859 expected_sametmp_output = expected_output + (
860"Variable do_install value changed",
861'+ echo "this changes the task signature"')
862 expected_difftmp_output = expected_output
863
864 self.run_test_printdiff_changerecipe("core-image-minimal", "perlcross", "-c do_install perlcross-native",
865"""
866do_install:append() {
867 echo "this changes the task signature"
868}
869""",
870expected_sametmp_output, expected_difftmp_output)
871
872 # Check if changes to gcc-source (which uses tmp/work-shared) are correctly discovered
873 def test_gcc_runtime_vs_gcc_source(self):
874 gcc_source_pn = 'gcc-source-%s' % get_bb_vars(['PV'], 'gcc')['PV']
875
876 expected_output = ("Task {}:do_preconfigure couldn't be used from the cache because:".format(gcc_source_pn),
877"We need hash",
878"most recent matching task was")
879 expected_sametmp_output = expected_output + (
880"Variable do_preconfigure value changed",
881'+ print("this changes the task signature")')
882 expected_difftmp_output = expected_output
883
884 self.run_test_printdiff_changerecipe("gcc-runtime", "gcc-source", "-c do_preconfigure {}".format(gcc_source_pn),
885"""
886python do_preconfigure:append() {
887 print("this changes the task signature")
888}
889""",
890expected_sametmp_output, expected_difftmp_output)
891
892 # Check if changing a base-level task definition is reported against multiple core recipes using it
893 def test_image_minimal_vs_base_do_configure(self):
894 change_bbtasks = ('zstd-native:do_configure',
895'texinfo-dummy-native:do_configure',
896'ldconfig-native:do_configure',
897'gettext-minimal-native:do_configure',
898'tzcode-native:do_configure',
899'makedevs-native:do_configure',
900'pigz-native:do_configure',
901'update-rc.d-native:do_configure',
902'unzip-native:do_configure',
903'gnu-config-native:do_configure')
904
905 expected_output = ["Task {} couldn't be used from the cache because:".format(t) for t in change_bbtasks] + [
906"We need hash",
907"most recent matching task was"]
908
909 expected_sametmp_output = expected_output + [
910"Variable base_do_configure value changed",
911'+ echo "this changes base_do_configure() definiton "']
912 expected_difftmp_output = expected_output
913
914 self.run_test_printdiff_changeconfig("core-image-minimal",change_bbtasks,
915"""
916INHERIT += "base-do-configure-modified"
917""",
918expected_sametmp_output, expected_difftmp_output)
919
920@OETestTag("yocto-mirrors")
921class SStateMirrors(SStateBase):
922 def check_bb_output(self, output, exceptions, check_cdn):
923 def is_exception(object, exceptions):
924 for e in exceptions:
925 if re.search(e, object):
926 return True
927 return False
928
929 output_l = output.splitlines()
930 for l in output_l:
931 if l.startswith("Sstate summary"):
932 for idx, item in enumerate(l.split()):
933 if item == 'Missed':
934 missing_objects = int(l.split()[idx+1])
935 break
936 else:
937 self.fail("Did not find missing objects amount in sstate summary: {}".format(l))
938 break
939 else:
940 self.fail("Did not find 'Sstate summary' line in bitbake output")
941
942 failed_urls = []
943 failed_urls_extrainfo = []
944 for l in output_l:
945 if "SState: Unsuccessful fetch test for" in l and check_cdn:
946 missing_object = l.split()[6]
947 elif "SState: Looked for but didn't find file" in l and not check_cdn:
948 missing_object = l.split()[8]
949 else:
950 missing_object = None
951 if missing_object:
952 if not is_exception(missing_object, exceptions):
953 failed_urls.append(missing_object)
954 else:
955 missing_objects -= 1
956
957 if "urlopen failed for" in l and not is_exception(l, exceptions):
958 failed_urls_extrainfo.append(l)
959
960 self.assertEqual(len(failed_urls), missing_objects, "Amount of reported missing objects does not match failed URLs: {}\nFailed URLs:\n{}\nFetcher diagnostics:\n{}".format(missing_objects, "\n".join(failed_urls), "\n".join(failed_urls_extrainfo)))
961 self.assertEqual(len(failed_urls), 0, "Missing objects in the cache:\n{}\nFetcher diagnostics:\n{}".format("\n".join(failed_urls), "\n".join(failed_urls_extrainfo)))
962
963 def run_test(self, machine, targets, exceptions, check_cdn = True, ignore_errors = False):
964 # sstate is checked for existence of these, but they never get written out to begin with
965 exceptions += ["{}.*image_qa".format(t) for t in targets.split()]
966 exceptions += ["{}.*deploy_source_date_epoch".format(t) for t in targets.split()]
967 exceptions += ["{}.*image_complete".format(t) for t in targets.split()]
968 exceptions += ["linux-yocto.*shared_workdir"]
969 # these get influenced by IMAGE_FSTYPES tweaks in yocto-autobuilder-helper's config.json (on x86-64)
970 # additionally, they depend on noexec (thus, absent stamps) package, install, etc. image tasks,
971 # which makes tracing other changes difficult
972 exceptions += ["{}.*create_spdx".format(t) for t in targets.split()]
973 exceptions += ["{}.*create_runtime_spdx".format(t) for t in targets.split()]
974
975 if check_cdn:
976 self.config_sstate(True)
977 self.append_config("""
978MACHINE = "{}"
979BB_HASHSERVE_UPSTREAM = "hashserv.yocto.io:8687"
980SSTATE_MIRRORS ?= "file://.* http://cdn.jsdelivr.net/yocto/sstate/all/PATH;downloadfilename=PATH"
981""".format(machine))
982 else:
983 self.append_config("""
984MACHINE = "{}"
985""".format(machine))
986 result = bitbake("-DD -n {}".format(targets))
987 bitbake("-S none {}".format(targets))
988 if ignore_errors:
989 return
990 self.check_bb_output(result.output, exceptions, check_cdn)
991
992 def test_cdn_mirror_qemux86_64(self):
993 exceptions = []
994 self.run_test("qemux86-64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions, ignore_errors = True)
995 self.run_test("qemux86-64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions)
996
997 def test_cdn_mirror_qemuarm64(self):
998 exceptions = []
999 self.run_test("qemuarm64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions, ignore_errors = True)
1000 self.run_test("qemuarm64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions)
1001
1002 def test_local_cache_qemux86_64(self):
1003 exceptions = []
1004 self.run_test("qemux86-64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions, check_cdn = False)
1005
1006 def test_local_cache_qemuarm64(self):
1007 exceptions = []
1008 self.run_test("qemuarm64", "core-image-minimal core-image-full-cmdline core-image-sato-sdk", exceptions, check_cdn = False)
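
For reference, the find_siginfo() helper above follows bitbake's asynchronous tinfoil command/event pattern: mask the events of interest, issue a command, then poll wait_event() until both the result event and CommandCompleted have arrived. A minimal standalone sketch of that pattern follows (illustrative only, not part of the patch; the recipe and task names are placeholders, and the bitbake modules must already be on sys.path as they are inside oe-selftest):

import bb.tinfoil
import bb.event
import bb.command

with bb.tinfoil.Tinfoil() as tinfoil:
    tinfoil.prepare(config_only=True)
    # Only these two event types will be delivered by wait_event() below.
    tinfoil.set_event_mask(["bb.event.FindSigInfoResult",
                            "bb.command.CommandCompleted"])
    result = None
    done = False
    # "binutils"/"do_tmptask1" are placeholder arguments, as used in the tests above.
    if tinfoil.run_command("findSigInfo", "binutils", "do_tmptask1", None):
        # Open-ended loop, matching the simplified shape used in the test helper.
        while result is None or not done:
            event = tinfoil.wait_event(1)   # poll with a 1s timeout
            if isinstance(event, bb.command.CommandCompleted):
                done = True
            elif isinstance(event, bb.event.FindSigInfoResult):
                result = event.result
    # result maps signature hashes to siginfo metadata (e.g. 'path', 'time').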
diff --git a/meta/lib/oeqa/selftest/cases/sysroot.py b/meta/lib/oeqa/selftest/cases/sysroot.py
index 6e34927c90..ef854f6fee 100644
--- a/meta/lib/oeqa/selftest/cases/sysroot.py
+++ b/meta/lib/oeqa/selftest/cases/sysroot.py
@@ -1,11 +1,13 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import uuid 7import uuid
6 8
7from oeqa.selftest.case import OESelftestTestCase 9from oeqa.selftest.case import OESelftestTestCase
8from oeqa.utils.commands import bitbake 10from oeqa.utils.commands import bitbake
9 11
10class SysrootTests(OESelftestTestCase): 12class SysrootTests(OESelftestTestCase):
11 def test_sysroot_cleanup(self): 13 def test_sysroot_cleanup(self):
@@ -24,14 +26,61 @@ class SysrootTests(OESelftestTestCase):
24 self.write_config(""" 26 self.write_config("""
25PREFERRED_PROVIDER_virtual/sysroot-test = "sysroot-test-arch1" 27PREFERRED_PROVIDER_virtual/sysroot-test = "sysroot-test-arch1"
26MACHINE = "qemux86" 28MACHINE = "qemux86"
27TESTSTRING_pn-sysroot-test-arch1 = "%s" 29TESTSTRING:pn-sysroot-test-arch1 = "%s"
28TESTSTRING_pn-sysroot-test-arch2 = "%s" 30TESTSTRING:pn-sysroot-test-arch2 = "%s"
29""" % (uuid1, uuid2)) 31""" % (uuid1, uuid2))
30 bitbake("sysroot-test") 32 bitbake("sysroot-test")
31 self.write_config(""" 33 self.write_config("""
32PREFERRED_PROVIDER_virtual/sysroot-test = "sysroot-test-arch2" 34PREFERRED_PROVIDER_virtual/sysroot-test = "sysroot-test-arch2"
33MACHINE = "qemux86copy" 35MACHINE = "qemux86copy"
34TESTSTRING_pn-sysroot-test-arch1 = "%s" 36TESTSTRING:pn-sysroot-test-arch1 = "%s"
35TESTSTRING_pn-sysroot-test-arch2 = "%s" 37TESTSTRING:pn-sysroot-test-arch2 = "%s"
36""" % (uuid1, uuid2)) 38""" % (uuid1, uuid2))
37 bitbake("sysroot-test") 39 bitbake("sysroot-test")
40
41 def test_sysroot_max_shebang(self):
42 """
43 Summary: Check that the maximum shebang size check triggers. To confirm [YOCTO #11053] is closed.
44 Expected: Fail when a shebang bigger than the max shebang-size is reached.
45 Author: Paulo Neves <ptsneves@gmail.com>
46 """
47 expected = "maximum shebang size exceeded, the maximum size is 128. [shebang-size]"
48 res = bitbake("sysroot-shebang-test-native -c populate_sysroot", ignore_status=True)
49 self.assertTrue(expected in res.output, msg=res.output)
50 self.assertTrue(res.status != 0)
51
52 def test_sysroot_la(self):
53 """
54 Summary: Check that workdir paths are not contained in .la files.
55 Expected: Fail when a workdir path is found in the file content.
56 Author: Paulo Neves <ptsneves@gmail.com>
57 """
58 expected = "la-test.la failed sanity test (workdir) in path"
59
60 res = bitbake("sysroot-la-test -c populate_sysroot", ignore_status=True)
61 self.assertTrue(expected in res.output, msg=res.output)
62 self.assertTrue('[la]' in res.output, msg=res.output)
63 self.assertTrue(res.status != 0)
64
65 res = bitbake("sysroot-la-test-native -c populate_sysroot", ignore_status=True)
66 self.assertTrue(expected in res.output, msg=res.output)
67 self.assertTrue('[la]' in res.output, msg=res.output)
68 self.assertTrue(res.status != 0)
69
70 def test_sysroot_pkgconfig(self):
71 """
72 Summary: Check that tmpdir paths are not contained in .pc files.
73 Expected: Fail when a tmpdir path is found in the file content.
74 Author: Paulo Neves <ptsneves@gmail.com>
75 """
76 expected = "test.pc failed sanity test (tmpdir) in path"
77
78 res = bitbake("sysroot-pc-test -c populate_sysroot", ignore_status=True)
79 self.assertTrue('[pkgconfig]' in res.output, msg=res.output)
80 self.assertTrue(expected in res.output, msg=res.output)
81 self.assertTrue(res.status != 0)
82
83 res = bitbake("sysroot-pc-test-native -c populate_sysroot", ignore_status=True)
84 self.assertTrue(expected in res.output, msg=res.output)
85 self.assertTrue('[pkgconfig]' in res.output, msg=res.output)
86 self.assertTrue(res.status != 0)
diff --git a/meta/lib/oeqa/selftest/cases/tinfoil.py b/meta/lib/oeqa/selftest/cases/tinfoil.py
index a51c6048d3..21c8686b2a 100644
--- a/meta/lib/oeqa/selftest/cases/tinfoil.py
+++ b/meta/lib/oeqa/selftest/cases/tinfoil.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
@@ -9,7 +11,6 @@ import logging
9import bb.tinfoil 11import bb.tinfoil
10 12
11from oeqa.selftest.case import OESelftestTestCase 13from oeqa.selftest.case import OESelftestTestCase
12from oeqa.utils.commands import runCmd
13 14
14class TinfoilTests(OESelftestTestCase): 15class TinfoilTests(OESelftestTestCase):
15 """ Basic tests for the tinfoil API """ 16 """ Basic tests for the tinfoil API """
@@ -47,6 +48,17 @@ class TinfoilTests(OESelftestTestCase):
47 rd = tinfoil.parse_recipe_file(best[3]) 48 rd = tinfoil.parse_recipe_file(best[3])
48 self.assertEqual(testrecipe, rd.getVar('PN')) 49 self.assertEqual(testrecipe, rd.getVar('PN'))
49 50
51 def test_parse_virtual_recipe(self):
52 with bb.tinfoil.Tinfoil() as tinfoil:
53 tinfoil.prepare(config_only=False, quiet=2)
54 testrecipe = 'nativesdk-gcc'
55 best = tinfoil.find_best_provider(testrecipe)
56 if not best:
57 self.fail('Unable to find recipe providing %s' % testrecipe)
58 rd = tinfoil.parse_recipe_file(best[3])
59 self.assertEqual(testrecipe, rd.getVar('PN'))
60 self.assertIsNotNone(rd.getVar('FILE_LAYERNAME'))
61
50 def test_parse_recipe_copy_expand(self): 62 def test_parse_recipe_copy_expand(self):
51 with bb.tinfoil.Tinfoil() as tinfoil: 63 with bb.tinfoil.Tinfoil() as tinfoil:
52 tinfoil.prepare(config_only=False, quiet=2) 64 tinfoil.prepare(config_only=False, quiet=2)
@@ -65,6 +77,32 @@ class TinfoilTests(OESelftestTestCase):
65 localdata.setVar('PN', 'hello') 77 localdata.setVar('PN', 'hello')
66 self.assertEqual('hello', localdata.getVar('BPN')) 78 self.assertEqual('hello', localdata.getVar('BPN'))
67 79
80 # The config_data API to parse_recipe_file is used by:
81 # layerindex-web layerindex/update_layer.py
82 def test_parse_recipe_custom_data(self):
83 with bb.tinfoil.Tinfoil() as tinfoil:
84 tinfoil.prepare(config_only=False, quiet=2)
85 localdata = bb.data.createCopy(tinfoil.config_data)
86 localdata.setVar("TESTVAR", "testval")
87 testrecipe = 'mdadm'
88 best = tinfoil.find_best_provider(testrecipe)
89 if not best:
90 self.fail('Unable to find recipe providing %s' % testrecipe)
91 rd = tinfoil.parse_recipe_file(best[3], config_data=localdata)
92 self.assertEqual("testval", rd.getVar('TESTVAR'))
93
94 def test_parse_virtual_recipe_custom_data(self):
95 with bb.tinfoil.Tinfoil() as tinfoil:
96 tinfoil.prepare(config_only=False, quiet=2)
97 localdata = bb.data.createCopy(tinfoil.config_data)
98 localdata.setVar("TESTVAR", "testval")
99 testrecipe = 'nativesdk-gcc'
100 best = tinfoil.find_best_provider(testrecipe)
101 if not best:
102 self.fail('Unable to find recipe providing %s' % testrecipe)
103 rd = tinfoil.parse_recipe_file(best[3], config_data=localdata)
104 self.assertEqual("testval", rd.getVar('TESTVAR'))
105
68 def test_list_recipes(self): 106 def test_list_recipes(self):
69 with bb.tinfoil.Tinfoil() as tinfoil: 107 with bb.tinfoil.Tinfoil() as tinfoil:
70 tinfoil.prepare(config_only=False, quiet=2) 108 tinfoil.prepare(config_only=False, quiet=2)
@@ -87,21 +125,20 @@ class TinfoilTests(OESelftestTestCase):
87 with bb.tinfoil.Tinfoil() as tinfoil: 125 with bb.tinfoil.Tinfoil() as tinfoil:
88 tinfoil.prepare(config_only=True) 126 tinfoil.prepare(config_only=True)
89 127
90 tinfoil.set_event_mask(['bb.event.FilesMatchingFound', 'bb.command.CommandCompleted']) 128 tinfoil.set_event_mask(['bb.event.FilesMatchingFound', 'bb.command.CommandCompleted', 'bb.command.CommandFailed', 'bb.command.CommandExit'])
91 129
92 # Need to drain events otherwise events that were masked may still be in the queue 130 # Need to drain events otherwise events that were masked may still be in the queue
93 while tinfoil.wait_event(): 131 while tinfoil.wait_event():
94 pass 132 pass
95 133
96 pattern = 'conf' 134 pattern = 'conf'
97 res = tinfoil.run_command('findFilesMatchingInDir', pattern, 'conf/machine') 135 res = tinfoil.run_command('testCookerCommandEvent', pattern, handle_events=False)
98 self.assertTrue(res) 136 self.assertTrue(res)
99 137
100 eventreceived = False 138 eventreceived = False
101 commandcomplete = False 139 commandcomplete = False
102 start = time.time() 140 start = time.time()
103 # Wait for maximum 60s in total so we'd detect spurious heartbeat events for example 141 # Wait for maximum 60s in total so we'd detect spurious heartbeat events for example
104 # The test is IO load sensitive too
105 while (not (eventreceived == True and commandcomplete == True) 142 while (not (eventreceived == True and commandcomplete == True)
106 and (time.time() - start < 60)): 143 and (time.time() - start < 60)):
107 # if we received both events (on let's say a good day), we are done 144 # if we received both events (on let's say a good day), we are done
@@ -111,14 +148,15 @@ class TinfoilTests(OESelftestTestCase):
111 commandcomplete = True 148 commandcomplete = True
112 elif isinstance(event, bb.event.FilesMatchingFound): 149 elif isinstance(event, bb.event.FilesMatchingFound):
113 self.assertEqual(pattern, event._pattern) 150 self.assertEqual(pattern, event._pattern)
114 self.assertIn('qemuarm.conf', event._matches) 151 self.assertIn('A', event._matches)
152 self.assertIn('B', event._matches)
115 eventreceived = True 153 eventreceived = True
116 elif isinstance(event, logging.LogRecord): 154 elif isinstance(event, logging.LogRecord):
117 continue 155 continue
118 else: 156 else:
119 self.fail('Unexpected event: %s' % event) 157 self.fail('Unexpected event: %s' % event)
120 158
121 self.assertTrue(commandcomplete, 'Timed out waiting for CommandCompleted event from bitbake server') 159 self.assertTrue(commandcomplete, 'Timed out waiting for CommandCompleted event from bitbake server (Matching event received: %s)' % str(eventreceived))
122 self.assertTrue(eventreceived, 'Did not receive FilesMatchingFound event from bitbake server') 160 self.assertTrue(eventreceived, 'Did not receive FilesMatchingFound event from bitbake server')
123 161
124 def test_setvariable_clean(self): 162 def test_setvariable_clean(self):
@@ -173,8 +211,8 @@ class TinfoilTests(OESelftestTestCase):
173 self.assertEqual(value, 'origvalue', 'Variable renamed using config_data.renameVar() does not appear with new name') 211 self.assertEqual(value, 'origvalue', 'Variable renamed using config_data.renameVar() does not appear with new name')
174 # Test overrides 212 # Test overrides
175 tinfoil.config_data.setVar('TESTVAR', 'original') 213 tinfoil.config_data.setVar('TESTVAR', 'original')
176 tinfoil.config_data.setVar('TESTVAR_overrideone', 'one') 214 tinfoil.config_data.setVar('TESTVAR:overrideone', 'one')
177 tinfoil.config_data.setVar('TESTVAR_overridetwo', 'two') 215 tinfoil.config_data.setVar('TESTVAR:overridetwo', 'two')
178 tinfoil.config_data.appendVar('OVERRIDES', ':overrideone') 216 tinfoil.config_data.appendVar('OVERRIDES', ':overrideone')
179 value = tinfoil.config_data.getVar('TESTVAR') 217 value = tinfoil.config_data.getVar('TESTVAR')
180 self.assertEqual(value, 'one', 'Variable overrides not functioning correctly') 218 self.assertEqual(value, 'one', 'Variable overrides not functioning correctly')
diff --git a/meta/lib/oeqa/selftest/cases/usergrouptests.py b/meta/lib/oeqa/selftest/cases/usergrouptests.py
new file mode 100644
index 0000000000..3c59b0f290
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/usergrouptests.py
@@ -0,0 +1,57 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os
8import shutil
9from oeqa.selftest.case import OESelftestTestCase
10from oeqa.utils.commands import bitbake
11from oeqa.utils.commands import bitbake, get_bb_var, get_test_layer
12
13class UserGroupTests(OESelftestTestCase):
14 def test_group_from_dep_package(self):
15 self.logger.info("Building creategroup2")
16 bitbake(' creategroup2 creategroup1')
17 bitbake(' creategroup2 creategroup1 -c clean')
18 self.logger.info("Packaging creategroup2")
19 self.assertTrue(bitbake(' creategroup2 -c package'))
20
21 def test_add_task_between_p_sysroot_and_package(self):
22 # Test for YOCTO #14961
23 self.assertTrue(bitbake('useraddbadtask -C fetch'))
24
25 def test_postinst_order(self):
26 self.logger.info("Building dcreategroup")
27 self.assertTrue(bitbake(' dcreategroup'))
28
29 def test_static_useradd_from_dynamic(self):
30 metaselftestpath = get_test_layer()
31 self.logger.info("Building core-image-minimal to generate passwd/group file")
32 bitbake(' core-image-minimal')
33 self.logger.info("Setting up useradd-staticids")
34 repropassdir = os.path.join(metaselftestpath, "conf/include")
35 os.makedirs(repropassdir)
36 etcdir=os.path.join(os.path.join(os.path.join(get_bb_var("TMPDIR"), "work"), \
37 os.path.join(get_bb_var("MACHINE").replace("-","_")+"-poky-linux", "core-image-minimal/1.0/rootfs/etc")))
38 shutil.copy(os.path.join(etcdir, "passwd"), os.path.join(repropassdir, "reproducible-passwd"))
39 shutil.copy(os.path.join(etcdir, "group"), os.path.join(repropassdir, "reproducible-group"))
40 # Copy the original local.conf
41 shutil.copyfile(os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf'), os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf.orig'))
42
43 self.write_config("USERADDEXTENSION = \"useradd-staticids\"")
44 self.write_config("USERADD_ERROR_DYNAMIC ??= \"error\"")
45 self.write_config("USERADD_UID_TABLES += \"conf/include/reproducible-passwd\"")
46 self.write_config("USERADD_GID_TABLES += \"conf/include/reproducible-group\"")
47 self.logger.info("Rebuild with staticids")
48 bitbake(' core-image-minimal')
49 shutil.copyfile(os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf.orig'), os.path.join(os.environ.get('BUILDDIR'), 'conf/local.conf'))
50 self.logger.info("Rebuild without staticids")
51 bitbake(' core-image-minimal')
52 self.write_config("USERADDEXTENSION = \"useradd-staticids\"")
53 self.write_config("USERADD_ERROR_DYNAMIC ??= \"error\"")
54 self.write_config("USERADD_UID_TABLES += \"files/static-passwd\"")
55 self.write_config("USERADD_GID_TABLES += \"files/static-group\"")
56 self.logger.info("Rebuild with other staticids")
57 self.assertTrue(bitbake(' core-image-minimal'))
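
The wic test cases in the following diff repeatedly use the same pattern: write a small .wks description, run "wic create" against an already built image, then locate and inspect the resulting *.direct disk image. Condensed into one illustrative helper (a sketch only, not part of the patch; the partition line and image name are examples taken from the tests):

import os
from glob import glob
from tempfile import NamedTemporaryFile

from oeqa.utils.commands import runCmd

def create_test_disk_image(resultdir, image='core-image-minimal'):
    """Sketch of the wks-write / wic-create / glob pattern used by the wic tests."""
    with NamedTemporaryFile("w", suffix=".wks") as wks:
        # A single root partition sourced from the image rootfs.
        wks.write('part / --source rootfs --fstype=ext4 --label rootfs\n')
        wks.flush()
        runCmd("wic create %s -e %s -o %s" % (wks.name, image, resultdir))
        # wic names its output after the wks file: <wksname>-<timestamp>.direct
        wksname = os.path.splitext(os.path.basename(wks.name))[0]
        images = glob(os.path.join(resultdir, "%s-*.direct" % wksname))
        assert len(images) == 1
        return images[0]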
diff --git a/meta/lib/oeqa/selftest/cases/wic.py b/meta/lib/oeqa/selftest/cases/wic.py
index 2bf5cb9a86..b616759209 100644
--- a/meta/lib/oeqa/selftest/cases/wic.py
+++ b/meta/lib/oeqa/selftest/cases/wic.py
@@ -11,39 +11,19 @@
11import os 11import os
12import sys 12import sys
13import unittest 13import unittest
14import hashlib
14 15
15from glob import glob 16from glob import glob
16from shutil import rmtree, copy 17from shutil import rmtree, copy
17from functools import wraps, lru_cache
18from tempfile import NamedTemporaryFile 18from tempfile import NamedTemporaryFile
19from tempfile import TemporaryDirectory
19 20
20from oeqa.selftest.case import OESelftestTestCase 21from oeqa.selftest.case import OESelftestTestCase
22from oeqa.core.decorator import OETestTag
23from oeqa.core.decorator.data import skipIfNotArch
21from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu 24from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu
22 25
23 26
24@lru_cache(maxsize=32)
25def get_host_arch(recipe):
26 """A cached call to get_bb_var('HOST_ARCH', <recipe>)"""
27 return get_bb_var('HOST_ARCH', recipe)
28
29
30def only_for_arch(archs, image='core-image-minimal'):
31 """Decorator for wrapping test cases that can be run only for specific target
32 architectures. A list of compatible architectures is passed in `archs`.
33 Current architecture will be determined by parsing bitbake output for
34 `image` recipe.
35 """
36 def wrapper(func):
37 @wraps(func)
38 def wrapped_f(*args, **kwargs):
39 arch = get_host_arch(image)
40 if archs and arch not in archs:
41 raise unittest.SkipTest("Testcase arch dependency not met: %s" % arch)
42 return func(*args, **kwargs)
43 wrapped_f.__name__ = func.__name__
44 return wrapped_f
45 return wrapper
46
47def extract_files(debugfs_output): 27def extract_files(debugfs_output):
48 """ 28 """
49 extract file names from the output of debugfs -R 'ls -p', 29 extract file names from the output of debugfs -R 'ls -p',
@@ -77,22 +57,18 @@ class WicTestCase(OESelftestTestCase):
77 57
78 def setUpLocal(self): 58 def setUpLocal(self):
79 """This code is executed before each test method.""" 59 """This code is executed before each test method."""
80 self.resultdir = self.builddir + "/wic-tmp/" 60 self.resultdir = os.path.join(self.builddir, "wic-tmp")
81 super(WicTestCase, self).setUpLocal() 61 super(WicTestCase, self).setUpLocal()
82 62
83 # Do this here instead of in setUpClass as the base setUp does some 63 # Do this here instead of in setUpClass as the base setUp does some
84 # clean up which can result in the native tools built earlier in 64 # clean up which can result in the native tools built earlier in
85 # setUpClass being unavailable. 65 # setUpClass being unavailable.
86 if not WicTestCase.image_is_ready: 66 if not WicTestCase.image_is_ready:
87 if get_bb_var('USE_NLS') == 'yes': 67 if self.td['USE_NLS'] != 'yes':
88 bitbake('wic-tools') 68 self.skipTest('wic-tools needs USE_NLS=yes')
89 else:
90 self.skipTest('wic-tools cannot be built due its (intltool|gettext)-native dependency and NLS disable')
91 69
92 bitbake('core-image-minimal') 70 bitbake('wic-tools core-image-minimal core-image-minimal-mtdutils')
93 bitbake('core-image-minimal-mtdutils')
94 WicTestCase.image_is_ready = True 71 WicTestCase.image_is_ready = True
95
96 rmtree(self.resultdir, ignore_errors=True) 72 rmtree(self.resultdir, ignore_errors=True)
97 73
98 def tearDownLocal(self): 74 def tearDownLocal(self):
@@ -103,15 +79,13 @@ class WicTestCase(OESelftestTestCase):
103 def _get_image_env_path(self, image): 79 def _get_image_env_path(self, image):
104 """Generate and obtain the path to <image>.env""" 80 """Generate and obtain the path to <image>.env"""
105 if image not in WicTestCase.wicenv_cache: 81 if image not in WicTestCase.wicenv_cache:
106 self.assertEqual(0, bitbake('%s -c do_rootfs_wicenv' % image).status) 82 bitbake('%s -c do_rootfs_wicenv' % image)
107 bb_vars = get_bb_vars(['STAGING_DIR', 'MACHINE'], image) 83 stdir = get_bb_var('STAGING_DIR', image)
108 stdir = bb_vars['STAGING_DIR'] 84 machine = self.td["MACHINE"]
109 machine = bb_vars['MACHINE']
110 WicTestCase.wicenv_cache[image] = os.path.join(stdir, machine, 'imgdata') 85 WicTestCase.wicenv_cache[image] = os.path.join(stdir, machine, 'imgdata')
111 return WicTestCase.wicenv_cache[image] 86 return WicTestCase.wicenv_cache[image]
112 87
113class Wic(WicTestCase): 88class CLITests(OESelftestTestCase):
114
115 def test_version(self): 89 def test_version(self):
116 """Test wic --version""" 90 """Test wic --version"""
117 runCmd('wic --version') 91 runCmd('wic --version')
@@ -172,68 +146,136 @@ class Wic(WicTestCase):
172 """Test wic without command""" 146 """Test wic without command"""
173 self.assertEqual(1, runCmd('wic', ignore_status=True).status) 147 self.assertEqual(1, runCmd('wic', ignore_status=True).status)
174 148
149class Wic(WicTestCase):
150 def test_skip_kernel_install(self):
151 """Test the functionality of not installing the kernel in the boot directory using the wic plugin"""
152 # create a temporary file for the WKS content
153 with NamedTemporaryFile("w", suffix=".wks") as wks:
154 wks.write(
155 'part --source bootimg-efi '
156 '--sourceparams="loader=grub-efi,install-kernel-into-boot-dir=false" '
157 '--label boot --active\n'
158 )
159 wks.flush()
160 # create a temporary directory to extract the disk image to
161 with TemporaryDirectory() as tmpdir:
162 img = 'core-image-minimal'
163 # build the image using the WKS file
164 cmd = "wic create %s -e %s -o %s" % (
165 wks.name, img, self.resultdir)
166 runCmd(cmd)
167 wksname = os.path.splitext(os.path.basename(wks.name))[0]
168 out = glob(os.path.join(
169 self.resultdir, "%s-*.direct" % wksname))
170 self.assertEqual(1, len(out))
171 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
172 # extract the content of the disk image to the temporary directory
173 cmd = "wic cp %s:1 %s -n %s" % (out[0], tmpdir, sysroot)
174 runCmd(cmd)
175 # check if the kernel is installed or not
176 kimgtype = get_bb_var('KERNEL_IMAGETYPE', img)
177 for file in os.listdir(tmpdir):
178 if file == kimgtype:
179 raise AssertionError(
180 "The kernel image '{}' was found in the partition".format(kimgtype)
181 )
182
183 def test_kernel_install(self):
184 """Test the installation of the kernel to the boot directory in the wic plugin"""
185 # create a temporary file for the WKS content
186 with NamedTemporaryFile("w", suffix=".wks") as wks:
187 wks.write(
188 'part --source bootimg-efi '
189 '--sourceparams="loader=grub-efi,install-kernel-into-boot-dir=true" '
190 '--label boot --active\n'
191 )
192 wks.flush()
193 # create a temporary directory to extract the disk image to
194 with TemporaryDirectory() as tmpdir:
195 img = 'core-image-minimal'
196 # build the image using the WKS file
197 cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
198 runCmd(cmd)
199 wksname = os.path.splitext(os.path.basename(wks.name))[0]
200 out = glob(os.path.join(self.resultdir, "%s-*.direct" % wksname))
201 self.assertEqual(1, len(out))
202 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
203 # extract the content of the disk image to the temporary directory
204 cmd = "wic cp %s:1 %s -n %s" % (out[0], tmpdir, sysroot)
205 runCmd(cmd)
206 # check if the kernel is installed or not
207 kimgtype = get_bb_var('KERNEL_IMAGETYPE', img)
208 found = False
209 for file in os.listdir(tmpdir):
210 if file == kimgtype:
211 found = True
212 break
213 self.assertTrue(
214 found, "The kernel image '{}' was not found in the boot partition".format(kimgtype)
215 )
216
175 def test_build_image_name(self): 217 def test_build_image_name(self):
176 """Test wic create wictestdisk --image-name=core-image-minimal""" 218 """Test wic create wictestdisk --image-name=core-image-minimal"""
177 cmd = "wic create wictestdisk --image-name=core-image-minimal -o %s" % self.resultdir 219 cmd = "wic create wictestdisk --image-name=core-image-minimal -o %s" % self.resultdir
178 runCmd(cmd) 220 runCmd(cmd)
179 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) 221 self.assertEqual(1, len(glob(os.path.join (self.resultdir, "wictestdisk-*.direct"))))
180 222
181 @only_for_arch(['i586', 'i686', 'x86_64']) 223 @skipIfNotArch(['i586', 'i686', 'x86_64'])
182 def test_gpt_image(self): 224 def test_gpt_image(self):
183 """Test creation of core-image-minimal with gpt table and UUID boot""" 225 """Test creation of core-image-minimal with gpt table and UUID boot"""
184 cmd = "wic create directdisk-gpt --image-name core-image-minimal -o %s" % self.resultdir 226 cmd = "wic create directdisk-gpt --image-name core-image-minimal -o %s" % self.resultdir
185 runCmd(cmd) 227 runCmd(cmd)
186 self.assertEqual(1, len(glob(self.resultdir + "directdisk-*.direct"))) 228 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "directdisk-*.direct"))))
187 229
188 @only_for_arch(['i586', 'i686', 'x86_64']) 230 @skipIfNotArch(['i586', 'i686', 'x86_64'])
189 def test_iso_image(self): 231 def test_iso_image(self):
190 """Test creation of hybrid iso image with legacy and EFI boot""" 232 """Test creation of hybrid iso image with legacy and EFI boot"""
191 config = 'INITRAMFS_IMAGE = "core-image-minimal-initramfs"\n'\ 233 config = 'INITRAMFS_IMAGE = "core-image-minimal-initramfs"\n'\
192 'MACHINE_FEATURES_append = " efi"\n'\ 234 'MACHINE_FEATURES:append = " efi"\n'\
193 'DEPENDS_pn-core-image-minimal += "syslinux"\n' 235 'DEPENDS:pn-core-image-minimal += "syslinux"\n'
194 self.append_config(config) 236 self.append_config(config)
195 bitbake('core-image-minimal core-image-minimal-initramfs') 237 bitbake('core-image-minimal core-image-minimal-initramfs')
196 self.remove_config(config) 238 self.remove_config(config)
197 cmd = "wic create mkhybridiso --image-name core-image-minimal -o %s" % self.resultdir 239 cmd = "wic create mkhybridiso --image-name core-image-minimal -o %s" % self.resultdir
198 runCmd(cmd) 240 runCmd(cmd)
199 self.assertEqual(1, len(glob(self.resultdir + "HYBRID_ISO_IMG-*.direct"))) 241 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "HYBRID_ISO_IMG-*.direct"))))
200 self.assertEqual(1, len(glob(self.resultdir + "HYBRID_ISO_IMG-*.iso"))) 242 self.assertEqual(1, len(glob(os.path.join (self.resultdir, "HYBRID_ISO_IMG-*.iso"))))
201 243
202 @only_for_arch(['i586', 'i686', 'x86_64']) 244 @skipIfNotArch(['i586', 'i686', 'x86_64'])
203 def test_qemux86_directdisk(self): 245 def test_qemux86_directdisk(self):
204 """Test creation of qemux86-directdisk image""" 246 """Test creation of qemux86-directdisk image"""
205 cmd = "wic create qemux86-directdisk -e core-image-minimal -o %s" % self.resultdir 247 cmd = "wic create qemux86-directdisk -e core-image-minimal -o %s" % self.resultdir
206 runCmd(cmd) 248 runCmd(cmd)
207 self.assertEqual(1, len(glob(self.resultdir + "qemux86-directdisk-*direct"))) 249 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "qemux86-directdisk-*direct"))))
208 250
209 @only_for_arch(['i586', 'i686', 'x86_64']) 251 @skipIfNotArch(['i586', 'i686', 'x86_64', 'aarch64'])
210 def test_mkefidisk(self): 252 def test_mkefidisk(self):
211 """Test creation of mkefidisk image""" 253 """Test creation of mkefidisk image"""
212 cmd = "wic create mkefidisk -e core-image-minimal -o %s" % self.resultdir 254 cmd = "wic create mkefidisk -e core-image-minimal -o %s" % self.resultdir
213 runCmd(cmd) 255 runCmd(cmd)
214 self.assertEqual(1, len(glob(self.resultdir + "mkefidisk-*direct"))) 256 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "mkefidisk-*direct"))))
215 257
216 @only_for_arch(['i586', 'i686', 'x86_64']) 258 @skipIfNotArch(['i586', 'i686', 'x86_64'])
217 def test_bootloader_config(self): 259 def test_bootloader_config(self):
218 """Test creation of directdisk-bootloader-config image""" 260 """Test creation of directdisk-bootloader-config image"""
219 config = 'DEPENDS_pn-core-image-minimal += "syslinux"\n' 261 config = 'DEPENDS:pn-core-image-minimal += "syslinux"\n'
220 self.append_config(config) 262 self.append_config(config)
221 bitbake('core-image-minimal') 263 bitbake('core-image-minimal')
222 self.remove_config(config) 264 self.remove_config(config)
223 cmd = "wic create directdisk-bootloader-config -e core-image-minimal -o %s" % self.resultdir 265 cmd = "wic create directdisk-bootloader-config -e core-image-minimal -o %s" % self.resultdir
224 runCmd(cmd) 266 runCmd(cmd)
225 self.assertEqual(1, len(glob(self.resultdir + "directdisk-bootloader-config-*direct"))) 267 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "directdisk-bootloader-config-*direct"))))
226 268
227 @only_for_arch(['i586', 'i686', 'x86_64']) 269 @skipIfNotArch(['i586', 'i686', 'x86_64', 'aarch64'])
228 def test_systemd_bootdisk(self): 270 def test_systemd_bootdisk(self):
229 """Test creation of systemd-bootdisk image""" 271 """Test creation of systemd-bootdisk image"""
230 config = 'MACHINE_FEATURES_append = " efi"\n' 272 config = 'MACHINE_FEATURES:append = " efi"\n'
231 self.append_config(config) 273 self.append_config(config)
232 bitbake('core-image-minimal') 274 bitbake('core-image-minimal')
233 self.remove_config(config) 275 self.remove_config(config)
234 cmd = "wic create systemd-bootdisk -e core-image-minimal -o %s" % self.resultdir 276 cmd = "wic create systemd-bootdisk -e core-image-minimal -o %s" % self.resultdir
235 runCmd(cmd) 277 runCmd(cmd)
236 self.assertEqual(1, len(glob(self.resultdir + "systemd-bootdisk-*direct"))) 278 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "systemd-bootdisk-*direct"))))
237 279
238 def test_efi_bootpart(self): 280 def test_efi_bootpart(self):
239 """Test creation of efi-bootpart image""" 281 """Test creation of efi-bootpart image"""
@@ -242,7 +284,7 @@ class Wic(WicTestCase):
242 self.append_config('IMAGE_EFI_BOOT_FILES = "%s;kernel"\n' % kimgtype) 284 self.append_config('IMAGE_EFI_BOOT_FILES = "%s;kernel"\n' % kimgtype)
243 runCmd(cmd) 285 runCmd(cmd)
244 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') 286 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
245 images = glob(self.resultdir + "mkefidisk-*.direct") 287 images = glob(os.path.join(self.resultdir, "mkefidisk-*.direct"))
246 result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot)) 288 result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot))
247 self.assertIn("kernel",result.output) 289 self.assertIn("kernel",result.output)
248 290
@@ -252,14 +294,15 @@ class Wic(WicTestCase):
252 kimgtype = get_bb_var('KERNEL_IMAGETYPE', 'core-image-minimal') 294 kimgtype = get_bb_var('KERNEL_IMAGETYPE', 'core-image-minimal')
253 self.write_config('IMAGE_BOOT_FILES = "%s"\n' % kimgtype) 295 self.write_config('IMAGE_BOOT_FILES = "%s"\n' % kimgtype)
254 runCmd(cmd) 296 runCmd(cmd)
255 self.assertEqual(1, len(glob(self.resultdir + "sdimage-bootpart-*direct"))) 297 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "sdimage-bootpart-*direct"))))
256 298
257 @only_for_arch(['i586', 'i686', 'x86_64']) 299 # TODO this doesn't have to be x86-specific
300 @skipIfNotArch(['i586', 'i686', 'x86_64'])
258 def test_default_output_dir(self): 301 def test_default_output_dir(self):
259 """Test default output location""" 302 """Test default output location"""
260 for fname in glob("directdisk-*.direct"): 303 for fname in glob("directdisk-*.direct"):
261 os.remove(fname) 304 os.remove(fname)
262 config = 'DEPENDS_pn-core-image-minimal += "syslinux"\n' 305 config = 'DEPENDS:pn-core-image-minimal += "syslinux"\n'
263 self.append_config(config) 306 self.append_config(config)
264 bitbake('core-image-minimal') 307 bitbake('core-image-minimal')
265 self.remove_config(config) 308 self.remove_config(config)
@@ -267,7 +310,7 @@ class Wic(WicTestCase):
267 runCmd(cmd) 310 runCmd(cmd)
268 self.assertEqual(1, len(glob("directdisk-*.direct"))) 311 self.assertEqual(1, len(glob("directdisk-*.direct")))
269 312
270 @only_for_arch(['i586', 'i686', 'x86_64']) 313 @skipIfNotArch(['i586', 'i686', 'x86_64'])
271 def test_build_artifacts(self): 314 def test_build_artifacts(self):
272 """Test wic create directdisk providing all artifacts.""" 315 """Test wic create directdisk providing all artifacts."""
273 bb_vars = get_bb_vars(['STAGING_DATADIR', 'RECIPE_SYSROOT_NATIVE'], 316 bb_vars = get_bb_vars(['STAGING_DATADIR', 'RECIPE_SYSROOT_NATIVE'],
@@ -282,28 +325,28 @@ class Wic(WicTestCase):
282 "-n %(recipe_sysroot_native)s " 325 "-n %(recipe_sysroot_native)s "
283 "-r %(image_rootfs)s " 326 "-r %(image_rootfs)s "
284 "-o %(resultdir)s" % bbvars) 327 "-o %(resultdir)s" % bbvars)
285 self.assertEqual(1, len(glob(self.resultdir + "directdisk-*.direct"))) 328 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "directdisk-*.direct"))))
286 329
287 def test_compress_gzip(self): 330 def test_compress_gzip(self):
288 """Test compressing an image with gzip""" 331 """Test compressing an image with gzip"""
289 runCmd("wic create wictestdisk " 332 runCmd("wic create wictestdisk "
290 "--image-name core-image-minimal " 333 "--image-name core-image-minimal "
291 "-c gzip -o %s" % self.resultdir) 334 "-c gzip -o %s" % self.resultdir)
292 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct.gz"))) 335 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct.gz"))))
293 336
294 def test_compress_bzip2(self): 337 def test_compress_bzip2(self):
295 """Test compressing an image with bzip2""" 338 """Test compressing an image with bzip2"""
296 runCmd("wic create wictestdisk " 339 runCmd("wic create wictestdisk "
297 "--image-name=core-image-minimal " 340 "--image-name=core-image-minimal "
298 "-c bzip2 -o %s" % self.resultdir) 341 "-c bzip2 -o %s" % self.resultdir)
299 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct.bz2"))) 342 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct.bz2"))))
300 343
301 def test_compress_xz(self): 344 def test_compress_xz(self):
302 """Test compressing an image with xz""" 345 """Test compressing an image with xz"""
303 runCmd("wic create wictestdisk " 346 runCmd("wic create wictestdisk "
304 "--image-name=core-image-minimal " 347 "--image-name=core-image-minimal "
305 "--compress-with=xz -o %s" % self.resultdir) 348 "--compress-with=xz -o %s" % self.resultdir)
306 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct.xz"))) 349 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct.xz"))))
307 350
308 def test_wrong_compressor(self): 351 def test_wrong_compressor(self):
309 """Test how wic breaks if wrong compressor is provided""" 352 """Test how wic breaks if wrong compressor is provided"""
@@ -317,23 +360,23 @@ class Wic(WicTestCase):
317 runCmd("wic create wictestdisk " 360 runCmd("wic create wictestdisk "
318 "--image-name=core-image-minimal " 361 "--image-name=core-image-minimal "
319 "-D -o %s" % self.resultdir) 362 "-D -o %s" % self.resultdir)
320 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) 363 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))))
321 self.assertEqual(1, len(glob(self.resultdir + "tmp.wic*"))) 364 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "tmp.wic*"))))
322 365
323 def test_debug_long(self): 366 def test_debug_long(self):
324 """Test --debug option""" 367 """Test --debug option"""
325 runCmd("wic create wictestdisk " 368 runCmd("wic create wictestdisk "
326 "--image-name=core-image-minimal " 369 "--image-name=core-image-minimal "
327 "--debug -o %s" % self.resultdir) 370 "--debug -o %s" % self.resultdir)
328 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) 371 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))))
329 self.assertEqual(1, len(glob(self.resultdir + "tmp.wic*"))) 372 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "tmp.wic*"))))
330 373
331 def test_skip_build_check_short(self): 374 def test_skip_build_check_short(self):
332 """Test -s option""" 375 """Test -s option"""
333 runCmd("wic create wictestdisk " 376 runCmd("wic create wictestdisk "
334 "--image-name=core-image-minimal " 377 "--image-name=core-image-minimal "
335 "-s -o %s" % self.resultdir) 378 "-s -o %s" % self.resultdir)
336 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) 379 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))))
337 380
338 def test_skip_build_check_long(self): 381 def test_skip_build_check_long(self):
339 """Test --skip-build-check option""" 382 """Test --skip-build-check option"""
@@ -341,14 +384,14 @@ class Wic(WicTestCase):
341 "--image-name=core-image-minimal " 384 "--image-name=core-image-minimal "
342 "--skip-build-check " 385 "--skip-build-check "
343 "--outdir %s" % self.resultdir) 386 "--outdir %s" % self.resultdir)
344 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) 387 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))))
345 388
346 def test_build_rootfs_short(self): 389 def test_build_rootfs_short(self):
347 """Test -f option""" 390 """Test -f option"""
348 runCmd("wic create wictestdisk " 391 runCmd("wic create wictestdisk "
349 "--image-name=core-image-minimal " 392 "--image-name=core-image-minimal "
350 "-f -o %s" % self.resultdir) 393 "-f -o %s" % self.resultdir)
351 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) 394 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))))
352 395
353 def test_build_rootfs_long(self): 396 def test_build_rootfs_long(self):
354 """Test --build-rootfs option""" 397 """Test --build-rootfs option"""
@@ -356,9 +399,10 @@ class Wic(WicTestCase):
356 "--image-name=core-image-minimal " 399 "--image-name=core-image-minimal "
357 "--build-rootfs " 400 "--build-rootfs "
358 "--outdir %s" % self.resultdir) 401 "--outdir %s" % self.resultdir)
359 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) 402 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))))
360 403
361 @only_for_arch(['i586', 'i686', 'x86_64']) 404 # TODO this doesn't have to be x86-specific
405 @skipIfNotArch(['i586', 'i686', 'x86_64'])
362 def test_rootfs_indirect_recipes(self): 406 def test_rootfs_indirect_recipes(self):
363 """Test usage of rootfs plugin with rootfs recipes""" 407 """Test usage of rootfs plugin with rootfs recipes"""
364 runCmd("wic create directdisk-multi-rootfs " 408 runCmd("wic create directdisk-multi-rootfs "
@@ -366,9 +410,10 @@ class Wic(WicTestCase):
366 "--rootfs rootfs1=core-image-minimal " 410 "--rootfs rootfs1=core-image-minimal "
367 "--rootfs rootfs2=core-image-minimal " 411 "--rootfs rootfs2=core-image-minimal "
368 "--outdir %s" % self.resultdir) 412 "--outdir %s" % self.resultdir)
369 self.assertEqual(1, len(glob(self.resultdir + "directdisk-multi-rootfs*.direct"))) 413 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "directdisk-multi-rootfs*.direct"))))
370 414
371 @only_for_arch(['i586', 'i686', 'x86_64']) 415 # TODO this doesn't have to be x86-specific
416 @skipIfNotArch(['i586', 'i686', 'x86_64'])
372 def test_rootfs_artifacts(self): 417 def test_rootfs_artifacts(self):
373 """Test usage of rootfs plugin with rootfs paths""" 418 """Test usage of rootfs plugin with rootfs paths"""
374 bb_vars = get_bb_vars(['STAGING_DATADIR', 'RECIPE_SYSROOT_NATIVE'], 419 bb_vars = get_bb_vars(['STAGING_DATADIR', 'RECIPE_SYSROOT_NATIVE'],
@@ -385,7 +430,7 @@ class Wic(WicTestCase):
385 "--rootfs-dir rootfs1=%(image_rootfs)s " 430 "--rootfs-dir rootfs1=%(image_rootfs)s "
386 "--rootfs-dir rootfs2=%(image_rootfs)s " 431 "--rootfs-dir rootfs2=%(image_rootfs)s "
387 "--outdir %(resultdir)s" % bbvars) 432 "--outdir %(resultdir)s" % bbvars)
388 self.assertEqual(1, len(glob(self.resultdir + "%(wks)s-*.direct" % bbvars))) 433 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "%(wks)s-*.direct" % bbvars))))
389 434
390 def test_exclude_path(self): 435 def test_exclude_path(self):
391 """Test --exclude-path wks option.""" 436 """Test --exclude-path wks option."""
@@ -406,7 +451,7 @@ part /etc --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/ --r
406 % (wks_file, self.resultdir)) 451 % (wks_file, self.resultdir))
407 452
408 os.remove(wks_file) 453 os.remove(wks_file)
409 wicout = glob(self.resultdir + "%s-*direct" % 'temp') 454 wicout = glob(os.path.join(self.resultdir, "%s-*direct" % 'temp'))
410 self.assertEqual(1, len(wicout)) 455 self.assertEqual(1, len(wicout))
411 456
412 wicimg = wicout[0] 457 wicimg = wicout[0]
@@ -686,21 +731,130 @@ part /etc --source rootfs --fstype=ext4 --change-directory=etc
686 % (wks_file, self.resultdir), ignore_status=True).status) 731 % (wks_file, self.resultdir), ignore_status=True).status)
687 os.remove(wks_file) 732 os.remove(wks_file)
688 733
734 def test_no_fstab_update(self):
735 """Test --no-fstab-update wks option."""
736
737 oldpath = os.environ['PATH']
738 os.environ['PATH'] = get_bb_var("PATH", "wic-tools")
739
740 # Get stock fstab from base-files recipe
741 bitbake('base-files -c do_install')
742 bf_fstab = os.path.join(get_bb_var('D', 'base-files'), 'etc', 'fstab')
743 self.assertEqual(True, os.path.exists(bf_fstab))
744 bf_fstab_md5sum = runCmd('md5sum %s 2>/dev/null' % bf_fstab).output.split(" ")[0]
745
746 try:
747 no_fstab_update_path = os.path.join(self.resultdir, 'test-no-fstab-update')
748 os.makedirs(no_fstab_update_path)
749 wks_file = os.path.join(no_fstab_update_path, 'temp.wks')
750 with open(wks_file, 'w') as wks:
751 wks.writelines(['part / --source rootfs --fstype=ext4 --label rootfs\n',
752 'part /mnt/p2 --source rootfs --rootfs-dir=core-image-minimal ',
753 '--fstype=ext4 --label p2 --no-fstab-update\n'])
754 runCmd("wic create %s -e core-image-minimal -o %s" \
755 % (wks_file, self.resultdir))
756
757 part_fstab_md5sum = []
758 for i in range(1, 3):
759 part = glob(os.path.join(self.resultdir, 'temp-*.direct.p') + str(i))[0]
760 part_fstab = runCmd("debugfs -R 'cat etc/fstab' %s 2>/dev/null" % (part))
761 part_fstab_md5sum.append(hashlib.md5((part_fstab.output + "\n\n").encode('utf-8')).hexdigest())
762
763 # '/etc/fstab' in partition 2 should contain the same stock fstab file
764 # as the one installed by the base-file recipe.
765 self.assertEqual(bf_fstab_md5sum, part_fstab_md5sum[1])
766
767 # '/etc/fstab' in partition 1 should contain an updated fstab file.
768 self.assertNotEqual(bf_fstab_md5sum, part_fstab_md5sum[0])
769
770 finally:
771 os.environ['PATH'] = oldpath
772
773 def test_no_fstab_update_errors(self):
774 """Test --no-fstab-update wks option error handling."""
775 wks_file = 'temp.wks'
776
777 # Absolute argument.
778 with open(wks_file, 'w') as wks:
779 wks.write("part / --source rootfs --fstype=ext4 --no-fstab-update /etc")
780 self.assertNotEqual(0, runCmd("wic create %s -e core-image-minimal -o %s" \
781 % (wks_file, self.resultdir), ignore_status=True).status)
782 os.remove(wks_file)
783
784 # Argument pointing to parent directory.
785 with open(wks_file, 'w') as wks:
786 wks.write("part / --source rootfs --fstype=ext4 --no-fstab-update ././..")
787 self.assertNotEqual(0, runCmd("wic create %s -e core-image-minimal -o %s" \
788 % (wks_file, self.resultdir), ignore_status=True).status)
789 os.remove(wks_file)
790
791 def test_extra_space(self):
792 """Test --extra-space wks option."""
793 extraspace = 1024**3
794 runCmd("wic create wictestdisk "
795 "--image-name core-image-minimal "
796 "--extra-space %i -o %s" % (extraspace ,self.resultdir))
797 wicout = glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))
798 self.assertEqual(1, len(wicout))
799 size = os.path.getsize(wicout[0])
800 self.assertTrue(size > extraspace, msg="Extra space not present (%s vs %s)" % (size, extraspace))
801
802 def test_no_table(self):
803 """Test --no-table wks option."""
804 wks_file = 'temp.wks'
805
806 # A partition kept out of the partition table, with a fixed size and offset.
807 with open(wks_file, 'w') as wks:
808 wks.write("part testspace --no-table --fixed-size 16k --offset 4080k")
809 runCmd("wic create %s --image-name core-image-minimal -o %s" % (wks_file, self.resultdir))
810
811 wicout = glob(os.path.join(self.resultdir, "*.*"))
812
813 self.assertEqual(1, len(wicout))
814 size = os.path.getsize(wicout[0])
815 self.assertEqual(size, 4 * 1024 * 1024)
816
817 os.remove(wks_file)
818
819 def test_partition_hidden_attributes(self):
820 """Test --hidden wks option."""
821 wks_file = 'temp.wks'
822 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
823 try:
824 with open(wks_file, 'w') as wks:
825 wks.write("""
826part / --source rootfs --fstype=ext4
827part / --source rootfs --fstype=ext4 --hidden
828bootloader --ptable gpt""")
829
830 runCmd("wic create %s -e core-image-minimal -o %s" \
831 % (wks_file, self.resultdir))
832 wicout = os.path.join(self.resultdir, "*.direct")
833
834 result = runCmd("%s/usr/sbin/sfdisk --part-attrs %s 1" % (sysroot, wicout))
835 self.assertEqual('', result.output)
836 result = runCmd("%s/usr/sbin/sfdisk --part-attrs %s 2" % (sysroot, wicout))
837 self.assertEqual('RequiredPartition', result.output)
838
839 finally:
840 os.remove(wks_file)
841
842
689class Wic2(WicTestCase): 843class Wic2(WicTestCase):
690 844
691 def test_bmap_short(self): 845 def test_bmap_short(self):
692 """Test generation of .bmap file -m option""" 846 """Test generation of .bmap file -m option"""
693 cmd = "wic create wictestdisk -e core-image-minimal -m -o %s" % self.resultdir 847 cmd = "wic create wictestdisk -e core-image-minimal -m -o %s" % self.resultdir
694 runCmd(cmd) 848 runCmd(cmd)
695 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct"))) 849 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*direct"))))
696 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct.bmap"))) 850 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*direct.bmap"))))
697 851
698 def test_bmap_long(self): 852 def test_bmap_long(self):
699 """Test generation of .bmap file --bmap option""" 853 """Test generation of .bmap file --bmap option"""
700 cmd = "wic create wictestdisk -e core-image-minimal --bmap -o %s" % self.resultdir 854 cmd = "wic create wictestdisk -e core-image-minimal --bmap -o %s" % self.resultdir
701 runCmd(cmd) 855 runCmd(cmd)
702 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct"))) 856 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*direct"))))
703 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct.bmap"))) 857 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*direct.bmap"))))
704 858
705 def test_image_env(self): 859 def test_image_env(self):
706 """Test generation of <image>.env files.""" 860 """Test generation of <image>.env files."""
@@ -711,7 +865,7 @@ class Wic2(WicTestCase):
711 basename = bb_vars['IMAGE_BASENAME'] 865 basename = bb_vars['IMAGE_BASENAME']
712 self.assertEqual(basename, image) 866 self.assertEqual(basename, image)
713 path = os.path.join(imgdatadir, basename) + '.env' 867 path = os.path.join(imgdatadir, basename) + '.env'
714 self.assertTrue(os.path.isfile(path)) 868 self.assertTrue(os.path.isfile(path), msg="File %s wasn't generated as expected" % path)
715 869
716 wicvars = set(bb_vars['WICVARS'].split()) 870 wicvars = set(bb_vars['WICVARS'].split())
717 # filter out optional variables 871 # filter out optional variables
@@ -724,7 +878,7 @@ class Wic2(WicTestCase):
724 # test if variables used by wic present in the .env file 878 # test if variables used by wic present in the .env file
725 for var in wicvars: 879 for var in wicvars:
726 self.assertTrue(var in content, "%s is not in .env file" % var) 880 self.assertTrue(var in content, "%s is not in .env file" % var)
727 self.assertTrue(content[var]) 881 self.assertTrue(content[var], "%s doesn't have a value (%s)" % (var, content[var]))
728 882
729 def test_image_vars_dir_short(self): 883 def test_image_vars_dir_short(self):
730 """Test image vars directory selection -v option""" 884 """Test image vars directory selection -v option"""
@@ -736,7 +890,7 @@ class Wic2(WicTestCase):
736 "--image-name=%s -v %s -n %s -o %s" 890 "--image-name=%s -v %s -n %s -o %s"
737 % (image, imgenvdir, native_sysroot, 891 % (image, imgenvdir, native_sysroot,
738 self.resultdir)) 892 self.resultdir))
739 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct"))) 893 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*direct"))))
740 894
741 def test_image_vars_dir_long(self): 895 def test_image_vars_dir_long(self):
742 """Test image vars directory selection --vars option""" 896 """Test image vars directory selection --vars option"""
@@ -751,58 +905,62 @@ class Wic2(WicTestCase):
751 "--outdir %s" 905 "--outdir %s"
752 % (image, imgenvdir, native_sysroot, 906 % (image, imgenvdir, native_sysroot,
753 self.resultdir)) 907 self.resultdir))
754 self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct"))) 908 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "wictestdisk-*direct"))))
755 909
756 @only_for_arch(['i586', 'i686', 'x86_64']) 910 # TODO this test could also work on aarch64
911 @skipIfNotArch(['i586', 'i686', 'x86_64'])
757 def test_wic_image_type(self): 912 def test_wic_image_type(self):
758 """Test building wic images by bitbake""" 913 """Test building wic images by bitbake"""
759 config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "wic-image-minimal"\n'\ 914 config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "wic-image-minimal"\n'\
760 'MACHINE_FEATURES_append = " efi"\n' 915 'MACHINE_FEATURES:append = " efi"\n'
761 self.append_config(config) 916 self.append_config(config)
762 self.assertEqual(0, bitbake('wic-image-minimal').status) 917 image = 'wic-image-minimal'
918 bitbake(image)
763 self.remove_config(config) 919 self.remove_config(config)
764 920
765 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'MACHINE']) 921 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
766 deploy_dir = bb_vars['DEPLOY_DIR_IMAGE'] 922 prefix = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], '%s.' % bb_vars['IMAGE_LINK_NAME'])
767 machine = bb_vars['MACHINE'] 923
768 prefix = os.path.join(deploy_dir, 'wic-image-minimal-%s.' % machine)
769 # check if we have result image and manifests symlinks 924 # check if we have result image and manifests symlinks
770 # pointing to existing files 925 # pointing to existing files
771 for suffix in ('wic', 'manifest'): 926 for suffix in ('wic', 'manifest'):
772 path = prefix + suffix 927 path = prefix + suffix
773 self.assertTrue(os.path.islink(path)) 928 self.assertTrue(os.path.islink(path), msg="Link %s wasn't generated as expected" % path)
774 self.assertTrue(os.path.isfile(os.path.realpath(path))) 929 self.assertTrue(os.path.isfile(os.path.realpath(path)), msg="File linked to by %s wasn't generated as expected" % path)
775 930
776 @only_for_arch(['i586', 'i686', 'x86_64']) 931 # TODO this should work on aarch64
932 @skipIfNotArch(['i586', 'i686', 'x86_64'])
933 @OETestTag("runqemu")
777 def test_qemu(self): 934 def test_qemu(self):
778 """Test wic-image-minimal under qemu""" 935 """Test wic-image-minimal under qemu"""
779 config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "wic-image-minimal"\n'\ 936 config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "wic-image-minimal"\n'\
780 'MACHINE_FEATURES_append = " efi"\n' 937 'MACHINE_FEATURES:append = " efi"\n'
781 self.append_config(config) 938 self.append_config(config)
782 self.assertEqual(0, bitbake('wic-image-minimal').status) 939 bitbake('wic-image-minimal')
783 self.remove_config(config) 940 self.remove_config(config)
784 941
785 with runqemu('wic-image-minimal', ssh=False) as qemu: 942 with runqemu('wic-image-minimal', ssh=False, runqemuparams='nographic') as qemu:
786 cmd = "mount | grep '^/dev/' | cut -f1,3 -d ' ' | egrep -c -e '/dev/sda1 /boot' " \ 943 cmd = "mount | grep '^/dev/' | cut -f1,3 -d ' ' | egrep -c -e '/dev/sda1 /boot' " \
787 "-e '/dev/root /|/dev/sda2 /' -e '/dev/sda3 /media' -e '/dev/sda4 /mnt'" 944 "-e '/dev/root /|/dev/sda2 /' -e '/dev/sda3 /media' -e '/dev/sda4 /mnt'"
788 status, output = qemu.run_serial(cmd) 945 status, output = qemu.run_serial(cmd)
789 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) 946 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
790 self.assertEqual(output, '4') 947 self.assertEqual(output, '4')
791 cmd = "grep UUID= /etc/fstab" 948 cmd = "grep UUID=2c71ef06-a81d-4735-9d3a-379b69c6bdba /etc/fstab"
792 status, output = qemu.run_serial(cmd) 949 status, output = qemu.run_serial(cmd)
793 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) 950 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
794 self.assertEqual(output, 'UUID=2c71ef06-a81d-4735-9d3a-379b69c6bdba\t/media\text4\tdefaults\t0\t0') 951 self.assertEqual(output, 'UUID=2c71ef06-a81d-4735-9d3a-379b69c6bdba\t/media\text4\tdefaults\t0\t0')
795 952
796 @only_for_arch(['i586', 'i686', 'x86_64']) 953 @skipIfNotArch(['i586', 'i686', 'x86_64'])
954 @OETestTag("runqemu")
797 def test_qemu_efi(self): 955 def test_qemu_efi(self):
798 """Test core-image-minimal efi image under qemu""" 956 """Test core-image-minimal efi image under qemu"""
799 config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "mkefidisk.wks"\n' 957 config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "mkefidisk.wks"\n'
800 self.append_config(config) 958 self.append_config(config)
801 self.assertEqual(0, bitbake('core-image-minimal ovmf').status) 959 bitbake('core-image-minimal ovmf')
802 self.remove_config(config) 960 self.remove_config(config)
803 961
804 with runqemu('core-image-minimal', ssh=False, 962 with runqemu('core-image-minimal', ssh=False,
805 runqemuparams='ovmf', image_fstype='wic') as qemu: 963 runqemuparams='nographic ovmf', image_fstype='wic') as qemu:
806 cmd = "grep sda. /proc/partitions |wc -l" 964 cmd = "grep sda. /proc/partitions |wc -l"
807 status, output = qemu.run_serial(cmd) 965 status, output = qemu.run_serial(cmd)
808 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) 966 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
@@ -831,7 +989,7 @@ class Wic2(WicTestCase):
831 989
832 wksname = os.path.splitext(os.path.basename(wkspath))[0] 990 wksname = os.path.splitext(os.path.basename(wkspath))[0]
833 991
834 wicout = glob(self.resultdir + "%s-*direct" % wksname) 992 wicout = glob(os.path.join(self.resultdir, "%s-*direct" % wksname))
835 993
836 if not wicout: 994 if not wicout:
837 return (p, None) 995 return (p, None)
@@ -976,50 +1134,69 @@ class Wic2(WicTestCase):
976 size = int(size[:-3]) 1134 size = int(size[:-3])
977 self.assertGreaterEqual(size, 204800) 1135 self.assertGreaterEqual(size, 204800)
978 1136
979 @only_for_arch(['i586', 'i686', 'x86_64']) 1137 # TODO this test could also work on aarch64
1138 @skipIfNotArch(['i586', 'i686', 'x86_64'])
1139 @OETestTag("runqemu")
980 def test_rawcopy_plugin_qemu(self): 1140 def test_rawcopy_plugin_qemu(self):
981 """Test rawcopy plugin in qemu""" 1141 """Test rawcopy plugin in qemu"""
982 # build ext4 and wic images 1142 # build ext4 and then use it for a wic image
983 for fstype in ("ext4", "wic"): 1143 config = 'IMAGE_FSTYPES = "ext4"\n'
984 config = 'IMAGE_FSTYPES = "%s"\nWKS_FILE = "test_rawcopy_plugin.wks.in"\n' % fstype 1144 self.append_config(config)
985 self.append_config(config) 1145 bitbake('core-image-minimal')
986 self.assertEqual(0, bitbake('core-image-minimal').status) 1146 image_link_name = get_bb_var('IMAGE_LINK_NAME', 'core-image-minimal')
987 self.remove_config(config) 1147 self.remove_config(config)
988 1148
989 with runqemu('core-image-minimal', ssh=False, image_fstype='wic') as qemu: 1149 config = 'IMAGE_FSTYPES = "wic"\n' \
1150 'IMAGE_LINK_NAME_CORE_IMAGE_MINIMAL = "%s"\n'\
1151 'WKS_FILE = "test_rawcopy_plugin.wks.in"\n'\
1152 % image_link_name
1153 self.append_config(config)
1154 bitbake('core-image-minimal-mtdutils')
1155 self.remove_config(config)
1156
1157 with runqemu('core-image-minimal-mtdutils', ssh=False,
1158 runqemuparams='nographic', image_fstype='wic') as qemu:
990 cmd = "grep sda. /proc/partitions |wc -l" 1159 cmd = "grep sda. /proc/partitions |wc -l"
991 status, output = qemu.run_serial(cmd) 1160 status, output = qemu.run_serial(cmd)
992 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) 1161 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
993 self.assertEqual(output, '2') 1162 self.assertEqual(output, '2')
994 1163
995 def test_rawcopy_plugin(self): 1164 def _rawcopy_plugin(self, fstype):
996 """Test rawcopy plugin""" 1165 """Test rawcopy plugin"""
997 img = 'core-image-minimal' 1166 image = 'core-image-minimal'
998 machine = get_bb_var('MACHINE', img) 1167 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
1168 params = ',unpack' if fstype.endswith('.gz') else ''
999 with NamedTemporaryFile("w", suffix=".wks") as wks: 1169 with NamedTemporaryFile("w", suffix=".wks") as wks:
1000 wks.writelines(['part /boot --active --source bootimg-pcbios\n', 1170 wks.write('part / --source rawcopy --sourceparams="file=%s.%s%s"\n'\
1001 'part / --source rawcopy --sourceparams="file=%s-%s.ext4" --use-uuid\n'\ 1171 % (bb_vars['IMAGE_LINK_NAME'], fstype, params))
1002 % (img, machine),
1003 'bootloader --timeout=0 --append="console=ttyS0,115200n8"\n'])
1004 wks.flush() 1172 wks.flush()
1005 cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir) 1173 cmd = "wic create %s -e %s -o %s" % (wks.name, image, self.resultdir)
1006 runCmd(cmd) 1174 runCmd(cmd)
1007 wksname = os.path.splitext(os.path.basename(wks.name))[0] 1175 wksname = os.path.splitext(os.path.basename(wks.name))[0]
1008 out = glob(self.resultdir + "%s-*direct" % wksname) 1176 out = glob(os.path.join(self.resultdir, "%s-*direct" % wksname))
1009 self.assertEqual(1, len(out)) 1177 self.assertEqual(1, len(out))
1010 1178
1179 def test_rawcopy_plugin(self):
1180 self._rawcopy_plugin('ext4')
1181
1182 def test_rawcopy_plugin_unpack(self):
1183 fstype = 'ext4.gz'
1184 config = 'IMAGE_FSTYPES = "%s"\n' % fstype
1185 self.append_config(config)
1186 self.assertEqual(0, bitbake('core-image-minimal').status)
1187 self.remove_config(config)
1188 self._rawcopy_plugin(fstype)
1189
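For reference, the wks line written by _rawcopy_plugin() for the gzipped case expands roughly as follows. This is only a minimal sketch; the image link name below is a hypothetical placeholder, and the real value comes from IMAGE_LINK_NAME as queried in the helper above.

    # Sketch of the wks line _rawcopy_plugin('ext4.gz') writes to its temporary
    # .wks file; 'core-image-minimal-qemux86-64' is an assumed IMAGE_LINK_NAME.
    image_link_name = "core-image-minimal-qemux86-64"
    fstype = "ext4.gz"
    params = ',unpack' if fstype.endswith('.gz') else ''
    wks_line = 'part / --source rawcopy --sourceparams="file=%s.%s%s"\n' % (image_link_name, fstype, params)
    # -> part / --source rawcopy --sourceparams="file=core-image-minimal-qemux86-64.ext4.gz,unpack"
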
1011 def test_empty_plugin(self): 1190 def test_empty_plugin(self):
1012 """Test empty plugin""" 1191 """Test empty plugin"""
1013 config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "test_empty_plugin.wks"\n' 1192 config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "test_empty_plugin.wks"\n'
1014 self.append_config(config) 1193 self.append_config(config)
1015 self.assertEqual(0, bitbake('core-image-minimal').status) 1194 image = 'core-image-minimal'
1195 bitbake(image)
1016 self.remove_config(config) 1196 self.remove_config(config)
1017 1197 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
1018 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'MACHINE']) 1198 image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], '%s.wic' % bb_vars['IMAGE_LINK_NAME'])
1019 deploy_dir = bb_vars['DEPLOY_DIR_IMAGE'] 1199 self.assertTrue(os.path.exists(image_path), msg="Image file %s wasn't generated as expected" % image_path)
1020 machine = bb_vars['MACHINE']
1021 image_path = os.path.join(deploy_dir, 'core-image-minimal-%s.wic' % machine)
1022 self.assertEqual(True, os.path.exists(image_path))
1023 1200
1024 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') 1201 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
1025 1202
@@ -1028,15 +1205,17 @@ class Wic2(WicTestCase):
1028 result = runCmd("wic ls %s -n %s | awk -F ' ' '{print $1 \" \" $5}' | grep '^2' | wc -w" % (image_path, sysroot)) 1205 result = runCmd("wic ls %s -n %s | awk -F ' ' '{print $1 \" \" $5}' | grep '^2' | wc -w" % (image_path, sysroot))
1029 self.assertEqual('1', result.output) 1206 self.assertEqual('1', result.output)
1030 1207
1031 @only_for_arch(['i586', 'i686', 'x86_64']) 1208 @skipIfNotArch(['i586', 'i686', 'x86_64'])
1209 @OETestTag("runqemu")
1032 def test_biosplusefi_plugin_qemu(self): 1210 def test_biosplusefi_plugin_qemu(self):
1033 """Test biosplusefi plugin in qemu""" 1211 """Test biosplusefi plugin in qemu"""
1034 config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "test_biosplusefi_plugin.wks"\nMACHINE_FEATURES_append = " efi"\n' 1212 config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "test_biosplusefi_plugin.wks"\nMACHINE_FEATURES:append = " efi"\n'
1035 self.append_config(config) 1213 self.append_config(config)
1036 self.assertEqual(0, bitbake('core-image-minimal').status) 1214 bitbake('core-image-minimal')
1037 self.remove_config(config) 1215 self.remove_config(config)
1038 1216
1039 with runqemu('core-image-minimal', ssh=False, image_fstype='wic') as qemu: 1217 with runqemu('core-image-minimal', ssh=False,
1218 runqemuparams='nographic', image_fstype='wic') as qemu:
1040 # Check that we have ONLY two /dev/sda* partitions (/boot and /) 1219 # Check that we have ONLY two /dev/sda* partitions (/boot and /)
1041 cmd = "grep sda. /proc/partitions | wc -l" 1220 cmd = "grep sda. /proc/partitions | wc -l"
1042 status, output = qemu.run_serial(cmd) 1221 status, output = qemu.run_serial(cmd)
@@ -1059,7 +1238,7 @@ class Wic2(WicTestCase):
1059 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) 1238 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
1060 self.assertEqual(output, '*') 1239 self.assertEqual(output, '*')
1061 1240
1062 @only_for_arch(['i586', 'i686', 'x86_64']) 1241 @skipIfNotArch(['i586', 'i686', 'x86_64'])
1063 def test_biosplusefi_plugin(self): 1242 def test_biosplusefi_plugin(self):
1064 """Test biosplusefi plugin""" 1243 """Test biosplusefi plugin"""
1065 # Wic generation below may fail depending on the order of the unittests 1244 # Wic generation below may fail depending on the order of the unittests
@@ -1068,9 +1247,9 @@ class Wic2(WicTestCase):
1068 # If an image hasn't been built yet, directory ${STAGING_DATADIR}/syslinux won't exist and _get_bootimg_dir() 1247 # If an image hasn't been built yet, directory ${STAGING_DATADIR}/syslinux won't exist and _get_bootimg_dir()
1069 # will raise with "Couldn't find correct bootimg_dir" 1248 # will raise with "Couldn't find correct bootimg_dir"
1070 # The easiest way to work around this issue is to make sure we already built an image here, hence the bitbake call 1249 # The easiest way to work around this issue is to make sure we already built an image here, hence the bitbake call
1071 config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "test_biosplusefi_plugin.wks"\nMACHINE_FEATURES_append = " efi"\n' 1250 config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "test_biosplusefi_plugin.wks"\nMACHINE_FEATURES:append = " efi"\n'
1072 self.append_config(config) 1251 self.append_config(config)
1073 self.assertEqual(0, bitbake('core-image-minimal').status) 1252 bitbake('core-image-minimal')
1074 self.remove_config(config) 1253 self.remove_config(config)
1075 1254
1076 img = 'core-image-minimal' 1255 img = 'core-image-minimal'
@@ -1082,9 +1261,60 @@ class Wic2(WicTestCase):
1082 cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir) 1261 cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
1083 runCmd(cmd) 1262 runCmd(cmd)
1084 wksname = os.path.splitext(os.path.basename(wks.name))[0] 1263 wksname = os.path.splitext(os.path.basename(wks.name))[0]
1085 out = glob(self.resultdir + "%s-*.direct" % wksname) 1264 out = glob(os.path.join(self.resultdir, "%s-*.direct" % wksname))
1265 self.assertEqual(1, len(out))
1266
1267 @skipIfNotArch(['i586', 'i686', 'x86_64', 'aarch64'])
1268 def test_uefi_kernel(self):
1269 """ Test uefi-kernel in wic """
1270 config = 'IMAGE_EFI_BOOT_FILES="/etc/fstab;testfile"\nIMAGE_FSTYPES = "wic"\nWKS_FILE = "test_uefikernel.wks"\nMACHINE_FEATURES:append = " efi"\n'
1271 self.append_config(config)
1272 bitbake('core-image-minimal')
1273 self.remove_config(config)
1274
1275 img = 'core-image-minimal'
1276 with NamedTemporaryFile("w", suffix=".wks") as wks:
1277 wks.writelines(['part /boot --source bootimg-efi --sourceparams="loader=uefi-kernel"\n'
1278 'part / --source rootfs --fstype=ext4 --align 1024 --use-uuid\n'\
1279 'bootloader --timeout=0 --append="console=ttyS0,115200n8"\n'])
1280 wks.flush()
1281 cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
1282 runCmd(cmd)
1283 wksname = os.path.splitext(os.path.basename(wks.name))[0]
1284 out = glob(os.path.join(self.resultdir, "%s-*.direct" % wksname))
1086 self.assertEqual(1, len(out)) 1285 self.assertEqual(1, len(out))
1087 1286
1287 # TODO this test could also work on aarch64
1288 @skipIfNotArch(['i586', 'i686', 'x86_64'])
1289 @OETestTag("runqemu")
1290 def test_efi_plugin_unified_kernel_image_qemu(self):
1291 """Test efi plugin's Unified Kernel Image feature in qemu"""
1292 config = 'IMAGE_FSTYPES = "wic"\n'\
1293 'INITRAMFS_IMAGE = "core-image-minimal-initramfs"\n'\
1294 'WKS_FILE = "test_efi_plugin.wks"\n'\
1295 'MACHINE_FEATURES:append = " efi"\n'
1296 self.append_config(config)
1297 bitbake('core-image-minimal core-image-minimal-initramfs ovmf')
1298 self.remove_config(config)
1299
1300 with runqemu('core-image-minimal', ssh=False,
1301 runqemuparams='nographic ovmf', image_fstype='wic') as qemu:
1302 # Check that /boot has EFI bootx64.efi (required for EFI)
1303 cmd = "ls /boot/EFI/BOOT/bootx64.efi | wc -l"
1304 status, output = qemu.run_serial(cmd)
1305 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
1306 self.assertEqual(output, '1')
1307 # Check that /boot has EFI/Linux/linux.efi (required for Unified Kernel Images auto detection)
1308 cmd = "ls /boot/EFI/Linux/linux.efi | wc -l"
1309 status, output = qemu.run_serial(cmd)
1310 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
1311 self.assertEqual(output, '1')
1312 # Check that /boot doesn't have loader/entries/boot.conf (Unified Kernel Images are auto detected by the bootloader)
1313 cmd = "ls /boot/loader/entries/boot.conf 2&>/dev/null | wc -l"
1314 status, output = qemu.run_serial(cmd)
1315 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
1316 self.assertEqual(output, '0')
1317
1088 def test_fs_types(self): 1318 def test_fs_types(self):
1089 """Test filesystem types for empty and not empty partitions""" 1319 """Test filesystem types for empty and not empty partitions"""
1090 img = 'core-image-minimal' 1320 img = 'core-image-minimal'
@@ -1101,7 +1331,7 @@ class Wic2(WicTestCase):
1101 cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir) 1331 cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
1102 runCmd(cmd) 1332 runCmd(cmd)
1103 wksname = os.path.splitext(os.path.basename(wks.name))[0] 1333 wksname = os.path.splitext(os.path.basename(wks.name))[0]
1104 out = glob(self.resultdir + "%s-*direct" % wksname) 1334 out = glob(os.path.join(self.resultdir, "%s-*direct" % wksname))
1105 self.assertEqual(1, len(out)) 1335 self.assertEqual(1, len(out))
1106 1336
1107 def test_kickstart_parser(self): 1337 def test_kickstart_parser(self):
@@ -1113,7 +1343,7 @@ class Wic2(WicTestCase):
1113 cmd = "wic create %s -e core-image-minimal -o %s" % (wks.name, self.resultdir) 1343 cmd = "wic create %s -e core-image-minimal -o %s" % (wks.name, self.resultdir)
1114 runCmd(cmd) 1344 runCmd(cmd)
1115 wksname = os.path.splitext(os.path.basename(wks.name))[0] 1345 wksname = os.path.splitext(os.path.basename(wks.name))[0]
1116 out = glob(self.resultdir + "%s-*direct" % wksname) 1346 out = glob(os.path.join(self.resultdir, "%s-*direct" % wksname))
1117 self.assertEqual(1, len(out)) 1347 self.assertEqual(1, len(out))
1118 1348
1119 def test_image_bootpart_globbed(self): 1349 def test_image_bootpart_globbed(self):
@@ -1124,11 +1354,11 @@ class Wic2(WicTestCase):
1124 self.append_config(config) 1354 self.append_config(config)
1125 runCmd(cmd) 1355 runCmd(cmd)
1126 self.remove_config(config) 1356 self.remove_config(config)
1127 self.assertEqual(1, len(glob(self.resultdir + "sdimage-bootpart-*direct"))) 1357 self.assertEqual(1, len(glob(os.path.join(self.resultdir, "sdimage-bootpart-*direct"))))
1128 1358
1129 def test_sparse_copy(self): 1359 def test_sparse_copy(self):
1130 """Test sparse_copy with FIEMAP and SEEK_HOLE filemap APIs""" 1360 """Test sparse_copy with FIEMAP and SEEK_HOLE filemap APIs"""
1131 libpath = os.path.join(get_bb_var('COREBASE'), 'scripts', 'lib', 'wic') 1361 libpath = os.path.join(self.td['COREBASE'], 'scripts', 'lib', 'wic')
1132 sys.path.insert(0, libpath) 1362 sys.path.insert(0, libpath)
1133 from filemap import FilemapFiemap, FilemapSeek, sparse_copy, ErrorNotSupp 1363 from filemap import FilemapFiemap, FilemapSeek, sparse_copy, ErrorNotSupp
1134 with NamedTemporaryFile("w", suffix=".wic-sparse") as sparse: 1364 with NamedTemporaryFile("w", suffix=".wic-sparse") as sparse:
@@ -1154,12 +1384,148 @@ class Wic2(WicTestCase):
1154 self.assertEqual(dest_stat.st_blocks, 8) 1384 self.assertEqual(dest_stat.st_blocks, 8)
1155 os.unlink(dest) 1385 os.unlink(dest)
1156 1386
1387 def test_mkfs_extraopts(self):
1388 """Test wks option --mkfs-extraopts for empty and not empty partitions"""
1389 img = 'core-image-minimal'
1390 with NamedTemporaryFile("w", suffix=".wks") as wks:
1391 wks.writelines(
1392 ['part ext2 --fstype ext2 --source rootfs --mkfs-extraopts "-D -F -i 8192"\n',
1393 "part btrfs --fstype btrfs --source rootfs --size 40M --mkfs-extraopts='--quiet'\n",
1394 'part squash --fstype squashfs --source rootfs --mkfs-extraopts "-no-sparse -b 4096"\n',
1395 'part emptyvfat --fstype vfat --size 1M --mkfs-extraopts "-S 1024 -s 64"\n',
1396 'part emptymsdos --fstype msdos --size 1M --mkfs-extraopts "-S 1024 -s 64"\n',
1397 'part emptyext2 --fstype ext2 --size 1M --mkfs-extraopts "-D -F -i 8192"\n',
1398 'part emptybtrfs --fstype btrfs --size 100M --mkfs-extraopts "--mixed -K"\n'])
1399 wks.flush()
1400 cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
1401 runCmd(cmd)
1402 wksname = os.path.splitext(os.path.basename(wks.name))[0]
1403 out = glob(os.path.join(self.resultdir, "%s-*direct" % wksname))
1404 self.assertEqual(1, len(out))
1405
1406 @skipIfNotArch(['i586', 'i686', 'x86_64'])
1407 @OETestTag("runqemu")
1408 def test_expand_mbr_image(self):
1409 """Test wic write --expand command for mbr image"""
1410 # build an image
1411 config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "directdisk.wks"\n'
1412 self.append_config(config)
1413 image = 'core-image-minimal'
1414 bitbake(image)
1415
1416 # get path to the image
1417 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
1418 image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], '%s.wic' % bb_vars['IMAGE_LINK_NAME'])
1419
1420 self.remove_config(config)
1421
1422 try:
1423 # expand image to 1G
1424 new_image_path = None
1425 with NamedTemporaryFile(mode='wb', suffix='.wic.exp',
1426 dir=bb_vars['DEPLOY_DIR_IMAGE'], delete=False) as sparse:
1427 sparse.truncate(1024 ** 3)
1428 new_image_path = sparse.name
1429
1430 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
1431 cmd = "wic write -n %s --expand 1:0 %s %s" % (sysroot, image_path, new_image_path)
1432 runCmd(cmd)
1433
1434 # check if partitions are expanded
1435 orig = runCmd("wic ls %s -n %s" % (image_path, sysroot))
1436 exp = runCmd("wic ls %s -n %s" % (new_image_path, sysroot))
1437 orig_sizes = [int(line.split()[3]) for line in orig.output.split('\n')[1:]]
1438 exp_sizes = [int(line.split()[3]) for line in exp.output.split('\n')[1:]]
1439 self.assertEqual(orig_sizes[0], exp_sizes[0]) # first partition is not resized
1440 self.assertTrue(orig_sizes[1] < exp_sizes[1], msg="Partition size wasn't enlarged (%s vs %s)" % (orig_sizes[1], exp_sizes[1]))
1441
1442 # Check if all free space is partitioned
1443 result = runCmd("%s/usr/sbin/sfdisk -F %s" % (sysroot, new_image_path))
1444 self.assertIn("0 B, 0 bytes, 0 sectors", result.output)
1445
1446 os.rename(image_path, image_path + '.bak')
1447 os.rename(new_image_path, image_path)
1448
1449 # Check if it boots in qemu
1450 with runqemu('core-image-minimal', ssh=False, runqemuparams='nographic') as qemu:
1451 cmd = "ls /etc/"
1452 status, output = qemu.run_serial('true')
1453 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
1454 finally:
1455 if os.path.exists(new_image_path):
1456 os.unlink(new_image_path)
1457 if os.path.exists(image_path + '.bak'):
1458 os.rename(image_path + '.bak', image_path)
1459
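The size comparison in test_expand_mbr_image parses the fourth whitespace-separated column of each `wic ls` row. Roughly, the output it assumes looks like the sketch below; the numbers are made up for illustration, not literal output.

    # Illustrative 'wic ls <image> -n <sysroot>' output assumed by the parsing above
    # (column values are hypothetical):
    # Num     Start        End          Size      Fstype
    # 1        1048576    25265663     24217088  ext4
    # 2       25690112   224395263    198705152  ext4
    # line.split()[3] therefore picks the Size column for every partition row,
    # and the header line is dropped by the [1:] slice.
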
1460 def test_gpt_partition_name(self):
1461 """Test --part-name argument to set partition name in GPT table"""
1462 config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "test_gpt_partition_name.wks"\n'
1463 self.append_config(config)
1464 image = 'core-image-minimal'
1465 bitbake(image)
1466 self.remove_config(config)
1467 deploy_dir = get_bb_var('DEPLOY_DIR_IMAGE')
1468 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], image)
1469 image_path = os.path.join(bb_vars['DEPLOY_DIR_IMAGE'], '%s.wic' % bb_vars['IMAGE_LINK_NAME'])
1470
1471 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
1472
1473 # Image is created
1474 self.assertTrue(os.path.exists(image_path), "image file %s doesn't exist" % image_path)
1475
1476 # Check the names of the three partitions
1477 # as listed in test_gpt_partition_name.wks
1478 result = runCmd("%s/usr/sbin/sfdisk --part-label %s 1" % (sysroot, image_path))
1479 self.assertEqual('boot-A', result.output)
1480 result = runCmd("%s/usr/sbin/sfdisk --part-label %s 2" % (sysroot, image_path))
1481 self.assertEqual('root-A', result.output)
1482 # When the --part-name is not defined, the partition name is equal to the --label
1483 result = runCmd("%s/usr/sbin/sfdisk --part-label %s 3" % (sysroot, image_path))
1484 self.assertEqual('ext-space', result.output)
1485
1486 def test_empty_zeroize_plugin(self):
1487 img = 'core-image-minimal'
1488 expected_size = [ 1024*1024, # 1M
1489 512*1024, # 512K
1490 2*1024*1024] # 2M
1491 # Check combination of sourceparams
1492 with NamedTemporaryFile("w", suffix=".wks") as wks:
1493 wks.writelines(
1494 ['part empty --source empty --sourceparams="fill" --ondisk sda --fixed-size 1M\n',
1495 'part empty --source empty --sourceparams="size=512K" --ondisk sda --size 1M --align 1024\n',
1496 'part empty --source empty --sourceparams="size=2048k,bs=512K" --ondisk sda --size 4M --align 1024\n'
1497 ])
1498 wks.flush()
1499 cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
1500 runCmd(cmd)
1501 wksname = os.path.splitext(os.path.basename(wks.name))[0]
1502 wicout = glob(os.path.join(self.resultdir, "%s-*direct" % wksname))
1503 # Skip the complete image and just look at the single partitions
1504 for idx, value in enumerate(wicout[1:]):
1505 self.logger.info(wicout[idx])
1506 # Check if partitions are actually zeroized
1507 with open(wicout[idx], mode="rb") as fd:
1508 ba = bytearray(fd.read())
1509 for b in ba:
1510 self.assertEqual(b, 0)
1511 self.assertEqual(expected_size[idx], os.path.getsize(wicout[idx]))
1512
1513 # Check that combining "fill" with the "--size" parameter is rejected as inconsistent
1514 with NamedTemporaryFile("w", suffix=".wks") as wks:
1515 wks.writelines(['part empty --source empty --sourceparams="fill" --ondisk sda --size 1M\n'])
1516 wks.flush()
1517 cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
1518 result = runCmd(cmd, ignore_status=True)
1519 self.assertIn("Source parameter 'fill' only works with the '--fixed-size' option, exiting.", result.output)
1520 self.assertNotEqual(0, result.status)
1521
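Summarising the zeroize checks above, each sourceparams combination in the wks file maps onto one entry of expected_size. This is a condensed restatement of the test, with the reading of bs= as a write block size being an assumption based on the parameter name.

    # part empty --sourceparams="fill"               --fixed-size 1M  -> 1 MiB of zeroes
    # part empty --sourceparams="size=512K"          --size 1M        -> 512 KiB of zeroes
    # part empty --sourceparams="size=2048k,bs=512K" --size 4M        -> 2 MiB of zeroes
    #                                                (bs= presumably sets the write block size)
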
1522class ModifyTests(WicTestCase):
1157 def test_wic_ls(self): 1523 def test_wic_ls(self):
1158 """Test listing image content using 'wic ls'""" 1524 """Test listing image content using 'wic ls'"""
1159 runCmd("wic create wictestdisk " 1525 runCmd("wic create wictestdisk "
1160 "--image-name=core-image-minimal " 1526 "--image-name=core-image-minimal "
1161 "-D -o %s" % self.resultdir) 1527 "-D -o %s" % self.resultdir)
1162 images = glob(self.resultdir + "wictestdisk-*.direct") 1528 images = glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))
1163 self.assertEqual(1, len(images)) 1529 self.assertEqual(1, len(images))
1164 1530
1165 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') 1531 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
@@ -1177,7 +1543,7 @@ class Wic2(WicTestCase):
1177 runCmd("wic create wictestdisk " 1543 runCmd("wic create wictestdisk "
1178 "--image-name=core-image-minimal " 1544 "--image-name=core-image-minimal "
1179 "-D -o %s" % self.resultdir) 1545 "-D -o %s" % self.resultdir)
1180 images = glob(self.resultdir + "wictestdisk-*.direct") 1546 images = glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))
1181 self.assertEqual(1, len(images)) 1547 self.assertEqual(1, len(images))
1182 1548
1183 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') 1549 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
@@ -1195,7 +1561,7 @@ class Wic2(WicTestCase):
1195 # check if file is there 1561 # check if file is there
1196 result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot)) 1562 result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot))
1197 self.assertEqual(7, len(result.output.split('\n'))) 1563 self.assertEqual(7, len(result.output.split('\n')))
1198 self.assertTrue(os.path.basename(testfile.name) in result.output) 1564 self.assertIn(os.path.basename(testfile.name), result.output)
1199 1565
1200 # prepare directory 1566 # prepare directory
1201 testdir = os.path.join(self.resultdir, 'wic-test-cp-dir') 1567 testdir = os.path.join(self.resultdir, 'wic-test-cp-dir')
@@ -1209,13 +1575,13 @@ class Wic2(WicTestCase):
1209 # check if directory is there 1575 # check if directory is there
1210 result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot)) 1576 result = runCmd("wic ls %s:1/ -n %s" % (images[0], sysroot))
1211 self.assertEqual(8, len(result.output.split('\n'))) 1577 self.assertEqual(8, len(result.output.split('\n')))
1212 self.assertTrue(os.path.basename(testdir) in result.output) 1578 self.assertIn(os.path.basename(testdir), result.output)
1213 1579
1214 # copy the file from the partition and check that it succeeds 1580 # copy the file from the partition and check that it succeeds
1215 dest = '%s-cp' % testfile.name 1581 dest = '%s-cp' % testfile.name
1216 runCmd("wic cp %s:1/%s %s -n %s" % (images[0], 1582 runCmd("wic cp %s:1/%s %s -n %s" % (images[0],
1217 os.path.basename(testfile.name), dest, sysroot)) 1583 os.path.basename(testfile.name), dest, sysroot))
1218 self.assertTrue(os.path.exists(dest)) 1584 self.assertTrue(os.path.exists(dest), msg="File %s wasn't generated as expected" % dest)
1219 1585
1220 1586
1221 def test_wic_rm(self): 1587 def test_wic_rm(self):
@@ -1223,105 +1589,35 @@ class Wic2(WicTestCase):
1223 runCmd("wic create mkefidisk " 1589 runCmd("wic create mkefidisk "
1224 "--image-name=core-image-minimal " 1590 "--image-name=core-image-minimal "
1225 "-D -o %s" % self.resultdir) 1591 "-D -o %s" % self.resultdir)
1226 images = glob(self.resultdir + "mkefidisk-*.direct") 1592 images = glob(os.path.join(self.resultdir, "mkefidisk-*.direct"))
1227 self.assertEqual(1, len(images)) 1593 self.assertEqual(1, len(images))
1228 1594
1229 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') 1595 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
1596 # Not bulletproof but hopefully sufficient
1597 kerneltype = get_bb_var('KERNEL_IMAGETYPE', 'virtual/kernel')
1230 1598
1231 # list directory content of the first partition 1599 # list directory content of the first partition
1232 result = runCmd("wic ls %s:1 -n %s" % (images[0], sysroot)) 1600 result = runCmd("wic ls %s:1 -n %s" % (images[0], sysroot))
1233 self.assertIn('\nBZIMAGE ', result.output) 1601 self.assertIn('\n%s ' % kerneltype.upper(), result.output)
1234 self.assertIn('\nEFI <DIR> ', result.output) 1602 self.assertIn('\nEFI <DIR> ', result.output)
1235 1603
1236 # remove file 1604 # remove file. EFI partitions are case-insensitive so exercise that too
1237 runCmd("wic rm %s:1/bzimage -n %s" % (images[0], sysroot)) 1605 runCmd("wic rm %s:1/%s -n %s" % (images[0], kerneltype.lower(), sysroot))
1238 1606
1239 # remove directory 1607 # remove directory
1240 runCmd("wic rm %s:1/efi -n %s" % (images[0], sysroot)) 1608 runCmd("wic rm %s:1/efi -n %s" % (images[0], sysroot))
1241 1609
1242 # check if they're removed 1610 # check if they're removed
1243 result = runCmd("wic ls %s:1 -n %s" % (images[0], sysroot)) 1611 result = runCmd("wic ls %s:1 -n %s" % (images[0], sysroot))
1244 self.assertNotIn('\nBZIMAGE ', result.output) 1612 self.assertNotIn('\n%s ' % kerneltype.upper(), result.output)
1245 self.assertNotIn('\nEFI <DIR> ', result.output) 1613 self.assertNotIn('\nEFI <DIR> ', result.output)
1246 1614
1247 def test_mkfs_extraopts(self):
1248 """Test wks option --mkfs-extraopts for empty and not empty partitions"""
1249 img = 'core-image-minimal'
1250 with NamedTemporaryFile("w", suffix=".wks") as wks:
1251 wks.writelines(
1252 ['part ext2 --fstype ext2 --source rootfs --mkfs-extraopts "-D -F -i 8192"\n',
1253 "part btrfs --fstype btrfs --source rootfs --size 40M --mkfs-extraopts='--quiet'\n",
1254 'part squash --fstype squashfs --source rootfs --mkfs-extraopts "-no-sparse -b 4096"\n',
1255 'part emptyvfat --fstype vfat --size 1M --mkfs-extraopts "-S 1024 -s 64"\n',
1256 'part emptymsdos --fstype msdos --size 1M --mkfs-extraopts "-S 1024 -s 64"\n',
1257 'part emptyext2 --fstype ext2 --size 1M --mkfs-extraopts "-D -F -i 8192"\n',
1258 'part emptybtrfs --fstype btrfs --size 100M --mkfs-extraopts "--mixed -K"\n'])
1259 wks.flush()
1260 cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir)
1261 runCmd(cmd)
1262 wksname = os.path.splitext(os.path.basename(wks.name))[0]
1263 out = glob(self.resultdir + "%s-*direct" % wksname)
1264 self.assertEqual(1, len(out))
1265
1266 def test_expand_mbr_image(self):
1267 """Test wic write --expand command for mbr image"""
1268 # build an image
1269 config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "directdisk.wks"\n'
1270 self.append_config(config)
1271 self.assertEqual(0, bitbake('core-image-minimal').status)
1272
1273 # get path to the image
1274 bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'MACHINE'])
1275 deploy_dir = bb_vars['DEPLOY_DIR_IMAGE']
1276 machine = bb_vars['MACHINE']
1277 image_path = os.path.join(deploy_dir, 'core-image-minimal-%s.wic' % machine)
1278
1279 self.remove_config(config)
1280
1281 try:
1282 # expand image to 1G
1283 new_image_path = None
1284 with NamedTemporaryFile(mode='wb', suffix='.wic.exp',
1285 dir=deploy_dir, delete=False) as sparse:
1286 sparse.truncate(1024 ** 3)
1287 new_image_path = sparse.name
1288
1289 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
1290 cmd = "wic write -n %s --expand 1:0 %s %s" % (sysroot, image_path, new_image_path)
1291 runCmd(cmd)
1292
1293 # check if partitions are expanded
1294 orig = runCmd("wic ls %s -n %s" % (image_path, sysroot))
1295 exp = runCmd("wic ls %s -n %s" % (new_image_path, sysroot))
1296 orig_sizes = [int(line.split()[3]) for line in orig.output.split('\n')[1:]]
1297 exp_sizes = [int(line.split()[3]) for line in exp.output.split('\n')[1:]]
1298 self.assertEqual(orig_sizes[0], exp_sizes[0]) # first partition is not resized
1299 self.assertTrue(orig_sizes[1] < exp_sizes[1])
1300
1301 # Check if all free space is partitioned
1302 result = runCmd("%s/usr/sbin/sfdisk -F %s" % (sysroot, new_image_path))
1303 self.assertTrue("0 B, 0 bytes, 0 sectors" in result.output)
1304
1305 os.rename(image_path, image_path + '.bak')
1306 os.rename(new_image_path, image_path)
1307
1308 # Check if it boots in qemu
1309 with runqemu('core-image-minimal', ssh=False) as qemu:
1310 cmd = "ls /etc/"
1311 status, output = qemu.run_serial('true')
1312 self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output))
1313 finally:
1314 if os.path.exists(new_image_path):
1315 os.unlink(new_image_path)
1316 if os.path.exists(image_path + '.bak'):
1317 os.rename(image_path + '.bak', image_path)
1318
1319 def test_wic_ls_ext(self): 1615 def test_wic_ls_ext(self):
1320 """Test listing content of the ext partition using 'wic ls'""" 1616 """Test listing content of the ext partition using 'wic ls'"""
1321 runCmd("wic create wictestdisk " 1617 runCmd("wic create wictestdisk "
1322 "--image-name=core-image-minimal " 1618 "--image-name=core-image-minimal "
1323 "-D -o %s" % self.resultdir) 1619 "-D -o %s" % self.resultdir)
1324 images = glob(self.resultdir + "wictestdisk-*.direct") 1620 images = glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))
1325 self.assertEqual(1, len(images)) 1621 self.assertEqual(1, len(images))
1326 1622
1327 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') 1623 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
@@ -1329,14 +1625,14 @@ class Wic2(WicTestCase):
1329 # list directory content of the second ext4 partition 1625 # list directory content of the second ext4 partition
1330 result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot)) 1626 result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot))
1331 self.assertTrue(set(['bin', 'home', 'proc', 'usr', 'var', 'dev', 'lib', 'sbin']).issubset( 1627 self.assertTrue(set(['bin', 'home', 'proc', 'usr', 'var', 'dev', 'lib', 'sbin']).issubset(
1332 set(line.split()[-1] for line in result.output.split('\n') if line))) 1628 set(line.split()[-1] for line in result.output.split('\n') if line)), msg="Expected directories not present %s" % result.output)
1333 1629
1334 def test_wic_cp_ext(self): 1630 def test_wic_cp_ext(self):
1335 """Test copy files and directories to the ext partition.""" 1631 """Test copy files and directories to the ext partition."""
1336 runCmd("wic create wictestdisk " 1632 runCmd("wic create wictestdisk "
1337 "--image-name=core-image-minimal " 1633 "--image-name=core-image-minimal "
1338 "-D -o %s" % self.resultdir) 1634 "-D -o %s" % self.resultdir)
1339 images = glob(self.resultdir + "wictestdisk-*.direct") 1635 images = glob(os.path.join(self.resultdir, "wictestdisk-*.direct"))
1340 self.assertEqual(1, len(images)) 1636 self.assertEqual(1, len(images))
1341 1637
1342 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') 1638 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
@@ -1344,7 +1640,7 @@ class Wic2(WicTestCase):
1344 # list directory content of the ext4 partition 1640 # list directory content of the ext4 partition
1345 result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot)) 1641 result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot))
1346 dirs = set(line.split()[-1] for line in result.output.split('\n') if line) 1642 dirs = set(line.split()[-1] for line in result.output.split('\n') if line)
1347 self.assertTrue(set(['bin', 'home', 'proc', 'usr', 'var', 'dev', 'lib', 'sbin']).issubset(dirs)) 1643 self.assertTrue(set(['bin', 'home', 'proc', 'usr', 'var', 'dev', 'lib', 'sbin']).issubset(dirs), msg="Expected directories not present %s" % dirs)
1348 1644
1349 with NamedTemporaryFile("w", suffix=".wic-cp") as testfile: 1645 with NamedTemporaryFile("w", suffix=".wic-cp") as testfile:
1350 testfile.write("test") 1646 testfile.write("test")
@@ -1359,12 +1655,12 @@ class Wic2(WicTestCase):
1359 1655
1360 # check if the file to copy is in the partition 1656 # check if the file to copy is in the partition
1361 result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot)) 1657 result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot))
1362 self.assertTrue('fstab' in [line.split()[-1] for line in result.output.split('\n') if line]) 1658 self.assertIn('fstab', [line.split()[-1] for line in result.output.split('\n') if line])
1363 1659
1364 # copy file from the partition, replace the temporary file content with it and 1660 # copy file from the partition, replace the temporary file content with it and
1365 # check for the file size to validate the copy 1661 # check for the file size to validate the copy
1366 runCmd("wic cp %s:2/etc/fstab %s -n %s" % (images[0], testfile.name, sysroot)) 1662 runCmd("wic cp %s:2/etc/fstab %s -n %s" % (images[0], testfile.name, sysroot))
1367 self.assertTrue(os.stat(testfile.name).st_size > 0) 1663 self.assertTrue(os.stat(testfile.name).st_size > 0, msg="Filesize not as expected %s" % os.stat(testfile.name).st_size)
1368 1664
1369 1665
1370 def test_wic_rm_ext(self): 1666 def test_wic_rm_ext(self):
@@ -1372,25 +1668,25 @@ class Wic2(WicTestCase):
1372 runCmd("wic create mkefidisk " 1668 runCmd("wic create mkefidisk "
1373 "--image-name=core-image-minimal " 1669 "--image-name=core-image-minimal "
1374 "-D -o %s" % self.resultdir) 1670 "-D -o %s" % self.resultdir)
1375 images = glob(self.resultdir + "mkefidisk-*.direct") 1671 images = glob(os.path.join(self.resultdir, "mkefidisk-*.direct"))
1376 self.assertEqual(1, len(images)) 1672 self.assertEqual(1, len(images))
1377 1673
1378 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools') 1674 sysroot = get_bb_var('RECIPE_SYSROOT_NATIVE', 'wic-tools')
1379 1675
1380 # list directory content of the /etc directory on ext4 partition 1676 # list directory content of the /etc directory on ext4 partition
1381 result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot)) 1677 result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot))
1382 self.assertTrue('fstab' in [line.split()[-1] for line in result.output.split('\n') if line]) 1678 self.assertIn('fstab', [line.split()[-1] for line in result.output.split('\n') if line])
1383 1679
1384 # remove file 1680 # remove file
1385 runCmd("wic rm %s:2/etc/fstab -n %s" % (images[0], sysroot)) 1681 runCmd("wic rm %s:2/etc/fstab -n %s" % (images[0], sysroot))
1386 1682
1387 # check if it's removed 1683 # check if it's removed
1388 result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot)) 1684 result = runCmd("wic ls %s:2/etc/ -n %s" % (images[0], sysroot))
1389 self.assertTrue('fstab' not in [line.split()[-1] for line in result.output.split('\n') if line]) 1685 self.assertNotIn('fstab', [line.split()[-1] for line in result.output.split('\n') if line])
1390 1686
1391 # remove non-empty directory 1687 # remove non-empty directory
1392 runCmd("wic rm -r %s:2/etc/ -n %s" % (images[0], sysroot)) 1688 runCmd("wic rm -r %s:2/etc/ -n %s" % (images[0], sysroot))
1393 1689
1394 # check if it's removed 1690 # check if it's removed
1395 result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot)) 1691 result = runCmd("wic ls %s:2/ -n %s" % (images[0], sysroot))
1396 self.assertTrue('etc' not in [line.split()[-1] for line in result.output.split('\n') if line]) 1692 self.assertNotIn('etc', [line.split()[-1] for line in result.output.split('\n') if line])
diff --git a/meta/lib/oeqa/selftest/cases/wrapper.py b/meta/lib/oeqa/selftest/cases/wrapper.py
new file mode 100644
index 0000000000..f2be44262c
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/wrapper.py
@@ -0,0 +1,16 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6from oeqa.selftest.case import OESelftestTestCase
7from oeqa.utils.commands import bitbake
8
9class WrapperTests(OESelftestTestCase):
10 def test_shebang_wrapper(self):
11 """
12 Summary: Build a recipe which will fail if the cmdline_shebang_wrapper function is defective.
13 Expected: Exit status to be 0.
14 Author: Paulo Neves <ptsneves@gmail.com>
15 """
16 res = bitbake("cmdline-shebang-wrapper-test -c install", ignore_status=False)
diff --git a/meta/lib/oeqa/selftest/cases/yoctotestresultsquerytests.py b/meta/lib/oeqa/selftest/cases/yoctotestresultsquerytests.py
new file mode 100644
index 0000000000..312edb6431
--- /dev/null
+++ b/meta/lib/oeqa/selftest/cases/yoctotestresultsquerytests.py
@@ -0,0 +1,39 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7import os
8import sys
9import subprocess
10import shutil
11from oeqa.selftest.case import OESelftestTestCase
12from yocto_testresults_query import get_sha1, create_workdir
13basepath = os.path.abspath(os.path.dirname(__file__) + '/../../../../../')
14lib_path = basepath + '/scripts/lib'
15sys.path = sys.path + [lib_path]
16
17
18class TestResultsQueryTests(OESelftestTestCase):
19 def test_get_sha1(self):
20 test_data_get_sha1 = [
21 {"input": "yocto-4.0", "expected": "00cfdde791a0176c134f31e5a09eff725e75b905"},
22 {"input": "4.1_M1", "expected": "95066dde6861ee08fdb505ab3e0422156cc24fae"},
23 ]
24 for data in test_data_get_sha1:
25 test_name = data["input"]
26 with self.subTest(f"Test SHA1 from {test_name}"):
27 self.assertEqual(
28 get_sha1(basepath, data["input"]), data["expected"])
29
30 def test_create_workdir(self):
31 workdir = create_workdir()
32 try:
33 url = subprocess.check_output(
34 ["git", "-C", workdir, "remote", "get-url", "origin"]).strip().decode("utf-8")
35 except:
36 shutil.rmtree(workdir, ignore_errors=True)
37 self.fail(f"Can not execute git commands in {workdir}")
38 shutil.rmtree(workdir)
39 self.assertEqual(url, "git://git.yoctoproject.org/yocto-testresults")
diff --git a/meta/lib/oeqa/selftest/context.py b/meta/lib/oeqa/selftest/context.py
index 1659926975..99186175e5 100644
--- a/meta/lib/oeqa/selftest/context.py
+++ b/meta/lib/oeqa/selftest/context.py
@@ -16,19 +16,32 @@ from random import choice
16import oeqa 16import oeqa
17import oe 17import oe
18import bb.utils 18import bb.utils
19import bb.tinfoil
19 20
20from oeqa.core.context import OETestContext, OETestContextExecutor 21from oeqa.core.context import OETestContext, OETestContextExecutor
21from oeqa.core.exception import OEQAPreRun, OEQATestNotFound 22from oeqa.core.exception import OEQAPreRun, OEQATestNotFound
22 23
23from oeqa.utils.commands import runCmd, get_bb_vars, get_test_layer 24from oeqa.utils.commands import runCmd, get_bb_vars, get_test_layer
24 25
26OESELFTEST_METADATA=["run_all_tests", "run_tests", "skips", "machine", "select_tags", "exclude_tags"]
27
28def get_oeselftest_metadata(args):
29 result = {}
30 raw_args = vars(args)
31 for metadata in OESELFTEST_METADATA:
32 if metadata in raw_args:
33 result[metadata] = raw_args[metadata]
34
35 return result
36
25class NonConcurrentTestSuite(unittest.TestSuite): 37class NonConcurrentTestSuite(unittest.TestSuite):
26 def __init__(self, suite, processes, setupfunc, removefunc): 38 def __init__(self, suite, processes, setupfunc, removefunc, bb_vars):
27 super().__init__([suite]) 39 super().__init__([suite])
28 self.processes = processes 40 self.processes = processes
29 self.suite = suite 41 self.suite = suite
30 self.setupfunc = setupfunc 42 self.setupfunc = setupfunc
31 self.removefunc = removefunc 43 self.removefunc = removefunc
44 self.bb_vars = bb_vars
32 45
33 def run(self, result): 46 def run(self, result):
34 (builddir, newbuilddir) = self.setupfunc("-st", None, self.suite) 47 (builddir, newbuilddir) = self.setupfunc("-st", None, self.suite)
@@ -39,7 +52,7 @@ class NonConcurrentTestSuite(unittest.TestSuite):
39 52
40def removebuilddir(d): 53def removebuilddir(d):
41 delay = 5 54 delay = 5
42 while delay and os.path.exists(d + "/bitbake.lock"): 55 while delay and (os.path.exists(d + "/bitbake.lock") or os.path.exists(d + "/cache/hashserv.db-wal")):
43 time.sleep(1) 56 time.sleep(1)
44 delay = delay - 1 57 delay = delay - 1
45 # Deleting these directories takes a lot of time, use autobuilder 58 # Deleting these directories takes a lot of time, use autobuilder
@@ -57,8 +70,6 @@ class OESelftestTestContext(OETestContext):
57 def __init__(self, td=None, logger=None, machines=None, config_paths=None, newbuilddir=None, keep_builddir=None): 70 def __init__(self, td=None, logger=None, machines=None, config_paths=None, newbuilddir=None, keep_builddir=None):
58 super(OESelftestTestContext, self).__init__(td, logger) 71 super(OESelftestTestContext, self).__init__(td, logger)
59 72
60 self.machines = machines
61 self.custommachine = None
62 self.config_paths = config_paths 73 self.config_paths = config_paths
63 self.newbuilddir = newbuilddir 74 self.newbuilddir = newbuilddir
64 75
@@ -67,10 +78,15 @@ class OESelftestTestContext(OETestContext):
67 else: 78 else:
68 self.removebuilddir = removebuilddir 79 self.removebuilddir = removebuilddir
69 80
81 def set_variables(self, vars):
82 self.bb_vars = vars
83
70 def setup_builddir(self, suffix, selftestdir, suite): 84 def setup_builddir(self, suffix, selftestdir, suite):
85 sstatedir = self.bb_vars['SSTATE_DIR']
86
71 builddir = os.environ['BUILDDIR'] 87 builddir = os.environ['BUILDDIR']
72 if not selftestdir: 88 if not selftestdir:
73 selftestdir = get_test_layer() 89 selftestdir = get_test_layer(self.bb_vars['BBLAYERS'])
74 if self.newbuilddir: 90 if self.newbuilddir:
75 newbuilddir = os.path.join(self.newbuilddir, 'build' + suffix) 91 newbuilddir = os.path.join(self.newbuilddir, 'build' + suffix)
76 else: 92 else:
@@ -86,16 +102,29 @@ class OESelftestTestContext(OETestContext):
86 oe.path.copytree(builddir + "/cache", newbuilddir + "/cache") 102 oe.path.copytree(builddir + "/cache", newbuilddir + "/cache")
87 oe.path.copytree(selftestdir, newselftestdir) 103 oe.path.copytree(selftestdir, newselftestdir)
88 104
105 subprocess.check_output("git init && git add * && git commit -a -m 'initial'", cwd=newselftestdir, shell=True)
106
107 # Tried to use bitbake-layers add/remove but it requires recipe parsing and hence is too slow
108 subprocess.check_output("sed %s/conf/bblayers.conf -i -e 's#%s#%s#g'" % (newbuilddir, selftestdir, newselftestdir), cwd=newbuilddir, shell=True)
109
110 # Relative paths in BBLAYERS only work when the new build dir shares the same ancestor directory
111 if self.newbuilddir:
112 bblayers = subprocess.check_output("bitbake-getvar --value BBLAYERS | tail -1", cwd=builddir, shell=True, text=True)
113 if '..' in bblayers:
114 bblayers_abspath = [os.path.abspath(path) for path in bblayers.split()]
115 with open("%s/conf/bblayers.conf" % newbuilddir, "a") as f:
116 newbblayers = "# new bblayers to be used by selftest in the new build dir '%s'\n" % newbuilddir
117 newbblayers += 'BBLAYERS = "%s"\n' % ' '.join(bblayers_abspath)
118 f.write(newbblayers)
119
89 for e in os.environ: 120 for e in os.environ:
90 if builddir + "/" in os.environ[e]: 121 if builddir + "/" in os.environ[e]:
91 os.environ[e] = os.environ[e].replace(builddir + "/", newbuilddir + "/") 122 os.environ[e] = os.environ[e].replace(builddir + "/", newbuilddir + "/")
92 if os.environ[e].endswith(builddir): 123 if os.environ[e].endswith(builddir):
93 os.environ[e] = os.environ[e].replace(builddir, newbuilddir) 124 os.environ[e] = os.environ[e].replace(builddir, newbuilddir)
94 125
95 subprocess.check_output("git init; git add *; git commit -a -m 'initial'", cwd=newselftestdir, shell=True) 126 # Set SSTATE_DIR to match the parent SSTATE_DIR
96 127 subprocess.check_output("echo 'SSTATE_DIR ?= \"%s\"' >> %s/conf/local.conf" % (sstatedir, newbuilddir), cwd=newbuilddir, shell=True)
97 # Tried to use bitbake-layers add/remove but it requires recipe parsing and hence is too slow
98 subprocess.check_output("sed %s/conf/bblayers.conf -i -e 's#%s#%s#g'" % (newbuilddir, selftestdir, newselftestdir), cwd=newbuilddir, shell=True)
99 128
100 os.chdir(newbuilddir) 129 os.chdir(newbuilddir)
101 130
@@ -124,17 +153,11 @@ class OESelftestTestContext(OETestContext):
124 if processes: 153 if processes:
125 from oeqa.core.utils.concurrencytest import ConcurrentTestSuite 154 from oeqa.core.utils.concurrencytest import ConcurrentTestSuite
126 155
127 return ConcurrentTestSuite(suites, processes, self.setup_builddir, self.removebuilddir) 156 return ConcurrentTestSuite(suites, processes, self.setup_builddir, self.removebuilddir, self.bb_vars)
128 else: 157 else:
129 return NonConcurrentTestSuite(suites, processes, self.setup_builddir, self.removebuilddir) 158 return NonConcurrentTestSuite(suites, processes, self.setup_builddir, self.removebuilddir, self.bb_vars)
130 159
131 def runTests(self, processes=None, machine=None, skips=[]): 160 def runTests(self, processes=None, machine=None, skips=[]):
132 if machine:
133 self.custommachine = machine
134 if machine == 'random':
135 self.custommachine = choice(self.machines)
136 self.logger.info('Run tests with custom MACHINE set to: %s' % \
137 self.custommachine)
138 return super(OESelftestTestContext, self).runTests(processes, skips) 161 return super(OESelftestTestContext, self).runTests(processes, skips)
139 162
140 def listTests(self, display_type, machine=None): 163 def listTests(self, display_type, machine=None):
@@ -154,9 +177,6 @@ class OESelftestTestContextExecutor(OETestContextExecutor):
154 group.add_argument('-a', '--run-all-tests', default=False, 177 group.add_argument('-a', '--run-all-tests', default=False,
155 action="store_true", dest="run_all_tests", 178 action="store_true", dest="run_all_tests",
156 help='Run all (unhidden) tests') 179 help='Run all (unhidden) tests')
157 group.add_argument('-R', '--skip-tests', required=False, action='store',
158 nargs='+', dest="skips", default=None,
159 help='Run all (unhidden) tests except the ones specified. Format should be <module>[.<class>[.<test_method>]]')
160 group.add_argument('-r', '--run-tests', required=False, action='store', 180 group.add_argument('-r', '--run-tests', required=False, action='store',
161 nargs='+', dest="run_tests", default=None, 181 nargs='+', dest="run_tests", default=None,
162 help='Select what tests to run (modules, classes or test methods). Format should be: <module>.<class>.<test_method>') 182 help='Select what tests to run (modules, classes or test methods). Format should be: <module>.<class>.<test_method>')
@@ -171,11 +191,26 @@ class OESelftestTestContextExecutor(OETestContextExecutor):
171 action="store_true", default=False, 191 action="store_true", default=False,
172 help='List all available tests.') 192 help='List all available tests.')
173 193
174 parser.add_argument('-j', '--num-processes', dest='processes', action='store', 194 parser.add_argument('-R', '--skip-tests', required=False, action='store',
175 type=int, help="number of processes to execute in parallel with") 195 nargs='+', dest="skips", default=None,
196 help='Skip the tests specified. Format should be <module>[.<class>[.<test_method>]]')
197
198 def check_parallel_support(parameter):
199 if not parameter.isdigit():
200 import argparse
201 raise argparse.ArgumentTypeError("argument -j/--num-processes: invalid int value: '%s' " % str(parameter))
202
203 processes = int(parameter)
204 if processes:
205 try:
206 import testtools, subunit
207 except ImportError:
208 print("Failed to import testtools or subunit, the testcases will run serially")
209 processes = None
210 return processes
176 211
177 parser.add_argument('--machine', required=False, choices=['random', 'all'], 212 parser.add_argument('-j', '--num-processes', dest='processes', action='store',
178 help='Run tests on different machines (random/all).') 213 type=check_parallel_support, help="number of processes to execute in parallel with")
179 214
180 parser.add_argument('-t', '--select-tag', dest="select_tags", 215 parser.add_argument('-t', '--select-tag', dest="select_tags",
181 action='append', default=None, 216 action='append', default=None,
@@ -191,20 +226,6 @@ class OESelftestTestContextExecutor(OETestContextExecutor):
191 parser.add_argument('-v', '--verbose', action='store_true') 226 parser.add_argument('-v', '--verbose', action='store_true')
192 parser.set_defaults(func=self.run) 227 parser.set_defaults(func=self.run)
193 228
194 def _get_available_machines(self):
195 machines = []
196
197 bbpath = self.tc_kwargs['init']['td']['BBPATH'].split(':')
198
199 for path in bbpath:
200 found_machines = glob.glob(os.path.join(path, 'conf', 'machine', '*.conf'))
201 if found_machines:
202 for i in found_machines:
203 # eg: '/home/<user>/poky/meta-intel/conf/machine/intel-core2-32.conf'
204 machines.append(os.path.splitext(os.path.basename(i))[0])
205
206 return machines
207
208 def _get_cases_paths(self, bbpath): 229 def _get_cases_paths(self, bbpath):
209 cases_paths = [] 230 cases_paths = []
210 for layer in bbpath: 231 for layer in bbpath:
@@ -235,11 +256,10 @@ class OESelftestTestContextExecutor(OETestContextExecutor):
235 args.list_tests = 'name' 256 args.list_tests = 'name'
236 257
237 self.tc_kwargs['init']['td'] = bbvars 258 self.tc_kwargs['init']['td'] = bbvars
238 self.tc_kwargs['init']['machines'] = self._get_available_machines()
239 259
240 builddir = os.environ.get("BUILDDIR") 260 builddir = os.environ.get("BUILDDIR")
241 self.tc_kwargs['init']['config_paths'] = {} 261 self.tc_kwargs['init']['config_paths'] = {}
242 self.tc_kwargs['init']['config_paths']['testlayer_path'] = get_test_layer() 262 self.tc_kwargs['init']['config_paths']['testlayer_path'] = get_test_layer(bbvars["BBLAYERS"])
243 self.tc_kwargs['init']['config_paths']['builddir'] = builddir 263 self.tc_kwargs['init']['config_paths']['builddir'] = builddir
244 self.tc_kwargs['init']['config_paths']['localconf'] = os.path.join(builddir, "conf/local.conf") 264 self.tc_kwargs['init']['config_paths']['localconf'] = os.path.join(builddir, "conf/local.conf")
245 self.tc_kwargs['init']['config_paths']['bblayers'] = os.path.join(builddir, "conf/bblayers.conf") 265 self.tc_kwargs['init']['config_paths']['bblayers'] = os.path.join(builddir, "conf/bblayers.conf")
@@ -275,14 +295,14 @@ class OESelftestTestContextExecutor(OETestContextExecutor):
275 os.chdir(builddir) 295 os.chdir(builddir)
276 296
277 if not "meta-selftest" in self.tc.td["BBLAYERS"]: 297 if not "meta-selftest" in self.tc.td["BBLAYERS"]:
278 self.tc.logger.warning("meta-selftest layer not found in BBLAYERS, adding it") 298 self.tc.logger.info("meta-selftest layer not found in BBLAYERS, adding it")
279 meta_selftestdir = os.path.join( 299 meta_selftestdir = os.path.join(
280 self.tc.td["BBLAYERS_FETCH_DIR"], 'meta-selftest') 300 self.tc.td["BBLAYERS_FETCH_DIR"], 'meta-selftest')
281 if os.path.isdir(meta_selftestdir): 301 if os.path.isdir(meta_selftestdir):
282 runCmd("bitbake-layers add-layer %s" %meta_selftestdir) 302 runCmd("bitbake-layers add-layer %s" % meta_selftestdir)
283 # reloading the data is needed because the meta-selftest layer was added 303 # reloading the data is needed because the meta-selftest layer was added
284 self.tc.td = get_bb_vars() 304 self.tc.td = get_bb_vars()
285 self.tc.config_paths['testlayer_path'] = get_test_layer() 305 self.tc.config_paths['testlayer_path'] = get_test_layer(self.tc.td["BBLAYERS"])
286 else: 306 else:
287 self.tc.logger.error("could not locate meta-selftest in:\n%s" % meta_selftestdir) 307 self.tc.logger.error("could not locate meta-selftest in:\n%s" % meta_selftestdir)
288 raise OEQAPreRun 308 raise OEQAPreRun
@@ -320,8 +340,15 @@ class OESelftestTestContextExecutor(OETestContextExecutor):
320 340
321 _add_layer_libs() 341 _add_layer_libs()
322 342
323 self.tc.logger.info("Running bitbake -e to test the configuration is valid/parsable") 343 self.tc.logger.info("Checking base configuration is valid/parsable")
324 runCmd("bitbake -e") 344
345 with bb.tinfoil.Tinfoil(tracking=True) as tinfoil:
346 tinfoil.prepare(quiet=2, config_only=True)
347 d = tinfoil.config_data
348 vars = {}
349 vars['SSTATE_DIR'] = str(d.getVar('SSTATE_DIR'))
350 vars['BBLAYERS'] = str(d.getVar('BBLAYERS'))
351 self.tc.set_variables(vars)
325 352
326 def get_json_result_dir(self, args): 353 def get_json_result_dir(self, args):
327 json_result_dir = os.path.join(self.tc.td["LOG_DIR"], 'oeqa') 354 json_result_dir = os.path.join(self.tc.td["LOG_DIR"], 'oeqa')
@@ -334,12 +361,14 @@ class OESelftestTestContextExecutor(OETestContextExecutor):
334 import platform 361 import platform
335 from oeqa.utils.metadata import metadata_from_bb 362 from oeqa.utils.metadata import metadata_from_bb
336 metadata = metadata_from_bb() 363 metadata = metadata_from_bb()
364 oeselftest_metadata = get_oeselftest_metadata(args)
337 configuration = {'TEST_TYPE': 'oeselftest', 365 configuration = {'TEST_TYPE': 'oeselftest',
338 'STARTTIME': args.test_start_time, 366 'STARTTIME': args.test_start_time,
339 'MACHINE': self.tc.td["MACHINE"], 367 'MACHINE': self.tc.td["MACHINE"],
340 'HOST_DISTRO': oe.lsb.distro_identifier().replace(' ', '-'), 368 'HOST_DISTRO': oe.lsb.distro_identifier().replace(' ', '-'),
341 'HOST_NAME': metadata['hostname'], 369 'HOST_NAME': metadata['hostname'],
342 'LAYERS': metadata['layers']} 370 'LAYERS': metadata['layers'],
371 'OESELFTEST_METADATA': oeselftest_metadata}
343 return configuration 372 return configuration
344 373
345 def get_result_id(self, configuration): 374 def get_result_id(self, configuration):
@@ -374,37 +403,14 @@ class OESelftestTestContextExecutor(OETestContextExecutor):
374 403
375 rc = None 404 rc = None
376 try: 405 try:
377 if args.machine: 406 rc = self._internal_run(logger, args)
378 logger.info('Custom machine mode enabled. MACHINE set to %s' %
379 args.machine)
380
381 if args.machine == 'all':
382 results = []
383 for m in self.tc_kwargs['init']['machines']:
384 self.tc_kwargs['run']['machine'] = m
385 results.append(self._internal_run(logger, args))
386
387 # XXX: the oe-selftest script only needs to know if one
388 # machine run fails
389 for r in results:
390 rc = r
391 if not r.wasSuccessful():
392 break
393
394 else:
395 self.tc_kwargs['run']['machine'] = args.machine
396 return self._internal_run(logger, args)
397
398 else:
399 self.tc_kwargs['run']['machine'] = args.machine
400 rc = self._internal_run(logger, args)
401 finally: 407 finally:
402 config_paths = self.tc_kwargs['init']['config_paths'] 408 config_paths = self.tc_kwargs['init']['config_paths']
403 409
404 output_link = os.path.join(os.path.dirname(args.output_log), 410 output_link = os.path.join(os.path.dirname(args.output_log),
405 "%s-results.log" % self.name) 411 "%s-results.log" % self.name)
406 if os.path.lexists(output_link): 412 if os.path.lexists(output_link):
407 os.remove(output_link) 413 os.unlink(output_link)
408 os.symlink(args.output_log, output_link) 414 os.symlink(args.output_log, output_link)
409 415
410 return rc 416 return rc
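
The executor change above swaps the external `bitbake -e` sanity check for an in-process Tinfoil parse, reading back only the variables it actually needs. A minimal sketch of the same pattern, assuming it runs from an initialised build environment with the BitBake libraries on sys.path:

    import bb.tinfoil

    # Parse only the base configuration (no recipes) and read a couple of
    # variables, mirroring the check the selftest executor performs above.
    with bb.tinfoil.Tinfoil(tracking=True) as tinfoil:
        tinfoil.prepare(quiet=2, config_only=True)
        d = tinfoil.config_data
        for name in ("SSTATE_DIR", "BBLAYERS"):
            print(name, "=", d.getVar(name))
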
diff --git a/meta/lib/oeqa/targetcontrol.py b/meta/lib/oeqa/targetcontrol.py
index 19f5a4ea7e..6e8b781973 100644
--- a/meta/lib/oeqa/targetcontrol.py
+++ b/meta/lib/oeqa/targetcontrol.py
@@ -7,17 +7,14 @@
7# This module is used by testimage.bbclass for setting up and controlling a target machine. 7# This module is used by testimage.bbclass for setting up and controlling a target machine.
8 8
9import os 9import os
10import shutil
11import subprocess 10import subprocess
12import bb 11import bb
13import traceback
14import sys
15import logging 12import logging
16from oeqa.utils.sshcontrol import SSHControl 13from oeqa.utils.sshcontrol import SSHControl
17from oeqa.utils.qemurunner import QemuRunner 14from oeqa.utils.qemurunner import QemuRunner
18from oeqa.utils.qemutinyrunner import QemuTinyRunner 15from oeqa.utils.qemutinyrunner import QemuTinyRunner
19from oeqa.utils.dump import TargetDumper 16from oeqa.utils.dump import TargetDumper
20from oeqa.controllers.testtargetloader import TestTargetLoader 17from oeqa.utils.dump import MonitorDumper
21from abc import ABCMeta, abstractmethod 18from abc import ABCMeta, abstractmethod
22 19
23class BaseTarget(object, metaclass=ABCMeta): 20class BaseTarget(object, metaclass=ABCMeta):
@@ -41,7 +38,7 @@ class BaseTarget(object, metaclass=ABCMeta):
41 if os.path.islink(sshloglink): 38 if os.path.islink(sshloglink):
42 os.unlink(sshloglink) 39 os.unlink(sshloglink)
43 os.symlink(self.sshlog, sshloglink) 40 os.symlink(self.sshlog, sshloglink)
44 self.logger.info("SSH log file: %s" % self.sshlog) 41 self.logger.info("SSH log file: %s" % self.sshlog)
45 42
46 @abstractmethod 43 @abstractmethod
47 def start(self, params=None, ssh=True, extra_bootparams=None): 44 def start(self, params=None, ssh=True, extra_bootparams=None):
@@ -106,8 +103,7 @@ class QemuTarget(BaseTarget):
106 self.rootfs = os.path.join(d.getVar("DEPLOY_DIR_IMAGE"), d.getVar("IMAGE_LINK_NAME") + '.' + self.image_fstype) 103 self.rootfs = os.path.join(d.getVar("DEPLOY_DIR_IMAGE"), d.getVar("IMAGE_LINK_NAME") + '.' + self.image_fstype)
107 self.kernel = os.path.join(d.getVar("DEPLOY_DIR_IMAGE"), d.getVar("KERNEL_IMAGETYPE", False) + '-' + d.getVar('MACHINE', False) + '.bin') 104 self.kernel = os.path.join(d.getVar("DEPLOY_DIR_IMAGE"), d.getVar("KERNEL_IMAGETYPE", False) + '-' + d.getVar('MACHINE', False) + '.bin')
108 self.qemulog = os.path.join(self.testdir, "qemu_boot_log.%s" % self.datetime) 105 self.qemulog = os.path.join(self.testdir, "qemu_boot_log.%s" % self.datetime)
109 dump_target_cmds = d.getVar("testimage_dump_target") 106 dump_monitor_cmds = d.getVar("testimage_dump_monitor")
110 dump_host_cmds = d.getVar("testimage_dump_host")
111 dump_dir = d.getVar("TESTIMAGE_DUMP_DIR") 107 dump_dir = d.getVar("TESTIMAGE_DUMP_DIR")
112 if not dump_dir: 108 if not dump_dir:
113 dump_dir = os.path.join(d.getVar('LOG_DIR'), 'runtime-hostdump') 109 dump_dir = os.path.join(d.getVar('LOG_DIR'), 'runtime-hostdump')
@@ -131,6 +127,7 @@ class QemuTarget(BaseTarget):
131 logfile = self.qemulog, 127 logfile = self.qemulog,
132 kernel = self.kernel, 128 kernel = self.kernel,
133 boottime = int(d.getVar("TEST_QEMUBOOT_TIMEOUT")), 129 boottime = int(d.getVar("TEST_QEMUBOOT_TIMEOUT")),
130 tmpfsdir = d.getVar("RUNQEMU_TMPFS_DIR"),
134 logger = logger) 131 logger = logger)
135 else: 132 else:
136 self.runner = QemuRunner(machine=d.getVar("MACHINE"), 133 self.runner = QemuRunner(machine=d.getVar("MACHINE"),
@@ -142,11 +139,13 @@ class QemuTarget(BaseTarget):
142 boottime = int(d.getVar("TEST_QEMUBOOT_TIMEOUT")), 139 boottime = int(d.getVar("TEST_QEMUBOOT_TIMEOUT")),
143 use_kvm = use_kvm, 140 use_kvm = use_kvm,
144 dump_dir = dump_dir, 141 dump_dir = dump_dir,
145 dump_host_cmds = d.getVar("testimage_dump_host"),
146 logger = logger, 142 logger = logger,
143 tmpfsdir = d.getVar("RUNQEMU_TMPFS_DIR"),
147 serial_ports = len(d.getVar("SERIAL_CONSOLES").split())) 144 serial_ports = len(d.getVar("SERIAL_CONSOLES").split()))
148 145
149 self.target_dumper = TargetDumper(dump_target_cmds, dump_dir, self.runner) 146 self.monitor_dumper = MonitorDumper(dump_monitor_cmds, dump_dir, self.runner)
147 if (self.monitor_dumper):
148 self.monitor_dumper.create_dir("qmp")
150 149
151 def deploy(self): 150 def deploy(self):
152 bb.utils.mkdirhier(self.testdir) 151 bb.utils.mkdirhier(self.testdir)
@@ -156,7 +155,7 @@ class QemuTarget(BaseTarget):
156 os.unlink(qemuloglink) 155 os.unlink(qemuloglink)
157 os.symlink(self.qemulog, qemuloglink) 156 os.symlink(self.qemulog, qemuloglink)
158 157
159 self.logger.info("rootfs file: %s" % self.rootfs) 158 self.logger.info("rootfs file: %s" % self.rootfs)
160 self.logger.info("Qemu log file: %s" % self.qemulog) 159 self.logger.info("Qemu log file: %s" % self.qemulog)
161 super(QemuTarget, self).deploy() 160 super(QemuTarget, self).deploy()
162 161
@@ -198,7 +197,7 @@ class QemuTarget(BaseTarget):
198 self.server_ip = self.runner.server_ip 197 self.server_ip = self.runner.server_ip
199 self.connection = SSHControl(ip=self.ip, logfile=self.sshlog) 198 self.connection = SSHControl(ip=self.ip, logfile=self.sshlog)
200 else: 199 else:
201 raise RuntimError("%s - FAILED to re-start qemu - check the task log and the boot log" % self.pn) 200 raise RuntimeError("%s - FAILED to re-start qemu - check the task log and the boot log" % self.pn)
202 201
203 def run_serial(self, command, timeout=60): 202 def run_serial(self, command, timeout=60):
204 return self.runner.run_serial(command, timeout=timeout) 203 return self.runner.run_serial(command, timeout=timeout)
diff --git a/meta/lib/oeqa/utils/__init__.py b/meta/lib/oeqa/utils/__init__.py
index 6d1ec4cb99..53bdcbf266 100644
--- a/meta/lib/oeqa/utils/__init__.py
+++ b/meta/lib/oeqa/utils/__init__.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4# Enable other layers to have modules in the same named directory 6# Enable other layers to have modules in the same named directory
@@ -88,3 +90,10 @@ def load_test_components(logger, executor):
88 "_executor_class defined." % (comp_name, comp_context)) 90 "_executor_class defined." % (comp_name, comp_context))
89 91
90 return components 92 return components
93
94def get_json_result_dir(d):
95 json_result_dir = os.path.join(d.getVar("LOG_DIR"), 'oeqa')
96 custom_json_result_dir = d.getVar("OEQA_JSON_RESULT_DIR")
97 if custom_json_result_dir:
98 json_result_dir = custom_json_result_dir
99 return json_result_dir \ No newline at end of file
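
The new `get_json_result_dir()` helper defaults to `${LOG_DIR}/oeqa` and only diverges when `OEQA_JSON_RESULT_DIR` is set. A tiny sketch of that lookup against a stub datastore (the stub stands in for the real BitBake datastore and is illustrative only; it assumes `meta/lib` is on sys.path):

    from oeqa.utils import get_json_result_dir

    class StubData:
        # Minimal stand-in for the BitBake datastore; only getVar() is needed here
        def __init__(self, values):
            self._values = values
        def getVar(self, name):
            return self._values.get(name)

    print(get_json_result_dir(StubData({"LOG_DIR": "/build/tmp/log"})))
    # -> /build/tmp/log/oeqa
    print(get_json_result_dir(StubData({"LOG_DIR": "/build/tmp/log",
                                        "OEQA_JSON_RESULT_DIR": "/srv/oeqa-results"})))
    # -> /srv/oeqa-results
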
diff --git a/meta/lib/oeqa/utils/buildproject.py b/meta/lib/oeqa/utils/buildproject.py
index e6d80cc8dc..dfb9661868 100644
--- a/meta/lib/oeqa/utils/buildproject.py
+++ b/meta/lib/oeqa/utils/buildproject.py
@@ -18,6 +18,7 @@ class BuildProject(metaclass=ABCMeta):
18 def __init__(self, uri, foldername=None, tmpdir=None, dl_dir=None): 18 def __init__(self, uri, foldername=None, tmpdir=None, dl_dir=None):
19 self.uri = uri 19 self.uri = uri
20 self.archive = os.path.basename(uri) 20 self.archive = os.path.basename(uri)
21 self.tempdirobj = None
21 if not tmpdir: 22 if not tmpdir:
22 self.tempdirobj = tempfile.TemporaryDirectory(prefix='buildproject-') 23 self.tempdirobj = tempfile.TemporaryDirectory(prefix='buildproject-')
23 tmpdir = self.tempdirobj.name 24 tmpdir = self.tempdirobj.name
@@ -57,6 +58,8 @@ class BuildProject(metaclass=ABCMeta):
57 return self._run('cd %s; make install %s' % (self.targetdir, install_args)) 58 return self._run('cd %s; make install %s' % (self.targetdir, install_args))
58 59
59 def clean(self): 60 def clean(self):
61 if self.tempdirobj:
62 self.tempdirobj.cleanup()
60 if not self.needclean: 63 if not self.needclean:
61 return 64 return
62 self._run('rm -rf %s' % self.targetdir) 65 self._run('rm -rf %s' % self.targetdir)
diff --git a/meta/lib/oeqa/utils/commands.py b/meta/lib/oeqa/utils/commands.py
index a71c16ab14..575e380017 100644
--- a/meta/lib/oeqa/utils/commands.py
+++ b/meta/lib/oeqa/utils/commands.py
@@ -8,11 +8,8 @@
8# This module is mainly used by scripts/oe-selftest and modules under meta/oeqa/selftest 8# This module is mainly used by scripts/oe-selftest and modules under meta/oeqa/selftest
9# It provides a class and methods for running commands on the host in a convenient way for tests. 9# It provides a class and methods for running commands on the host in a convenient way for tests.
10 10
11
12
13import os 11import os
14import sys 12import sys
15import signal
16import subprocess 13import subprocess
17import threading 14import threading
18import time 15import time
@@ -21,6 +18,7 @@ from oeqa.utils import CommandError
21from oeqa.utils import ftools 18from oeqa.utils import ftools
22import re 19import re
23import contextlib 20import contextlib
21import errno
24# Export test doesn't require bb 22# Export test doesn't require bb
25try: 23try:
26 import bb 24 import bb
@@ -85,7 +83,7 @@ class Command(object):
85 except OSError as ex: 83 except OSError as ex:
86 # It's not an error when the command does not consume all 84 # It's not an error when the command does not consume all
87 # of our data. subprocess.communicate() also ignores that. 85 # of our data. subprocess.communicate() also ignores that.
88 if ex.errno != EPIPE: 86 if ex.errno != errno.EPIPE:
89 raise 87 raise
90 88
91 # We write in a separate thread because then we can read 89 # We write in a separate thread because then we can read
@@ -117,7 +115,7 @@ class Command(object):
117 else: 115 else:
118 deadline = time.time() + self.timeout 116 deadline = time.time() + self.timeout
119 for thread in self.threads: 117 for thread in self.threads:
120 timeout = deadline - time.time() 118 timeout = deadline - time.time()
121 if timeout < 0: 119 if timeout < 0:
122 timeout = 0 120 timeout = 0
123 thread.join(timeout) 121 thread.join(timeout)
@@ -168,18 +166,22 @@ class Result(object):
168 166
169 167
170def runCmd(command, ignore_status=False, timeout=None, assert_error=True, sync=True, 168def runCmd(command, ignore_status=False, timeout=None, assert_error=True, sync=True,
171 native_sysroot=None, limit_exc_output=0, output_log=None, **options): 169 native_sysroot=None, target_sys=None, limit_exc_output=0, output_log=None, **options):
172 result = Result() 170 result = Result()
173 171
174 if native_sysroot: 172 if native_sysroot:
175 extra_paths = "%s/sbin:%s/usr/sbin:%s/usr/bin" % \ 173 new_env = dict(options.get('env', os.environ))
176 (native_sysroot, native_sysroot, native_sysroot) 174 paths = new_env["PATH"].split(":")
177 extra_libpaths = "%s/lib:%s/usr/lib" % \ 175 paths = [
178 (native_sysroot, native_sysroot) 176 os.path.join(native_sysroot, "bin"),
179 nenv = dict(options.get('env', os.environ)) 177 os.path.join(native_sysroot, "sbin"),
180 nenv['PATH'] = extra_paths + ':' + nenv.get('PATH', '') 178 os.path.join(native_sysroot, "usr", "bin"),
181 nenv['LD_LIBRARY_PATH'] = extra_libpaths + ':' + nenv.get('LD_LIBRARY_PATH', '') 179 os.path.join(native_sysroot, "usr", "sbin"),
182 options['env'] = nenv 180 ] + paths
181 if target_sys:
182 paths = [os.path.join(native_sysroot, "usr", "bin", target_sys)] + paths
183 new_env["PATH"] = ":".join(paths)
184 options['env'] = new_env
183 185
184 cmd = Command(command, timeout=timeout, output_log=output_log, **options) 186 cmd = Command(command, timeout=timeout, output_log=output_log, **options)
185 cmd.run() 187 cmd.run()
@@ -283,8 +285,10 @@ def get_bb_vars(variables=None, target=None, postconfig=None):
283def get_bb_var(var, target=None, postconfig=None): 285def get_bb_var(var, target=None, postconfig=None):
284 return get_bb_vars([var], target, postconfig)[var] 286 return get_bb_vars([var], target, postconfig)[var]
285 287
286def get_test_layer(): 288def get_test_layer(bblayers=None):
287 layers = get_bb_var("BBLAYERS").split() 289 if bblayers is None:
290 bblayers = get_bb_var("BBLAYERS")
291 layers = bblayers.split()
288 testlayer = None 292 testlayer = None
289 for l in layers: 293 for l in layers:
290 if '~' in l: 294 if '~' in l:
@@ -296,6 +300,7 @@ def get_test_layer():
296 300
297def create_temp_layer(templayerdir, templayername, priority=999, recipepathspec='recipes-*/*'): 301def create_temp_layer(templayerdir, templayername, priority=999, recipepathspec='recipes-*/*'):
298 os.makedirs(os.path.join(templayerdir, 'conf')) 302 os.makedirs(os.path.join(templayerdir, 'conf'))
303 corenames = get_bb_var('LAYERSERIES_CORENAMES')
299 with open(os.path.join(templayerdir, 'conf', 'layer.conf'), 'w') as f: 304 with open(os.path.join(templayerdir, 'conf', 'layer.conf'), 'w') as f:
300 f.write('BBPATH .= ":${LAYERDIR}"\n') 305 f.write('BBPATH .= ":${LAYERDIR}"\n')
301 f.write('BBFILES += "${LAYERDIR}/%s/*.bb \\' % recipepathspec) 306 f.write('BBFILES += "${LAYERDIR}/%s/*.bb \\' % recipepathspec)
@@ -304,7 +309,7 @@ def create_temp_layer(templayerdir, templayername, priority=999, recipepathspec=
304 f.write('BBFILE_PATTERN_%s = "^${LAYERDIR}/"\n' % templayername) 309 f.write('BBFILE_PATTERN_%s = "^${LAYERDIR}/"\n' % templayername)
305 f.write('BBFILE_PRIORITY_%s = "%d"\n' % (templayername, priority)) 310 f.write('BBFILE_PRIORITY_%s = "%d"\n' % (templayername, priority))
306 f.write('BBFILE_PATTERN_IGNORE_EMPTY_%s = "1"\n' % templayername) 311 f.write('BBFILE_PATTERN_IGNORE_EMPTY_%s = "1"\n' % templayername)
307 f.write('LAYERSERIES_COMPAT_%s = "${LAYERSERIES_COMPAT_core}"\n' % templayername) 312 f.write('LAYERSERIES_COMPAT_%s = "%s"\n' % (templayername, corenames))
308 313
309@contextlib.contextmanager 314@contextlib.contextmanager
310def runqemu(pn, ssh=True, runqemuparams='', image_fstype=None, launch_cmd=None, qemuparams=None, overrides={}, discard_writes=True): 315def runqemu(pn, ssh=True, runqemuparams='', image_fstype=None, launch_cmd=None, qemuparams=None, overrides={}, discard_writes=True):
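
The reworked `runCmd()` now assembles the native sysroot search path from explicit components and, when a `target_sys` triplet is supplied, also prepends the cross bindir under `usr/bin/<target_sys>`. A small sketch of just that path construction, with hypothetical sysroot and triplet values:

    import os

    def native_search_path(native_sysroot, target_sys=None, base_path=None):
        # Prepend the native sysroot bin directories to the existing PATH;
        # add usr/bin/<target_sys> in front when a target triplet is given.
        base_path = base_path if base_path is not None else os.environ.get("PATH", "")
        paths = [
            os.path.join(native_sysroot, "bin"),
            os.path.join(native_sysroot, "sbin"),
            os.path.join(native_sysroot, "usr", "bin"),
            os.path.join(native_sysroot, "usr", "sbin"),
        ] + base_path.split(":")
        if target_sys:
            paths = [os.path.join(native_sysroot, "usr", "bin", target_sys)] + paths
        return ":".join(paths)

    # Hypothetical values, for illustration only
    print(native_search_path("/work/recipe-sysroot-native", "x86_64-poky-linux"))
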
diff --git a/meta/lib/oeqa/utils/decorators.py b/meta/lib/oeqa/utils/decorators.py
index aabf4110cb..ea90164e5e 100644
--- a/meta/lib/oeqa/utils/decorators.py
+++ b/meta/lib/oeqa/utils/decorators.py
@@ -16,91 +16,6 @@ import threading
16import signal 16import signal
17from functools import wraps 17from functools import wraps
18 18
19#get the "result" object from one of the upper frames provided that one of these upper frames is a unittest.case frame
20class getResults(object):
21 def __init__(self):
22 #dynamically determine the unittest.case frame and use it to get the name of the test method
23 ident = threading.current_thread().ident
24 upperf = sys._current_frames()[ident]
25 while (upperf.f_globals['__name__'] != 'unittest.case'):
26 upperf = upperf.f_back
27
28 def handleList(items):
29 ret = []
30 # items is a list of tuples, (test, failure) or (_ErrorHandler(), Exception())
31 for i in items:
32 s = i[0].id()
33 #Handle the _ErrorHolder objects from skipModule failures
34 if "setUpModule (" in s:
35 ret.append(s.replace("setUpModule (", "").replace(")",""))
36 else:
37 ret.append(s)
38 # Append also the test without the full path
39 testname = s.split('.')[-1]
40 if testname:
41 ret.append(testname)
42 return ret
43 self.faillist = handleList(upperf.f_locals['result'].failures)
44 self.errorlist = handleList(upperf.f_locals['result'].errors)
45 self.skiplist = handleList(upperf.f_locals['result'].skipped)
46
47 def getFailList(self):
48 return self.faillist
49
50 def getErrorList(self):
51 return self.errorlist
52
53 def getSkipList(self):
54 return self.skiplist
55
56class skipIfFailure(object):
57
58 def __init__(self,testcase):
59 self.testcase = testcase
60
61 def __call__(self,f):
62 @wraps(f)
63 def wrapped_f(*args, **kwargs):
64 res = getResults()
65 if self.testcase in (res.getFailList() or res.getErrorList()):
66 raise unittest.SkipTest("Testcase dependency not met: %s" % self.testcase)
67 return f(*args, **kwargs)
68 wrapped_f.__name__ = f.__name__
69 return wrapped_f
70
71class skipIfSkipped(object):
72
73 def __init__(self,testcase):
74 self.testcase = testcase
75
76 def __call__(self,f):
77 @wraps(f)
78 def wrapped_f(*args, **kwargs):
79 res = getResults()
80 if self.testcase in res.getSkipList():
81 raise unittest.SkipTest("Testcase dependency not met: %s" % self.testcase)
82 return f(*args, **kwargs)
83 wrapped_f.__name__ = f.__name__
84 return wrapped_f
85
86class skipUnlessPassed(object):
87
88 def __init__(self,testcase):
89 self.testcase = testcase
90
91 def __call__(self,f):
92 @wraps(f)
93 def wrapped_f(*args, **kwargs):
94 res = getResults()
95 if self.testcase in res.getSkipList() or \
96 self.testcase in res.getFailList() or \
97 self.testcase in res.getErrorList():
98 raise unittest.SkipTest("Testcase dependency not met: %s" % self.testcase)
99 return f(*args, **kwargs)
100 wrapped_f.__name__ = f.__name__
101 wrapped_f._depends_on = self.testcase
102 return wrapped_f
103
104class testcase(object): 19class testcase(object):
105 def __init__(self, test_case): 20 def __init__(self, test_case):
106 self.test_case = test_case 21 self.test_case = test_case
diff --git a/meta/lib/oeqa/utils/dump.py b/meta/lib/oeqa/utils/dump.py
index 09a44329e0..d4d271369f 100644
--- a/meta/lib/oeqa/utils/dump.py
+++ b/meta/lib/oeqa/utils/dump.py
@@ -1,9 +1,12 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import os 7import os
6import sys 8import sys
9import json
7import errno 10import errno
8import datetime 11import datetime
9import itertools 12import itertools
@@ -17,6 +20,7 @@ class BaseDumper(object):
17 # Some testing doesn't inherit testimage, so it is needed 20 # Some testing doesn't inherit testimage, so it is needed
18 # to set some defaults. 21 # to set some defaults.
19 self.parent_dir = parent_dir 22 self.parent_dir = parent_dir
23 self.dump_dir = parent_dir
20 dft_cmds = """ top -bn1 24 dft_cmds = """ top -bn1
21 iostat -x -z -N -d -p ALL 20 2 25 iostat -x -z -N -d -p ALL 20 2
22 ps -ef 26 ps -ef
@@ -46,11 +50,11 @@ class BaseDumper(object):
46 raise err 50 raise err
47 self.dump_dir = dump_dir 51 self.dump_dir = dump_dir
48 52
49 def _write_dump(self, command, output): 53 def _construct_filename(self, command):
50 if isinstance(self, HostDumper): 54 if isinstance(self, TargetDumper):
51 prefix = "host"
52 elif isinstance(self, TargetDumper):
53 prefix = "target" 55 prefix = "target"
56 elif isinstance(self, MonitorDumper):
57 prefix = "qmp"
54 else: 58 else:
55 prefix = "unknown" 59 prefix = "unknown"
56 for i in itertools.count(): 60 for i in itertools.count():
@@ -58,41 +62,80 @@ class BaseDumper(object):
58 fullname = os.path.join(self.dump_dir, filename) 62 fullname = os.path.join(self.dump_dir, filename)
59 if not os.path.exists(fullname): 63 if not os.path.exists(fullname):
60 break 64 break
61 with open(fullname, 'w') as dump_file: 65 return fullname
62 dump_file.write(output)
63
64
65class HostDumper(BaseDumper):
66 """ Class to get dumps from the host running the tests """
67
68 def __init__(self, cmds, parent_dir):
69 super(HostDumper, self).__init__(cmds, parent_dir)
70 66
71 def dump_host(self, dump_dir=""): 67 def _write_dump(self, command, output):
72 if dump_dir: 68 fullname = self._construct_filename(command)
73 self.dump_dir = dump_dir 69 os.makedirs(os.path.dirname(fullname), exist_ok=True)
74 env = os.environ.copy() 70 if isinstance(self, MonitorDumper):
75 env['PATH'] = '/usr/sbin:/sbin:/usr/bin:/bin' 71 with open(fullname, 'w') as json_file:
76 env['COLUMNS'] = '9999' 72 json.dump(output, json_file, indent=4)
77 for cmd in self.cmds: 73 else:
78 result = runCmd(cmd, ignore_status=True, env=env) 74 with open(fullname, 'w') as dump_file:
79 self._write_dump(cmd.split()[0], result.output) 75 dump_file.write(output)
80 76
81class TargetDumper(BaseDumper): 77class TargetDumper(BaseDumper):
82 """ Class to get dumps from target, it only works with QemuRunner """ 78 """ Class to get dumps from target, it only works with QemuRunner.
79 Will give up permanently after 5 errors from running commands over
80 the serial console. This helps to end testing when the target is really dead, hanging
81 or unresponsive.
82 """
83 83
84 def __init__(self, cmds, parent_dir, runner): 84 def __init__(self, cmds, parent_dir, runner):
85 super(TargetDumper, self).__init__(cmds, parent_dir) 85 super(TargetDumper, self).__init__(cmds, parent_dir)
86 self.runner = runner 86 self.runner = runner
87 self.errors = 0
87 88
88 def dump_target(self, dump_dir=""): 89 def dump_target(self, dump_dir=""):
90 if self.errors >= 5:
91 print("Too many errors when dumping data from target, assuming it is dead! Will not dump data anymore!")
92 return
89 if dump_dir: 93 if dump_dir:
90 self.dump_dir = dump_dir 94 self.dump_dir = dump_dir
91 for cmd in self.cmds: 95 for cmd in self.cmds:
92 # We can continue with the testing if serial commands fail 96 # We can continue with the testing if serial commands fail
93 try: 97 try:
94 (status, output) = self.runner.run_serial(cmd) 98 (status, output) = self.runner.run_serial(cmd)
99 if status == 0:
100 self.errors = self.errors + 1
95 self._write_dump(cmd.split()[0], output) 101 self._write_dump(cmd.split()[0], output)
96 except: 102 except:
103 self.errors = self.errors + 1
97 print("Tried to dump info from target but " 104 print("Tried to dump info from target but "
98 "serial console failed") 105 "serial console failed")
106 print("Failed CMD: %s" % (cmd))
107
108class MonitorDumper(BaseDumper):
109 """ Class to get dumps via the Qemu Monitor, it only works with QemuRunner
110 Will stop completely if there are more than 5 errors when dumping monitor data.
111 This helps to end testing when target is really dead, hanging or unresponsive.
112 """
113
114 def __init__(self, cmds, parent_dir, runner):
115 super(MonitorDumper, self).__init__(cmds, parent_dir)
116 self.runner = runner
117 self.errors = 0
118
119 def dump_monitor(self, dump_dir=""):
120 if self.runner is None:
121 return
122 if dump_dir:
123 self.dump_dir = dump_dir
124 if self.errors >= 5:
125 print("Too many errors when dumping data from qemu monitor, assuming it is dead! Will not dump data anymore!")
126 return
127 for cmd in self.cmds:
128 cmd_name = cmd.split()[0]
129 try:
130 if len(cmd.split()) > 1:
131 cmd_args = cmd.split()[1]
132 if "%s" in cmd_args:
133 filename = self._construct_filename(cmd_name)
134 cmd_data = json.loads(cmd_args % (filename))
135 output = self.runner.run_monitor(cmd_name, cmd_data)
136 else:
137 output = self.runner.run_monitor(cmd_name)
138 self._write_dump(cmd_name, output)
139 except Exception as e:
140 self.errors = self.errors + 1
141 print("Failed to dump QMP CMD: %s with\nException: %s" % (cmd_name, e))
diff --git a/meta/lib/oeqa/utils/ftools.py b/meta/lib/oeqa/utils/ftools.py
index 3093419cc7..a50aaa84c2 100644
--- a/meta/lib/oeqa/utils/ftools.py
+++ b/meta/lib/oeqa/utils/ftools.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/utils/gitarchive.py b/meta/lib/oeqa/utils/gitarchive.py
index 6e8040eb5c..10cb267dfa 100644
--- a/meta/lib/oeqa/utils/gitarchive.py
+++ b/meta/lib/oeqa/utils/gitarchive.py
@@ -100,9 +100,44 @@ def git_commit_data(repo, data_dir, branch, message, exclude, notes, log):
100 if os.path.exists(tmp_index): 100 if os.path.exists(tmp_index):
101 os.unlink(tmp_index) 101 os.unlink(tmp_index)
102 102
103def get_tags(repo, log, pattern=None, url=None):
104 """ Fetch remote tags from current repository
105
106 A pattern can be provided to filter the returned tags list
107 A URL can be provided if the local repository has no valid remote configured
108 """
109
110 base_cmd = ['ls-remote', '--refs', '--tags', '-q']
111 cmd = base_cmd.copy()
112
113 # First try to fetch tags from repository configured remote
114 cmd.append('origin')
115 if pattern:
116 cmd.append("refs/tags/"+pattern)
117 try:
118 tags_refs = repo.run_cmd(cmd)
119 tags = ["".join(d.split()[1].split('/', 2)[2:]) for d in tags_refs.splitlines()]
120 except GitError as e:
121 # If it fails, retry with repository url if one is provided
122 if url:
123 log.info("No remote repository configured, use provided url")
124 cmd = base_cmd.copy()
125 cmd.append(url)
126 if pattern:
127 cmd.append(pattern)
128 tags_refs = repo.run_cmd(cmd)
129 tags = ["".join(d.split()[1].split('/', 2)[2:]) for d in tags_refs.splitlines()]
130 else:
131 log.info("Read local tags only, some remote tags may be missed")
132 cmd = ["tag"]
133 if pattern:
134 cmd += ["-l", pattern]
135 tags = repo.run_cmd(cmd).splitlines()
136
137 return tags
103 138
104def expand_tag_strings(repo, name_pattern, msg_subj_pattern, msg_body_pattern, 139def expand_tag_strings(repo, name_pattern, msg_subj_pattern, msg_body_pattern,
105 keywords): 140 url, log, keywords):
106 """Generate tag name and message, with support for running id number""" 141 """Generate tag name and message, with support for running id number"""
107 keyws = keywords.copy() 142 keyws = keywords.copy()
108 # Tag number is handled specially: if not defined, we autoincrement it 143 # Tag number is handled specially: if not defined, we autoincrement it
@@ -116,7 +151,7 @@ def expand_tag_strings(repo, name_pattern, msg_subj_pattern, msg_body_pattern,
116 tag_re = tag_re.format(tag_number='(?P<tag_number>[0-9]{1,5})') 151 tag_re = tag_re.format(tag_number='(?P<tag_number>[0-9]{1,5})')
117 152
118 keyws['tag_number'] = 0 153 keyws['tag_number'] = 0
119 for existing_tag in repo.run_cmd('tag').splitlines(): 154 for existing_tag in get_tags(repo, log, url=url):
120 match = re.match(tag_re, existing_tag) 155 match = re.match(tag_re, existing_tag)
121 156
122 if match and int(match.group('tag_number')) >= keyws['tag_number']: 157 if match and int(match.group('tag_number')) >= keyws['tag_number']:
@@ -143,7 +178,8 @@ def gitarchive(data_dir, git_dir, no_create, bare, commit_msg_subject, commit_ms
143 if not no_tag and tagname: 178 if not no_tag and tagname:
144 tag_name, tag_msg = expand_tag_strings(data_repo, tagname, 179 tag_name, tag_msg = expand_tag_strings(data_repo, tagname,
145 tag_msg_subject, 180 tag_msg_subject,
146 tag_msg_body, keywords) 181 tag_msg_body,
182 push, log, keywords)
147 183
148 # Commit data 184 # Commit data
149 commit = git_commit_data(data_repo, data_dir, branch_name, 185 commit = git_commit_data(data_repo, data_dir, branch_name,
@@ -181,7 +217,7 @@ def get_test_runs(log, repo, tag_name, **kwargs):
181 217
182 # Get a list of all matching tags 218 # Get a list of all matching tags
183 tag_pattern = tag_name.format(**str_fields) 219 tag_pattern = tag_name.format(**str_fields)
184 tags = repo.run_cmd(['tag', '-l', tag_pattern]).splitlines() 220 tags = get_tags(repo, log, pattern=tag_pattern)
185 log.debug("Found %d tags matching pattern '%s'", len(tags), tag_pattern) 221 log.debug("Found %d tags matching pattern '%s'", len(tags), tag_pattern)
186 222
187 # Parse undefined fields from tag names 223 # Parse undefined fields from tag names
@@ -199,6 +235,8 @@ def get_test_runs(log, repo, tag_name, **kwargs):
199 revs = [] 235 revs = []
200 for tag in tags: 236 for tag in tags:
201 m = tag_re.match(tag) 237 m = tag_re.match(tag)
238 if not m:
239 continue
202 groups = m.groupdict() 240 groups = m.groupdict()
203 revs.append([groups[f] for f in undef_fields] + [tag]) 241 revs.append([groups[f] for f in undef_fields] + [tag])
204 242
@@ -219,7 +257,15 @@ def get_test_revs(log, repo, tag_name, **kwargs):
219 if not commit in revs: 257 if not commit in revs:
220 revs[commit] = TestedRev(commit, commit_num, [tag]) 258 revs[commit] = TestedRev(commit, commit_num, [tag])
221 else: 259 else:
222 assert commit_num == revs[commit].commit_number, "Commit numbers do not match" 260 if commit_num != revs[commit].commit_number:
261 # Historically we have incorrect commit counts of '1' in the repo so fix these up
262 if int(revs[commit].commit_number) < 5:
263 tags = revs[commit].tags
264 revs[commit] = TestedRev(commit, commit_num, [tags])
265 elif int(commit_num) < 5:
266 pass
267 else:
268 sys.exit("Commit numbers for commit %s don't match (%s vs %s)" % (commit, commit_num, revs[commit].commit_number))
223 revs[commit].tags.append(tag) 269 revs[commit].tags.append(tag)
224 270
225 # Return in sorted table 271 # Return in sorted table
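
`get_tags()` turns each `git ls-remote` line into a bare tag name by dropping the leading `refs/tags/` while keeping any slashes that are part of the tag itself. A small sketch of that transformation on hypothetical ls-remote output:

    # Each line is "<sha>\trefs/tags/<name>"; splitting the ref on the first
    # two '/' and re-joining preserves slashes inside the tag name.
    tags_refs = "0123abc\trefs/tags/yocto-4.0\n4567def\trefs/tags/runs/worker-1/42"

    tags = ["".join(d.split()[1].split('/', 2)[2:]) for d in tags_refs.splitlines()]
    print(tags)  # ['yocto-4.0', 'runs/worker-1/42']
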
diff --git a/meta/lib/oeqa/utils/httpserver.py b/meta/lib/oeqa/utils/httpserver.py
index 58d3c3b3f8..80752c1377 100644
--- a/meta/lib/oeqa/utils/httpserver.py
+++ b/meta/lib/oeqa/utils/httpserver.py
@@ -1,11 +1,13 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import http.server 7import http.server
8import logging
6import multiprocessing 9import multiprocessing
7import os 10import os
8import traceback
9import signal 11import signal
10from socketserver import ThreadingMixIn 12from socketserver import ThreadingMixIn
11 13
@@ -13,20 +15,24 @@ class HTTPServer(ThreadingMixIn, http.server.HTTPServer):
13 15
14 def server_start(self, root_dir, logger): 16 def server_start(self, root_dir, logger):
15 os.chdir(root_dir) 17 os.chdir(root_dir)
18 self.logger = logger
16 self.serve_forever() 19 self.serve_forever()
17 20
18class HTTPRequestHandler(http.server.SimpleHTTPRequestHandler): 21class HTTPRequestHandler(http.server.SimpleHTTPRequestHandler):
19 22
20 def log_message(self, format_str, *args): 23 def log_message(self, format_str, *args):
21 pass 24 self.server.logger.info(format_str, *args)
22 25
23class HTTPService(object): 26class HTTPService:
24 27
25 def __init__(self, root_dir, host='', port=0, logger=None): 28 def __init__(self, root_dir, host='', port=0, logger=None):
26 self.root_dir = root_dir 29 self.root_dir = root_dir
27 self.host = host 30 self.host = host
28 self.port = port 31 self.port = port
29 self.logger = logger 32 if logger:
33 self.logger = logger.getChild("HTTPService")
34 else:
35 self.logger = logging.getLogger("HTTPService")
30 36
31 def start(self): 37 def start(self):
32 if not os.path.exists(self.root_dir): 38 if not os.path.exists(self.root_dir):
@@ -38,6 +44,12 @@ class HTTPService(object):
38 self.port = self.server.server_port 44 self.port = self.server.server_port
39 self.process = multiprocessing.Process(target=self.server.server_start, args=[self.root_dir, self.logger]) 45 self.process = multiprocessing.Process(target=self.server.server_start, args=[self.root_dir, self.logger])
40 46
47 def handle_error(self, request, client_address):
48 import traceback
49 exception = traceback.format_exc()
50 self.logger.warn("Exception when handling %s: %s" % (request, exception))
51 self.server.handle_error = handle_error
52
41 # The signal handler from testimage.bbclass can cause deadlocks here 53 # The signal handler from testimage.bbclass can cause deadlocks here
42 # if the HTTPServer is terminated before it can restore the standard 54 # if the HTTPServer is terminated before it can restore the standard
43 #signal behaviour 55 #signal behaviour
@@ -47,7 +59,7 @@ class HTTPService(object):
47 signal.signal(signal.SIGTERM, orig) 59 signal.signal(signal.SIGTERM, orig)
48 60
49 if self.logger: 61 if self.logger:
50 self.logger.info("Started HTTPService on %s:%s" % (self.host, self.port)) 62 self.logger.info("Started HTTPService for %s on %s:%s" % (self.root_dir, self.host, self.port))
51 63
52 64
53 def stop(self): 65 def stop(self):
@@ -59,3 +71,10 @@ class HTTPService(object):
59 if self.logger: 71 if self.logger:
60 self.logger.info("Stopped HTTPService on %s:%s" % (self.host, self.port)) 72 self.logger.info("Stopped HTTPService on %s:%s" % (self.host, self.port))
61 73
74if __name__ == "__main__":
75 import sys, logging
76
77 logger = logging.getLogger(__name__)
78 logging.basicConfig(level=logging.DEBUG)
79 httpd = HTTPService(sys.argv[1], port=8888, logger=logger)
80 httpd.start()
diff --git a/meta/lib/oeqa/utils/logparser.py b/meta/lib/oeqa/utils/logparser.py
index 60e16d500e..496d9e0c90 100644
--- a/meta/lib/oeqa/utils/logparser.py
+++ b/meta/lib/oeqa/utils/logparser.py
@@ -1,8 +1,10 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
5import sys 7import enum
6import os 8import os
7import re 9import re
8 10
@@ -42,6 +44,8 @@ class PtestParser(object):
42 result = section_regex['begin'].search(line) 44 result = section_regex['begin'].search(line)
43 if result: 45 if result:
44 current_section['name'] = result.group(1) 46 current_section['name'] = result.group(1)
47 if current_section['name'] not in self.results:
48 self.results[current_section['name']] = {}
45 continue 49 continue
46 50
47 result = section_regex['end'].search(line) 51 result = section_regex['end'].search(line)
@@ -73,9 +77,10 @@ class PtestParser(object):
73 for t in test_regex: 77 for t in test_regex:
74 result = test_regex[t].search(line) 78 result = test_regex[t].search(line)
75 if result: 79 if result:
76 if current_section['name'] not in self.results: 80 try:
77 self.results[current_section['name']] = {} 81 self.results[current_section['name']][result.group(1).strip()] = t
78 self.results[current_section['name']][result.group(1).strip()] = t 82 except KeyError:
83 bb.warn("Result with no section: %s - %s" % (t, result.group(1).strip()))
79 84
80 # Python performance for repeatedly joining long strings is poor, do it all at once at the end. 85 # Python performance for repeatedly joining long strings is poor, do it all at once at the end.
81 # For 2.1 million lines in a log this reduces 18 hours to 12s. 86 # For 2.1 million lines in a log this reduces 18 hours to 12s.
@@ -101,30 +106,48 @@ class PtestParser(object):
101 f.write(status + ": " + test_name + "\n") 106 f.write(status + ": " + test_name + "\n")
102 107
103 108
104# ltp log parsing 109class LtpParser:
105class LtpParser(object): 110 """
106 def __init__(self): 111 Parse the machine-readable LTP log output into a ptest-friendly data structure.
107 self.results = {} 112 """
108 self.section = {'duration': "", 'log': ""}
109
110 def parse(self, logfile): 113 def parse(self, logfile):
111 test_regex = {} 114 results = {}
112 test_regex['PASSED'] = re.compile(r"PASS") 115 # Accumulate the duration here but as the log rounds quick tests down
113 test_regex['FAILED'] = re.compile(r"FAIL") 116 # to 0 seconds this is very much a lower bound. The caller can replace
114 test_regex['SKIPPED'] = re.compile(r"SKIP") 117 # the value.
115 118 section = {"duration": 0, "log": ""}
116 with open(logfile, errors='replace') as f: 119
120 class LtpExitCode(enum.IntEnum):
121 # Exit codes as defined in ltp/include/tst_res_flags.h
122 TPASS = 0 # Test passed flag
123 TFAIL = 1 # Test failed flag
124 TBROK = 2 # Test broken flag
125 TWARN = 4 # Test warning flag
126 TINFO = 16 # Test information flag
127 TCONF = 32 # Test not appropriate for configuration flag
128
129 with open(logfile, errors="replace") as f:
130 # Lines look like this:
131 # tag=cfs_bandwidth01 stime=1689762564 dur=0 exit=exited stat=32 core=no cu=0 cs=0
117 for line in f: 132 for line in f:
118 for t in test_regex: 133 if not line.startswith("tag="):
119 result = test_regex[t].search(line) 134 continue
120 if result:
121 self.results[line.split()[0].strip()] = t
122 135
123 for test in self.results: 136 values = dict(s.split("=") for s in line.strip().split())
124 result = self.results[test]
125 self.section['log'] = self.section['log'] + ("%s: %s\n" % (result.strip()[:-2], test.strip()))
126 137
127 return self.results, self.section 138 section["duration"] += int(values["dur"])
139 exitcode = int(values["stat"])
140 if values["exit"] == "exited" and exitcode == LtpExitCode.TCONF:
141 # Exited normally with the "invalid configuration" code
142 results[values["tag"]] = "SKIPPED"
143 elif exitcode == LtpExitCode.TPASS:
144 # Successful exit
145 results[values["tag"]] = "PASSED"
146 else:
147 # Other exit
148 results[values["tag"]] = "FAILED"
149
150 return results, section
128 151
129 152
130# ltp Compliance log parsing 153# ltp Compliance log parsing
@@ -135,30 +158,27 @@ class LtpComplianceParser(object):
135 158
136 def parse(self, logfile): 159 def parse(self, logfile):
137 test_regex = {} 160 test_regex = {}
138 test_regex['PASSED'] = re.compile(r"^PASS") 161 test_regex['FAILED'] = re.compile(r"FAIL")
139 test_regex['FAILED'] = re.compile(r"^FAIL")
140 test_regex['SKIPPED'] = re.compile(r"(?:UNTESTED)|(?:UNSUPPORTED)")
141 162
142 section_regex = {} 163 section_regex = {}
143 section_regex['test'] = re.compile(r"^Testing") 164 section_regex['test'] = re.compile(r"^Executing")
144 165
145 with open(logfile, errors='replace') as f: 166 with open(logfile, errors='replace') as f:
167 name = logfile
168 result = "PASSED"
146 for line in f: 169 for line in f:
147 result = section_regex['test'].search(line) 170 regex_result = section_regex['test'].search(line)
148 if result: 171 if regex_result:
149 self.name = "" 172 name = line.split()[1].strip()
150 self.name = line.split()[1].strip()
151 self.results[self.name] = "PASSED"
152 failed = 0
153 173
154 failed_result = test_regex['FAILED'].search(line) 174 regex_result = test_regex['FAILED'].search(line)
155 if failed_result: 175 if regex_result:
156 failed = line.split()[1].strip() 176 result = "FAILED"
157 if int(failed) > 0: 177 self.results[name] = result
158 self.results[self.name] = "FAILED"
159 178
160 for test in self.results: 179 for test in self.results:
161 result = self.results[test] 180 result = self.results[test]
181 print (self.results)
162 self.section['log'] = self.section['log'] + ("%s: %s\n" % (result.strip()[:-2], test.strip())) 182 self.section['log'] = self.section['log'] + ("%s: %s\n" % (result.strip()[:-2], test.strip()))
163 183
164 return self.results, self.section 184 return self.results, self.section
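
The rewritten `LtpParser` keys off the machine-readable `tag=... dur=... exit=... stat=...` lines instead of grepping for PASS/FAIL strings, mapping the LTP exit codes to ptest-style results. A minimal sketch of how one such line is classified, using the exit codes listed in the parser:

    # stat values follow ltp/include/tst_res_flags.h: 0 = TPASS, 32 = TCONF
    def classify(line):
        values = dict(s.split("=") for s in line.strip().split())
        exitcode = int(values["stat"])
        if values["exit"] == "exited" and exitcode == 32:
            return values["tag"], "SKIPPED"   # not appropriate for this configuration
        if exitcode == 0:
            return values["tag"], "PASSED"
        return values["tag"], "FAILED"

    print(classify("tag=cfs_bandwidth01 stime=1689762564 dur=0 exit=exited stat=32 core=no cu=0 cs=0"))
    # -> ('cfs_bandwidth01', 'SKIPPED')
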
diff --git a/meta/lib/oeqa/utils/metadata.py b/meta/lib/oeqa/utils/metadata.py
index 8013aa684d..15ec190c4a 100644
--- a/meta/lib/oeqa/utils/metadata.py
+++ b/meta/lib/oeqa/utils/metadata.py
@@ -27,9 +27,9 @@ def metadata_from_bb():
27 data_dict = get_bb_vars() 27 data_dict = get_bb_vars()
28 28
29 # Distro information 29 # Distro information
30 info_dict['distro'] = {'id': data_dict['DISTRO'], 30 info_dict['distro'] = {'id': data_dict.get('DISTRO', 'NODISTRO'),
31 'version_id': data_dict['DISTRO_VERSION'], 31 'version_id': data_dict.get('DISTRO_VERSION', 'NO_DISTRO_VERSION'),
32 'pretty_name': '%s %s' % (data_dict['DISTRO'], data_dict['DISTRO_VERSION'])} 32 'pretty_name': '%s %s' % (data_dict.get('DISTRO', 'NODISTRO'), data_dict.get('DISTRO_VERSION', 'NO_DISTRO_VERSION'))}
33 33
34 # Host distro information 34 # Host distro information
35 os_release = get_os_release() 35 os_release = get_os_release()
diff --git a/meta/lib/oeqa/utils/network.py b/meta/lib/oeqa/utils/network.py
index 59d01723a1..da4ffda9a9 100644
--- a/meta/lib/oeqa/utils/network.py
+++ b/meta/lib/oeqa/utils/network.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/utils/nfs.py b/meta/lib/oeqa/utils/nfs.py
index a37686c914..903469bfee 100644
--- a/meta/lib/oeqa/utils/nfs.py
+++ b/meta/lib/oeqa/utils/nfs.py
@@ -1,4 +1,8 @@
1#
2# Copyright OpenEmbedded Contributors
3#
1# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
5#
2import os 6import os
3import sys 7import sys
4import tempfile 8import tempfile
@@ -8,7 +12,7 @@ from oeqa.utils.commands import bitbake, get_bb_var, Command
8from oeqa.utils.network import get_free_port 12from oeqa.utils.network import get_free_port
9 13
10@contextlib.contextmanager 14@contextlib.contextmanager
11def unfs_server(directory, logger = None): 15def unfs_server(directory, logger = None, udp = True):
12 unfs_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "unfs3-native") 16 unfs_sysroot = get_bb_var("RECIPE_SYSROOT_NATIVE", "unfs3-native")
13 if not os.path.exists(os.path.join(unfs_sysroot, "usr", "bin", "unfsd")): 17 if not os.path.exists(os.path.join(unfs_sysroot, "usr", "bin", "unfsd")):
14 # build native tool 18 # build native tool
@@ -22,11 +26,11 @@ def unfs_server(directory, logger = None):
22 exports.write("{0} (rw,no_root_squash,no_all_squash,insecure)\n".format(directory).encode()) 26 exports.write("{0} (rw,no_root_squash,no_all_squash,insecure)\n".format(directory).encode())
23 27
24 # find some ports for the server 28 # find some ports for the server
25 nfsport, mountport = get_free_port(udp = True), get_free_port(udp = True) 29 nfsport, mountport = get_free_port(udp), get_free_port(udp)
26 30
27 nenv = dict(os.environ) 31 nenv = dict(os.environ)
28 nenv['PATH'] = "{0}/sbin:{0}/usr/sbin:{0}/usr/bin:".format(unfs_sysroot) + nenv.get('PATH', '') 32 nenv['PATH'] = "{0}/sbin:{0}/usr/sbin:{0}/usr/bin:".format(unfs_sysroot) + nenv.get('PATH', '')
29 cmd = Command(["unfsd", "-d", "-p", "-N", "-e", exports.name, "-n", str(nfsport), "-m", str(mountport)], 33 cmd = Command(["unfsd", "-d", "-p", "-e", exports.name, "-n", str(nfsport), "-m", str(mountport)],
30 bg = True, env = nenv, output_log = logger) 34 bg = True, env = nenv, output_log = logger)
31 cmd.run() 35 cmd.run()
32 yield nfsport, mountport 36 yield nfsport, mountport
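
The `unfs_server()` context manager now takes a `udp` flag so callers can request TCP ports instead of the default UDP ones. A hedged usage sketch, assuming it runs inside a build environment where `unfs3-native` can be located or built; the export directory is hypothetical:

    import logging
    from oeqa.utils.nfs import unfs_server

    logger = logging.getLogger("nfs-example")

    # Export /tmp/nfs-export via the userspace NFS server; the NFS and mount
    # ports are allocated dynamically, over UDP by default (udp=False for TCP).
    with unfs_server("/tmp/nfs-export", logger=logger, udp=True) as (nfsport, mountport):
        logger.info("unfsd serving on nfs port %d, mount port %d", nfsport, mountport)
        # ... run whatever needs the export while the server is up ...
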
diff --git a/meta/lib/oeqa/utils/package_manager.py b/meta/lib/oeqa/utils/package_manager.py
index 6b67f22fdd..db799b64d6 100644
--- a/meta/lib/oeqa/utils/package_manager.py
+++ b/meta/lib/oeqa/utils/package_manager.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4 6
diff --git a/meta/lib/oeqa/utils/postactions.py b/meta/lib/oeqa/utils/postactions.py
new file mode 100644
index 0000000000..ecdddd2d40
--- /dev/null
+++ b/meta/lib/oeqa/utils/postactions.py
@@ -0,0 +1,98 @@
1#
2# Copyright OpenEmbedded Contributors
3#
4# SPDX-License-Identifier: MIT
5#
6
7# Run a set of actions after tests. The runner provides the internal data
8# dictionary as well as the test context to any action to run.
9
10from oeqa.utils import get_json_result_dir
11
12def create_artifacts_directory(d, tc):
13 import shutil
14
15 local_artifacts_dir = os.path.join(get_json_result_dir(d), "artifacts")
16 if os.path.isdir(local_artifacts_dir):
17 shutil.rmtree(local_artifacts_dir)
18
19 os.makedirs(local_artifacts_dir)
20
21##################################################################
22# Host/target statistics
23##################################################################
24
25def get_target_disk_usage(d, tc):
26 output_file = os.path.join(get_json_result_dir(d), "artifacts", "target_disk_usage.txt")
27 try:
28 (status, output) = tc.target.run('df -h')
29 with open(output_file, 'w') as f:
30 f.write(output)
31 f.write("\n")
32 except Exception as e:
33 bb.warn(f"Can not get target disk usage: {e}")
34
35def get_host_disk_usage(d, tc):
36 import subprocess
37
38 output_file = os.path.join(get_json_result_dir(d), "artifacts", "host_disk_usage.txt")
39 try:
40 with open(output_file, 'w') as f:
41 output = subprocess.run(['df', '-hl'], check=True, text=True, stdout=f, env={})
42 except Exception as e:
43 bb.warn(f"Can not get host disk usage: {e}")
44
45##################################################################
46# Artifacts retrieval
47##################################################################
48
49def get_artifacts_list(target, raw_list):
50 result = []
51 # The passed list may contain patterns in paths, expand them directly on the target
52 for raw_path in raw_list.split():
53 cmd = f"for p in {raw_path}; do if [ -e $p ]; then echo $p; fi; done"
54 try:
55 status, output = target.run(cmd)
56 if status != 0 or not output:
57 raise Exception()
58 result += output.split()
59 except:
60 bb.note(f"No file/directory matching path {raw_path}")
61
62 return result
63
64def retrieve_test_artifacts(target, artifacts_list, target_dir):
65 local_artifacts_dir = os.path.join(target_dir, "artifacts")
66 for artifact_path in artifacts_list:
67 if not os.path.isabs(artifact_path):
68 bb.warn(f"{artifact_path} is not an absolute path")
69 continue
70 try:
71 dest_dir = os.path.join(local_artifacts_dir, os.path.dirname(artifact_path[1:]))
72 os.makedirs(dest_dir, exist_ok=True)
73 target.copyFrom(artifact_path, dest_dir)
74 except Exception as e:
75 bb.warn(f"Can not retrieve {artifact_path} from test target: {e}")
76
77def list_and_fetch_failed_tests_artifacts(d, tc):
78 artifacts_list = get_artifacts_list(tc.target, d.getVar("TESTIMAGE_FAILED_QA_ARTIFACTS"))
79 if not artifacts_list:
80 bb.warn("Could not load artifacts list, skip artifacts retrieval")
81 else:
82 retrieve_test_artifacts(tc.target, artifacts_list, get_json_result_dir(d))
83
84
85##################################################################
86# General post actions runner
87##################################################################
88
89def run_failed_tests_post_actions(d, tc):
90 post_actions=[
91 create_artifacts_directory,
92 list_and_fetch_failed_tests_artifacts,
93 get_target_disk_usage,
94 get_host_disk_usage
95 ]
96
97 for action in post_actions:
98 action(d, tc)
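
Each post action in the new postactions.py takes the datastore `d` and the test context `tc`, so extending the runner is mostly a matter of following that signature. A hypothetical extra action sketched under the same conventions (the `target_uptime.txt` name is made up for illustration, and `bb` is assumed to be importable as elsewhere in oeqa):

    import os
    import bb
    from oeqa.utils import get_json_result_dir

    def get_target_uptime(d, tc):
        # Same (d, tc) signature as the actions above; writes into the artifacts
        # directory prepared by create_artifacts_directory().
        output_file = os.path.join(get_json_result_dir(d), "artifacts", "target_uptime.txt")
        try:
            (status, output) = tc.target.run('uptime')
            with open(output_file, 'w') as f:
                f.write(output + "\n")
        except Exception as e:
            bb.warn(f"Can not get target uptime: {e}")
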
diff --git a/meta/lib/oeqa/utils/qemurunner.py b/meta/lib/oeqa/utils/qemurunner.py
index 77ec939ad7..cda43aad8c 100644
--- a/meta/lib/oeqa/utils/qemurunner.py
+++ b/meta/lib/oeqa/utils/qemurunner.py
@@ -19,9 +19,11 @@ import errno
19import string 19import string
20import threading 20import threading
21import codecs 21import codecs
22import logging 22import tempfile
23from oeqa.utils.dump import HostDumper
24from collections import defaultdict 23from collections import defaultdict
24from contextlib import contextmanager
25import importlib
26import traceback
25 27
26# Get Unicode non printable control chars 28# Get Unicode non printable control chars
27control_range = list(range(0,32))+list(range(127,160)) 29control_range = list(range(0,32))+list(range(127,160))
@@ -29,10 +31,19 @@ control_chars = [chr(x) for x in control_range
29 if chr(x) not in string.printable] 31 if chr(x) not in string.printable]
30re_control_char = re.compile('[%s]' % re.escape("".join(control_chars))) 32re_control_char = re.compile('[%s]' % re.escape("".join(control_chars)))
31 33
34def getOutput(o):
35 import fcntl
36 fl = fcntl.fcntl(o, fcntl.F_GETFL)
37 fcntl.fcntl(o, fcntl.F_SETFL, fl | os.O_NONBLOCK)
38 try:
39 return os.read(o.fileno(), 1000000).decode("utf-8")
40 except BlockingIOError:
41 return ""
42
32class QemuRunner: 43class QemuRunner:
33 44
34 def __init__(self, machine, rootfs, display, tmpdir, deploy_dir_image, logfile, boottime, dump_dir, dump_host_cmds, 45 def __init__(self, machine, rootfs, display, tmpdir, deploy_dir_image, logfile, boottime, dump_dir, use_kvm, logger, use_slirp=False,
35 use_kvm, logger, use_slirp=False, serial_ports=2, boot_patterns = defaultdict(str), use_ovmf=False, workdir=None): 46 serial_ports=2, boot_patterns = defaultdict(str), use_ovmf=False, workdir=None, tmpfsdir=None):
36 47
37 # Popen object for runqemu 48 # Popen object for runqemu
38 self.runqemu = None 49 self.runqemu = None
@@ -55,21 +66,24 @@ class QemuRunner:
55 self.boottime = boottime 66 self.boottime = boottime
56 self.logged = False 67 self.logged = False
57 self.thread = None 68 self.thread = None
69 self.threadsock = None
58 self.use_kvm = use_kvm 70 self.use_kvm = use_kvm
59 self.use_ovmf = use_ovmf 71 self.use_ovmf = use_ovmf
60 self.use_slirp = use_slirp 72 self.use_slirp = use_slirp
61 self.serial_ports = serial_ports 73 self.serial_ports = serial_ports
62 self.msg = '' 74 self.msg = ''
63 self.boot_patterns = boot_patterns 75 self.boot_patterns = boot_patterns
76 self.tmpfsdir = tmpfsdir
64 77
65 self.runqemutime = 120 78 self.runqemutime = 300
66 if not workdir: 79 if not workdir:
67 workdir = os.getcwd() 80 workdir = os.getcwd()
68 self.qemu_pidfile = workdir + '/pidfile_' + str(os.getpid()) 81 self.qemu_pidfile = workdir + '/pidfile_' + str(os.getpid())
69 self.host_dumper = HostDumper(dump_host_cmds, dump_dir)
70 self.monitorpipe = None 82 self.monitorpipe = None
71 83
72 self.logger = logger 84 self.logger = logger
85 # Whether we're expecting an exit and should show related errors
86 self.canexit = False
73 87
74 # Enable testing other OS's 88 # Enable testing other OS's
75 # Set commands for target communication, and default to Linux ALWAYS 89 # Set commands for target communication, and default to Linux ALWAYS
@@ -80,7 +94,7 @@ class QemuRunner:
80 accepted_patterns = ['search_reached_prompt', 'send_login_user', 'search_login_succeeded', 'search_cmd_finished'] 94 accepted_patterns = ['search_reached_prompt', 'send_login_user', 'search_login_succeeded', 'search_cmd_finished']
81 default_boot_patterns = defaultdict(str) 95 default_boot_patterns = defaultdict(str)
82 # Default to the usual patterns used to communicate with the target 96 # Default to the usual patterns used to communicate with the target
83 default_boot_patterns['search_reached_prompt'] = b' login:' 97 default_boot_patterns['search_reached_prompt'] = ' login:'
84 default_boot_patterns['send_login_user'] = 'root\n' 98 default_boot_patterns['send_login_user'] = 'root\n'
85 default_boot_patterns['search_login_succeeded'] = r"root@[a-zA-Z0-9\-]+:~#" 99 default_boot_patterns['search_login_succeeded'] = r"root@[a-zA-Z0-9\-]+:~#"
86 default_boot_patterns['search_cmd_finished'] = r"[a-zA-Z0-9]+@[a-zA-Z0-9\-]+:~#" 100 default_boot_patterns['search_cmd_finished'] = r"[a-zA-Z0-9]+@[a-zA-Z0-9\-]+:~#"
@@ -94,6 +108,7 @@ class QemuRunner:
94 try: 108 try:
95 sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 109 sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
96 sock.setblocking(0) 110 sock.setblocking(0)
111 sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
97 sock.bind(("127.0.0.1",0)) 112 sock.bind(("127.0.0.1",0))
98 sock.listen(2) 113 sock.listen(2)
99 port = sock.getsockname()[1] 114 port = sock.getsockname()[1]
@@ -104,30 +119,24 @@ class QemuRunner:
104 sock.close() 119 sock.close()
105 raise 120 raise
106 121
107 def log(self, msg): 122 def decode_qemulog(self, todecode):
108 if self.logfile: 123 # Sanitize the data received from qemu as it may contain control characters
109 # It is needed to sanitize the data received from qemu 124 msg = todecode.decode("utf-8", errors='backslashreplace')
110 # because is possible to have control characters 125 msg = re_control_char.sub('', msg)
111 msg = msg.decode("utf-8", errors='ignore') 126 return msg
112 msg = re_control_char.sub('', msg)
113 self.msg += msg
114 with codecs.open(self.logfile, "a", encoding="utf-8") as f:
115 f.write("%s" % msg)
116
117 def getOutput(self, o):
118 import fcntl
119 fl = fcntl.fcntl(o, fcntl.F_GETFL)
120 fcntl.fcntl(o, fcntl.F_SETFL, fl | os.O_NONBLOCK)
121 return os.read(o.fileno(), 1000000).decode("utf-8")
122 127
128 def log(self, msg, extension=""):
129 if self.logfile:
130 with codecs.open(self.logfile + extension, "ab") as f:
131 f.write(msg)
132 self.msg += self.decode_qemulog(msg)
123 133
124 def handleSIGCHLD(self, signum, frame): 134 def handleSIGCHLD(self, signum, frame):
125 if self.runqemu and self.runqemu.poll(): 135 if self.runqemu and self.runqemu.poll():
126 if self.runqemu.returncode: 136 if self.runqemu.returncode:
127 self.logger.error('runqemu exited with code %d' % self.runqemu.returncode) 137 self.logger.error('runqemu exited with code %d' % self.runqemu.returncode)
128 self.logger.error('Output from runqemu:\n%s' % self.getOutput(self.runqemu.stdout)) 138 self.logger.error('Output from runqemu:\n%s' % getOutput(self.runqemu.stdout))
129 self.stop() 139 self.stop()
130 self._dump_host()
131 140
132 def start(self, qemuparams = None, get_ip = True, extra_bootparams = None, runqemuparams='', launch_cmd=None, discard_writes=True): 141 def start(self, qemuparams = None, get_ip = True, extra_bootparams = None, runqemuparams='', launch_cmd=None, discard_writes=True):
133 env = os.environ.copy() 142 env = os.environ.copy()
@@ -150,6 +159,9 @@ class QemuRunner:
150 else: 159 else:
151 env["DEPLOY_DIR_IMAGE"] = self.deploy_dir_image 160 env["DEPLOY_DIR_IMAGE"] = self.deploy_dir_image
152 161
162 if self.tmpfsdir:
163 env["RUNQEMU_TMPFS_DIR"] = self.tmpfsdir
164
153 if not launch_cmd: 165 if not launch_cmd:
154 launch_cmd = 'runqemu %s' % ('snapshot' if discard_writes else '') 166 launch_cmd = 'runqemu %s' % ('snapshot' if discard_writes else '')
155 if self.use_kvm: 167 if self.use_kvm:
@@ -163,11 +175,38 @@ class QemuRunner:
163 launch_cmd += ' slirp' 175 launch_cmd += ' slirp'
164 if self.use_ovmf: 176 if self.use_ovmf:
165 launch_cmd += ' ovmf' 177 launch_cmd += ' ovmf'
166 launch_cmd += ' %s %s %s' % (runqemuparams, self.machine, self.rootfs) 178 launch_cmd += ' %s %s' % (runqemuparams, self.machine)
179 if self.rootfs.endswith('.vmdk'):
180 self.logger.debug('Bypassing VMDK rootfs for runqemu')
181 else:
182 launch_cmd += ' %s' % (self.rootfs)
167 183
168 return self.launch(launch_cmd, qemuparams=qemuparams, get_ip=get_ip, extra_bootparams=extra_bootparams, env=env) 184 return self.launch(launch_cmd, qemuparams=qemuparams, get_ip=get_ip, extra_bootparams=extra_bootparams, env=env)
169 185
170 def launch(self, launch_cmd, get_ip = True, qemuparams = None, extra_bootparams = None, env = None): 186 def launch(self, launch_cmd, get_ip = True, qemuparams = None, extra_bootparams = None, env = None):
187 # use logfile to determine the recipe-sysroot-native path and
188 # then add in the site-packages path components and add that
189 # to the python sys.path so the qmp module can be found.
190 python_path = os.path.dirname(os.path.dirname(self.logfile))
191 python_path += "/recipe-sysroot-native/usr/lib/qemu-python"
192 sys.path.append(python_path)
193 importlib.invalidate_caches()
194 try:
195 qmp = importlib.import_module("qmp")
196 except Exception as e:
197 self.logger.error("qemurunner: qmp module missing, please ensure it's installed in %s (%s)" % (python_path, str(e)))
198 return False
199 # Path relative to tmpdir used as cwd for qemu below to avoid unix socket path length issues
200 qmp_file = "." + next(tempfile._get_candidate_names())
201 qmp_param = ' -S -qmp unix:./%s,server,wait' % (qmp_file)
202 qmp_port = self.tmpdir + "/" + qmp_file
203 # Create a second socket connection for debugging use,
204 # note this will NOT cause qemu to block waiting for the connection
205 qmp_file2 = "." + next(tempfile._get_candidate_names())
206 qmp_param += ' -qmp unix:./%s,server,nowait' % (qmp_file2)
207 qmp_port2 = self.tmpdir + "/" + qmp_file2
208 self.logger.info("QMP Available for connection at %s" % (qmp_port2))
209
171 try: 210 try:
172 if self.serial_ports >= 2: 211 if self.serial_ports >= 2:
173 self.threadsock, threadport = self.create_socket() 212 self.threadsock, threadport = self.create_socket()
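
Note: the second, "nowait" QMP socket added above is only advertised in the log ("QMP Available for connection at ..."). A hedged sketch of how one could attach to it from the host for debugging, using only the qmp.legacy calls this patch itself relies on; the socket path below is an example, not a real file.

import os
from qmp.legacy import QEMUMonitorProtocol

qmp_port2 = "/path/to/tmpdir/.tmpabcd1234"   # example: path printed by the runner

# chdir next to the socket to stay under the unix socket path length limit,
# mirroring the chdir dance used by the patch itself
origpath = os.getcwd()
os.chdir(os.path.dirname(qmp_port2))
try:
    mon = QEMUMonitorProtocol(os.path.basename(qmp_port2))
    mon.settimeout(60)
    mon.connect()
    # e.g. {'return': {'status': 'running', ...}}
    print(mon.cmd_raw('query-status'))
    mon.close()
finally:
    os.chdir(origpath)
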
@@ -176,7 +215,7 @@ class QemuRunner:
176 self.logger.error("Failed to create listening socket: %s" % msg[1]) 215 self.logger.error("Failed to create listening socket: %s" % msg[1])
177 return False 216 return False
178 217
179 bootparams = 'console=tty1 console=ttyS0,115200n8 printk.time=1' 218 bootparams = ' printk.time=1'
180 if extra_bootparams: 219 if extra_bootparams:
181 bootparams = bootparams + ' ' + extra_bootparams 220 bootparams = bootparams + ' ' + extra_bootparams
182 221
@@ -184,7 +223,8 @@ class QemuRunner:
184 # and analyze descendants in order to determine it. 223
185 if os.path.exists(self.qemu_pidfile): 224 if os.path.exists(self.qemu_pidfile):
186 os.remove(self.qemu_pidfile) 225 os.remove(self.qemu_pidfile)
187 self.qemuparams = 'bootparams="{0}" qemuparams="-pidfile {1}"'.format(bootparams, self.qemu_pidfile) 226 self.qemuparams = 'bootparams="{0}" qemuparams="-pidfile {1} {2}"'.format(bootparams, self.qemu_pidfile, qmp_param)
227
188 if qemuparams: 228 if qemuparams:
189 self.qemuparams = self.qemuparams[:-1] + " " + qemuparams + " " + '\"' 229 self.qemuparams = self.qemuparams[:-1] + " " + qemuparams + " " + '\"'
190 230
@@ -196,14 +236,15 @@ class QemuRunner:
196 self.origchldhandler = signal.getsignal(signal.SIGCHLD) 236 self.origchldhandler = signal.getsignal(signal.SIGCHLD)
197 signal.signal(signal.SIGCHLD, self.handleSIGCHLD) 237 signal.signal(signal.SIGCHLD, self.handleSIGCHLD)
198 238
199 self.logger.debug('launchcmd=%s'%(launch_cmd)) 239 self.logger.debug('launchcmd=%s' % (launch_cmd))
200 240
201 # FIXME: We pass in stdin=subprocess.PIPE here to work around stty 241 # FIXME: We pass in stdin=subprocess.PIPE here to work around stty
202 # blocking at the end of the runqemu script when using this within 242 # blocking at the end of the runqemu script when using this within
203 # oe-selftest (this makes stty error out immediately). There ought 243 # oe-selftest (this makes stty error out immediately). There ought
204 # to be a proper fix but this will suffice for now. 244 # to be a proper fix but this will suffice for now.
205 self.runqemu = subprocess.Popen(launch_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, stdin=subprocess.PIPE, preexec_fn=os.setpgrp, env=env) 245 self.runqemu = subprocess.Popen(launch_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, stdin=subprocess.PIPE, preexec_fn=os.setpgrp, env=env, cwd=self.tmpdir)
206 output = self.runqemu.stdout 246 output = self.runqemu.stdout
247 launch_time = time.time()
207 248
208 # 249 #
209 # We need the preexec_fn above so that all runqemu processes can easily be killed 250 # We need the preexec_fn above so that all runqemu processes can easily be killed
@@ -229,30 +270,33 @@ class QemuRunner:
229 r = os.fdopen(r) 270 r = os.fdopen(r)
230 x = r.read() 271 x = r.read()
231 os.killpg(os.getpgid(self.runqemu.pid), signal.SIGTERM) 272 os.killpg(os.getpgid(self.runqemu.pid), signal.SIGTERM)
232 sys.exit(0) 273 os._exit(0)
233 274
234 self.logger.debug("runqemu started, pid is %s" % self.runqemu.pid) 275 self.logger.debug("runqemu started, pid is %s" % self.runqemu.pid)
235 self.logger.debug("waiting at most %s seconds for qemu pid (%s)" % 276 self.logger.debug("waiting at most %d seconds for qemu pid (%s)" %
236 (self.runqemutime, time.strftime("%D %H:%M:%S"))) 277 (self.runqemutime, time.strftime("%D %H:%M:%S")))
237 endtime = time.time() + self.runqemutime 278 endtime = time.time() + self.runqemutime
238 while not self.is_alive() and time.time() < endtime: 279 while not self.is_alive() and time.time() < endtime:
239 if self.runqemu.poll(): 280 if self.runqemu.poll():
240 if self.runqemu_exited: 281 if self.runqemu_exited:
282 self.logger.warning("runqemu exited during is_alive() test")
241 return False 283 return False
242 if self.runqemu.returncode: 284 if self.runqemu.returncode:
243 # No point waiting any longer 285 # No point waiting any longer
244 self.logger.warning('runqemu exited with code %d' % self.runqemu.returncode) 286 self.logger.warning('runqemu exited with code %d' % self.runqemu.returncode)
245 self._dump_host() 287 self.logger.warning("Output from runqemu:\n%s" % getOutput(output))
246 self.logger.warning("Output from runqemu:\n%s" % self.getOutput(output))
247 self.stop() 288 self.stop()
248 return False 289 return False
249 time.sleep(0.5) 290 time.sleep(0.5)
250 291
251 if self.runqemu_exited: 292 if self.runqemu_exited:
252 return False 293 self.logger.warning("runqemu exited after timeout")
294
295 if self.runqemu.returncode:
296 self.logger.warning('runqemu exited with code %d' % self.runqemu.returncode)
253 297
254 if not self.is_alive(): 298 if not self.is_alive():
255 self.logger.error("Qemu pid didn't appear in %s seconds (%s)" % 299 self.logger.error("Qemu pid didn't appear in %d seconds (%s)" %
256 (self.runqemutime, time.strftime("%D %H:%M:%S"))) 300 (self.runqemutime, time.strftime("%D %H:%M:%S")))
257 301
258 qemu_pid = None 302 qemu_pid = None
@@ -267,8 +311,7 @@ class QemuRunner:
267 ps = subprocess.Popen(['ps', 'axww', '-o', 'pid,ppid,pri,ni,command '], stdout=subprocess.PIPE).communicate()[0] 311 ps = subprocess.Popen(['ps', 'axww', '-o', 'pid,ppid,pri,ni,command '], stdout=subprocess.PIPE).communicate()[0]
268 processes = ps.decode("utf-8") 312 processes = ps.decode("utf-8")
269 self.logger.debug("Running processes:\n%s" % processes) 313 self.logger.debug("Running processes:\n%s" % processes)
270 self._dump_host() 314 op = getOutput(output)
271 op = self.getOutput(output)
272 self.stop() 315 self.stop()
273 if op: 316 if op:
274 self.logger.error("Output from runqemu:\n%s" % op) 317 self.logger.error("Output from runqemu:\n%s" % op)
@@ -276,10 +319,79 @@ class QemuRunner:
276 self.logger.error("No output from runqemu.\n") 319 self.logger.error("No output from runqemu.\n")
277 return False 320 return False
278 321
322 # Create the client socket for the QEMU Monitor Control Socket
323 # This will allow us to read status from Qemu if the process
324 # is still alive
325 self.logger.debug("QMP Initializing to %s" % (qmp_port))
326 # chdir dance for path length issues with unix sockets
327 origpath = os.getcwd()
328 try:
329 os.chdir(os.path.dirname(qmp_port))
330 try:
331 from qmp.legacy import QEMUMonitorProtocol
332 self.qmp = QEMUMonitorProtocol(os.path.basename(qmp_port))
333 except OSError as msg:
334 self.logger.warning("Failed to initialize qemu monitor socket: %s File: %s" % (msg, msg.filename))
335 return False
336
337 self.logger.debug("QMP Connecting to %s" % (qmp_port))
338 if not os.path.exists(qmp_port) and self.is_alive():
339 self.logger.debug("QMP Port does not exist, waiting for it to be created")
340 endtime = time.time() + self.runqemutime
341 while not os.path.exists(qmp_port) and self.is_alive() and time.time() < endtime:
342 self.logger.info("QMP port does not exist yet!")
343 time.sleep(0.5)
344 if not os.path.exists(qmp_port) and self.is_alive():
345 self.logger.warning("QMP Port still does not exist but QEMU is alive")
346 return False
347
348 try:
349 # set timeout value for all QMP calls
350 self.qmp.settimeout(self.runqemutime)
351 self.qmp.connect()
352 connect_time = time.time()
353 self.logger.info("QMP connected to QEMU at %s and took %.2f seconds" %
354 (time.strftime("%D %H:%M:%S"),
355 time.time() - launch_time))
356 except OSError as msg:
357 self.logger.warning("Failed to connect qemu monitor socket: %s File: %s" % (msg, msg.filename))
358 return False
359 except qmp.legacy.QMPError as msg:
360 self.logger.warning("Failed to communicate with qemu monitor: %s" % (msg))
361 return False
362 finally:
363 os.chdir(origpath)
364
365 # We worry that mmap'd libraries may cause page faults which hang the qemu VM for periods
366 # causing failures. Before we "start" qemu, read through its mapped files to try and
367 # ensure we don't hit page faults later
368 mapdir = "/proc/" + str(self.qemupid) + "/map_files/"
369 try:
370 for f in os.listdir(mapdir):
371 try:
372 linktarget = os.readlink(os.path.join(mapdir, f))
373 if not linktarget.startswith("/") or linktarget.startswith("/dev") or "deleted" in linktarget:
374 continue
375 with open(linktarget, "rb") as readf:
376 data = True
377 while data:
378 data = readf.read(4096)
379 except FileNotFoundError:
380 continue
381 # CentOS 7 doesn't allow us to read /map_files/
382 except PermissionError:
383 pass
384
385 # Release the qemu process to continue running
386 self.run_monitor('cont')
387 self.logger.info("QMP released QEMU at %s and took %.2f seconds from connect" %
388 (time.strftime("%D %H:%M:%S"),
389 time.time() - connect_time))
390
279 # We are alive: qemu is running 391 # We are alive: qemu is running
280 out = self.getOutput(output) 392 out = getOutput(output)
281 netconf = False # network configuration is not required by default 393 netconf = False # network configuration is not required by default
282 self.logger.debug("qemu started in %s seconds - qemu procces pid is %s (%s)" % 394 self.logger.debug("qemu started in %.2f seconds - qemu procces pid is %s (%s)" %
283 (time.time() - (endtime - self.runqemutime), 395 (time.time() - (endtime - self.runqemutime),
284 self.qemupid, time.strftime("%D %H:%M:%S"))) 396 self.qemupid, time.strftime("%D %H:%M:%S")))
285 cmdline = '' 397 cmdline = ''
@@ -291,9 +403,10 @@ class QemuRunner:
291 cmdline = re_control_char.sub(' ', cmdline) 403 cmdline = re_control_char.sub(' ', cmdline)
292 try: 404 try:
293 if self.use_slirp: 405 if self.use_slirp:
294 tcp_ports = cmdline.split("hostfwd=tcp::")[1] 406 tcp_ports = cmdline.split("hostfwd=tcp:")[1]
407 ip, tcp_ports = tcp_ports.split(":")[:2]
295 host_port = tcp_ports[:tcp_ports.find('-')] 408 host_port = tcp_ports[:tcp_ports.find('-')]
296 self.ip = "localhost:%s" % host_port 409 self.ip = "%s:%s" % (ip, host_port)
297 else: 410 else:
298 ips = re.findall(r"((?:[0-9]{1,3}\.){3}[0-9]{1,3})", cmdline.split("ip=")[1]) 411 ips = re.findall(r"((?:[0-9]{1,3}\.){3}[0-9]{1,3})", cmdline.split("ip=")[1])
299 self.ip = ips[0] 412 self.ip = ips[0]
@@ -301,8 +414,8 @@ class QemuRunner:
301 self.logger.debug("qemu cmdline used:\n{}".format(cmdline)) 414 self.logger.debug("qemu cmdline used:\n{}".format(cmdline))
302 except (IndexError, ValueError): 415 except (IndexError, ValueError):
303 # Try to get network configuration from runqemu output 416 # Try to get network configuration from runqemu output
304 match = re.match(r'.*Network configuration: (?:ip=)*([0-9.]+)::([0-9.]+):([0-9.]+)$.*', 417 match = re.match(r'.*Network configuration: (?:ip=)*([0-9.]+)::([0-9.]+):([0-9.]+).*',
305 out, re.MULTILINE|re.DOTALL) 418 out, re.MULTILINE | re.DOTALL)
306 if match: 419 if match:
307 self.ip, self.server_ip, self.netmask = match.groups() 420 self.ip, self.server_ip, self.netmask = match.groups()
308 # network configuration is required as we couldn't get it 421 # network configuration is required as we couldn't get it
@@ -313,16 +426,16 @@ class QemuRunner:
313 self.logger.error("Couldn't get ip from qemu command line and runqemu output! " 426 self.logger.error("Couldn't get ip from qemu command line and runqemu output! "
314 "Here is the qemu command line used:\n%s\n" 427 "Here is the qemu command line used:\n%s\n"
315 "and output from runqemu:\n%s" % (cmdline, out)) 428 "and output from runqemu:\n%s" % (cmdline, out))
316 self._dump_host()
317 self.stop() 429 self.stop()
318 return False 430 return False
319 431
320 self.logger.debug("Target IP: %s" % self.ip) 432 self.logger.debug("Target IP: %s" % self.ip)
321 self.logger.debug("Server IP: %s" % self.server_ip) 433 self.logger.debug("Server IP: %s" % self.server_ip)
322 434
435 self.thread = LoggingThread(self.log, self.threadsock, self.logger, self.runqemu.stdout)
436 self.thread.start()
437
323 if self.serial_ports >= 2: 438 if self.serial_ports >= 2:
324 self.thread = LoggingThread(self.log, self.threadsock, self.logger)
325 self.thread.start()
326 if not self.thread.connection_established.wait(self.boottime): 439 if not self.thread.connection_established.wait(self.boottime):
327 self.logger.error("Didn't receive a console connection from qemu. " 440 self.logger.error("Didn't receive a console connection from qemu. "
328 "Here is the qemu command line used:\n%s\nand " 441 "Here is the qemu command line used:\n%s\nand "
@@ -334,7 +447,7 @@ class QemuRunner:
334 self.logger.debug("Waiting at most %d seconds for login banner (%s)" % 447 self.logger.debug("Waiting at most %d seconds for login banner (%s)" %
335 (self.boottime, time.strftime("%D %H:%M:%S"))) 448 (self.boottime, time.strftime("%D %H:%M:%S")))
336 endtime = time.time() + self.boottime 449 endtime = time.time() + self.boottime
337 socklist = [self.server_socket] 450 filelist = [self.server_socket]
338 reachedlogin = False 451 reachedlogin = False
339 stopread = False 452 stopread = False
340 qemusock = None 453 qemusock = None
@@ -342,61 +455,82 @@ class QemuRunner:
342 data = b'' 455 data = b''
343 while time.time() < endtime and not stopread: 456 while time.time() < endtime and not stopread:
344 try: 457 try:
345 sread, swrite, serror = select.select(socklist, [], [], 5) 458 sread, swrite, serror = select.select(filelist, [], [], 5)
346 except InterruptedError: 459 except InterruptedError:
347 continue 460 continue
348 for sock in sread: 461 for file in sread:
349 if sock is self.server_socket: 462 if file is self.server_socket:
350 qemusock, addr = self.server_socket.accept() 463 qemusock, addr = self.server_socket.accept()
351 qemusock.setblocking(0) 464 qemusock.setblocking(False)
352 socklist.append(qemusock) 465 filelist.append(qemusock)
353 socklist.remove(self.server_socket) 466 filelist.remove(self.server_socket)
354 self.logger.debug("Connection from %s:%s" % addr) 467 self.logger.debug("Connection from %s:%s" % addr)
355 else: 468 else:
356 data = data + sock.recv(1024) 469 # try to avoid reading only a single character at a time
470 time.sleep(0.1)
471 if hasattr(file, 'read'):
472 read = file.read(1024)
473 elif hasattr(file, 'recv'):
474 read = file.recv(1024)
475 else:
476 self.logger.error('Invalid file type: %s' % (file))
477 read = b''
478
479 self.logger.debug2('Partial boot log:\n%s' % (read.decode('utf-8', errors='backslashreplace')))
480 data = data + read
357 if data: 481 if data:
358 bootlog += data 482 bootlog += data
359 if self.serial_ports < 2: 483 self.log(data, extension = ".2")
360 # this socket has mixed console/kernel data, log it to logfile
361 self.log(data)
362
363 data = b'' 484 data = b''
364 if self.boot_patterns['search_reached_prompt'] in bootlog: 485
486 if bytes(self.boot_patterns['search_reached_prompt'], 'utf-8') in bootlog:
487 self.server_socket.close()
365 self.server_socket = qemusock 488 self.server_socket = qemusock
366 stopread = True 489 stopread = True
367 reachedlogin = True 490 reachedlogin = True
368 self.logger.debug("Reached login banner in %s seconds (%s)" % 491 self.logger.debug("Reached login banner in %.2f seconds (%s)" %
369 (time.time() - (endtime - self.boottime), 492 (time.time() - (endtime - self.boottime),
370 time.strftime("%D %H:%M:%S"))) 493 time.strftime("%D %H:%M:%S")))
371 else: 494 else:
372 # no need to check if reachedlogin unless we support multiple connections 495 # no need to check if reachedlogin unless we support multiple connections
373 self.logger.debug("QEMU socket disconnected before login banner reached. (%s)" % 496 self.logger.debug("QEMU socket disconnected before login banner reached. (%s)" %
374 time.strftime("%D %H:%M:%S")) 497 time.strftime("%D %H:%M:%S"))
375 socklist.remove(sock) 498 filelist.remove(file)
376 sock.close() 499 file.close()
377 stopread = True 500 stopread = True
378 501
379
380 if not reachedlogin: 502 if not reachedlogin:
381 if time.time() >= endtime: 503 if time.time() >= endtime:
382 self.logger.warning("Target didn't reach login banner in %d seconds (%s)" % 504 self.logger.warning("Target didn't reach login banner in %d seconds (%s)" %
383 (self.boottime, time.strftime("%D %H:%M:%S"))) 505 (self.boottime, time.strftime("%D %H:%M:%S")))
384 tail = lambda l: "\n".join(l.splitlines()[-25:]) 506 tail = lambda l: "\n".join(l.splitlines()[-25:])
385 bootlog = bootlog.decode("utf-8") 507 bootlog = self.decode_qemulog(bootlog)
386 # in case bootlog is empty, use tail qemu log store at self.msg 508 self.logger.warning("Last 25 lines of login console (%d):\n%s" % (len(bootlog), tail(bootlog)))
387 lines = tail(bootlog if bootlog else self.msg) 509 self.logger.warning("Last 25 lines of all logging (%d):\n%s" % (len(self.msg), tail(self.msg)))
388 self.logger.warning("Last 25 lines of text:\n%s" % lines)
389 self.logger.warning("Check full boot log: %s" % self.logfile) 510 self.logger.warning("Check full boot log: %s" % self.logfile)
390 self._dump_host()
391 self.stop() 511 self.stop()
512 data = True
513 while data:
514 try:
515 time.sleep(1)
516 data = qemusock.recv(1024)
517 self.log(data, extension = ".2")
518 self.logger.warning('Extra log data read: %s\n' % (data.decode('utf-8', errors='backslashreplace')))
519 except Exception as e:
520 self.logger.warning('Extra log data exception %s' % repr(e))
521 data = None
522 self.thread.serial_lock.release()
392 return False 523 return False
393 524
525 with self.thread.serial_lock:
526 self.thread.set_serialsock(self.server_socket)
527
394 # If we are not able to login the tests can continue 528 # If we are not able to login the tests can continue
395 try: 529 try:
396 (status, output) = self.run_serial(self.boot_patterns['send_login_user'], raw=True, timeout=120) 530 (status, output) = self.run_serial(self.boot_patterns['send_login_user'], raw=True, timeout=120)
397 if re.search(self.boot_patterns['search_login_succeeded'], output): 531 if re.search(self.boot_patterns['search_login_succeeded'], output):
398 self.logged = True 532 self.logged = True
399 self.logger.debug("Logged as root in serial console") 533 self.logger.debug("Logged in as %s in serial console" % self.boot_patterns['send_login_user'].replace("\n", ""))
400 if netconf: 534 if netconf:
401 # configure guest networking 535 # configure guest networking
402 cmd = "ifconfig eth0 %s netmask %s up\n" % (self.ip, self.netmask) 536 cmd = "ifconfig eth0 %s netmask %s up\n" % (self.ip, self.netmask)
@@ -407,7 +541,7 @@ class QemuRunner:
407 self.logger.debug("Couldn't configure guest networking") 541 self.logger.debug("Couldn't configure guest networking")
408 else: 542 else:
409 self.logger.warning("Couldn't login into serial console" 543 self.logger.warning("Couldn't login into serial console"
410 " as root using blank password") 544 " as %s using blank password" % self.boot_patterns['send_login_user'].replace("\n", ""))
411 self.logger.warning("The output:\n%s" % output) 545 self.logger.warning("The output:\n%s" % output)
412 except: 546 except:
413 self.logger.warning("Serial console failed while trying to login") 547 self.logger.warning("Serial console failed while trying to login")
@@ -427,16 +561,24 @@ class QemuRunner:
427 except OSError as e: 561 except OSError as e:
428 if e.errno != errno.ESRCH: 562 if e.errno != errno.ESRCH:
429 raise 563 raise
430 endtime = time.time() + self.runqemutime 564 try:
431 while self.runqemu.poll() is None and time.time() < endtime: 565 outs, errs = self.runqemu.communicate(timeout=self.runqemutime)
432 time.sleep(1) 566 if outs:
433 if self.runqemu.poll() is None: 567 self.logger.info("Output from runqemu:\n%s", outs.decode("utf-8"))
568 if errs:
569 self.logger.info("Stderr from runqemu:\n%s", errs.decode("utf-8"))
570 except subprocess.TimeoutExpired:
434 self.logger.debug("Sending SIGKILL to runqemu") 571 self.logger.debug("Sending SIGKILL to runqemu")
435 os.killpg(os.getpgid(self.runqemu.pid), signal.SIGKILL) 572 os.killpg(os.getpgid(self.runqemu.pid), signal.SIGKILL)
573 if not self.runqemu.stdout.closed:
574 self.logger.info("Output from runqemu:\n%s" % getOutput(self.runqemu.stdout))
436 self.runqemu.stdin.close() 575 self.runqemu.stdin.close()
437 self.runqemu.stdout.close() 576 self.runqemu.stdout.close()
438 self.runqemu_exited = True 577 self.runqemu_exited = True
439 578
579 if hasattr(self, 'qmp') and self.qmp:
580 self.qmp.close()
581 self.qmp = None
440 if hasattr(self, 'server_socket') and self.server_socket: 582 if hasattr(self, 'server_socket') and self.server_socket:
441 self.server_socket.close() 583 self.server_socket.close()
442 self.server_socket = None 584 self.server_socket = None
@@ -467,6 +609,11 @@ class QemuRunner:
467 self.thread.stop() 609 self.thread.stop()
468 self.thread.join() 610 self.thread.join()
469 611
612 def allowexit(self):
613 self.canexit = True
614 if self.thread:
615 self.thread.allowexit()
616
470 def restart(self, qemuparams = None): 617 def restart(self, qemuparams = None):
471 self.logger.warning("Restarting qemu process") 618 self.logger.warning("Restarting qemu process")
472 if self.runqemu.poll() is None: 619 if self.runqemu.poll() is None:
@@ -483,8 +630,12 @@ class QemuRunner:
483 # so it's possible that the file has been created but the content is empty 630 # so it's possible that the file has been created but the content is empty
484 pidfile_timeout = time.time() + 3 631 pidfile_timeout = time.time() + 3
485 while time.time() < pidfile_timeout: 632 while time.time() < pidfile_timeout:
486 with open(self.qemu_pidfile, 'r') as f: 633 try:
487 qemu_pid = f.read().strip() 634 with open(self.qemu_pidfile, 'r') as f:
635 qemu_pid = f.read().strip()
636 except FileNotFoundError:
637 # is_alive() may be used to detect shutdown, so the pid file can legitimately disappear
638 return False
488 # file created but not yet written contents 639 # file created but not yet written contents
489 if not qemu_pid: 640 if not qemu_pid:
490 time.sleep(0.5) 641 time.sleep(0.5)
@@ -495,34 +646,49 @@ class QemuRunner:
495 return True 646 return True
496 return False 647 return False
497 648
649 def run_monitor(self, command, args=None, timeout=60):
650 if hasattr(self, 'qmp') and self.qmp:
651 self.qmp.settimeout(timeout)
652 if args is not None:
653 return self.qmp.cmd_raw(command, args)
654 else:
655 return self.qmp.cmd_raw(command)
656
498 def run_serial(self, command, raw=False, timeout=60): 657 def run_serial(self, command, raw=False, timeout=60):
658 # Returns (status, output) where status is 1 on success and 0 on error
659
499 # We assume the target system has echo, used to report the command status 660
500 if not raw: 661 if not raw:
501 command = "%s; echo $?\n" % command 662 command = "%s; echo $?\n" % command
502 663
503 data = '' 664 data = ''
504 status = 0 665 status = 0
505 self.server_socket.sendall(command.encode('utf-8')) 666 with self.thread.serial_lock:
506 start = time.time() 667 self.server_socket.sendall(command.encode('utf-8'))
507 end = start + timeout 668 start = time.time()
508 while True: 669 end = start + timeout
509 now = time.time() 670 while True:
510 if now >= end: 671 now = time.time()
511 data += "<<< run_serial(): command timed out after %d seconds without output >>>\r\n\r\n" % timeout 672 if now >= end:
512 break 673 data += "<<< run_serial(): command timed out after %d seconds without output >>>\r\n\r\n" % timeout
513 try: 674 break
514 sread, _, _ = select.select([self.server_socket],[],[], end - now) 675 try:
515 except InterruptedError: 676 sread, _, _ = select.select([self.server_socket],[],[], end - now)
516 continue 677 except InterruptedError:
517 if sread: 678 continue
518 answer = self.server_socket.recv(1024) 679 if sread:
519 if answer: 680 # try to avoid reading single character at a time
520 data += answer.decode('utf-8') 681 time.sleep(0.1)
521 # Search the prompt to stop 682 answer = self.server_socket.recv(1024)
522 if re.search(self.boot_patterns['search_cmd_finished'], data): 683 if answer:
523 break 684 data += answer.decode('utf-8')
524 else: 685 # Search the prompt to stop
525 raise Exception("No data on serial console socket") 686 if re.search(self.boot_patterns['search_cmd_finished'], data):
687 break
688 else:
689 if self.canexit:
690 return (1, "")
691 raise Exception("No data on serial console socket, connection closed?")
526 692
527 if data: 693 if data:
528 if raw: 694 if raw:
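
Note: a brief usage sketch for the two entry points above, assuming an already-launched QemuRunner instance named runner; 'query-status' and 'cont' are standard QMP commands, and the return convention is the one documented in run_serial() above.

# QMP side: talk to qemu itself (status, pause/continue, ...)
print(runner.run_monitor('query-status'))
runner.run_monitor('cont')

# Serial side: run a command in the guest; status is 1 on success, 0 on error
status, output = runner.run_serial('uname -a', timeout=60)
if status:
    print("guest kernel: %s" % output.strip())
else:
    print("command failed: %s" % output)
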
@@ -541,34 +707,46 @@ class QemuRunner:
541 status = 1 707 status = 1
542 return (status, str(data)) 708 return (status, str(data))
543 709
544 710@contextmanager
545 def _dump_host(self): 711def nonblocking_lock(lock):
546 self.host_dumper.create_dir("qemu") 712 locked = lock.acquire(False)
547 self.logger.warning("Qemu ended unexpectedly, dump data from host" 713 try:
548 " is in %s" % self.host_dumper.dump_dir) 714 yield locked
549 self.host_dumper.dump_host() 715 finally:
716 if locked:
717 lock.release()
550 718
551# This class is for reading data from a socket and passing it to logfunc 719# This class is for reading data from a socket and passing it to logfunc
552# to be processed. It's completely event driven and has a straightforward 720# to be processed. It's completely event driven and has a straightforward
553# event loop. The mechanism for stopping the thread is a simple pipe which 721# event loop. The mechanism for stopping the thread is a simple pipe which
554# will wake up the poll and allow for tearing everything down. 722# will wake up the poll and allow for tearing everything down.
555class LoggingThread(threading.Thread): 723class LoggingThread(threading.Thread):
556 def __init__(self, logfunc, sock, logger): 724 def __init__(self, logfunc, sock, logger, qemuoutput):
557 self.connection_established = threading.Event() 725 self.connection_established = threading.Event()
726 self.serial_lock = threading.Lock()
727
558 self.serversock = sock 728 self.serversock = sock
729 self.serialsock = None
730 self.qemuoutput = qemuoutput
559 self.logfunc = logfunc 731 self.logfunc = logfunc
560 self.logger = logger 732 self.logger = logger
561 self.readsock = None 733 self.readsock = None
562 self.running = False 734 self.running = False
735 self.canexit = False
563 736
564 self.errorevents = select.POLLERR | select.POLLHUP | select.POLLNVAL 737 self.errorevents = select.POLLERR | select.POLLHUP | select.POLLNVAL
565 self.readevents = select.POLLIN | select.POLLPRI 738 self.readevents = select.POLLIN | select.POLLPRI
566 739
567 threading.Thread.__init__(self, target=self.threadtarget) 740 threading.Thread.__init__(self, target=self.threadtarget)
568 741
742 def set_serialsock(self, serialsock):
743 self.serialsock = serialsock
744
569 def threadtarget(self): 745 def threadtarget(self):
570 try: 746 try:
571 self.eventloop() 747 self.eventloop()
748 except Exception as e:
749 self.logger.warning("Exception %s in logging thread" % traceback.format_exception(e))
572 finally: 750 finally:
573 self.teardown() 751 self.teardown()
574 752
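
Note: the nonblocking_lock() context manager added above is used further down to register the serial socket only when the lock can be taken without blocking. A minimal standalone sketch of the pattern, with hypothetical names:

import threading
from contextlib import contextmanager

@contextmanager
def nonblocking_lock(lock):
    locked = lock.acquire(False)   # don't block if someone else holds it
    try:
        yield locked
    finally:
        if locked:
            lock.release()

serial_lock = threading.Lock()

with nonblocking_lock(serial_lock) as l:
    if l:
        pass  # got the lock: safe to touch the shared serial socket
    else:
        pass  # lock busy (e.g. run_serial() in progress): skip this round
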
@@ -584,7 +762,8 @@ class LoggingThread(threading.Thread):
584 762
585 def teardown(self): 763 def teardown(self):
586 self.logger.debug("Tearing down logging thread") 764 self.logger.debug("Tearing down logging thread")
587 self.close_socket(self.serversock) 765 if self.serversock:
766 self.close_socket(self.serversock)
588 767
589 if self.readsock is not None: 768 if self.readsock is not None:
590 self.close_socket(self.readsock) 769 self.close_socket(self.readsock)
@@ -593,30 +772,37 @@ class LoggingThread(threading.Thread):
593 self.close_ignore_error(self.writepipe) 772 self.close_ignore_error(self.writepipe)
594 self.running = False 773 self.running = False
595 774
775 def allowexit(self):
776 self.canexit = True
777
596 def eventloop(self): 778 def eventloop(self):
597 poll = select.poll() 779 poll = select.poll()
598 event_read_mask = self.errorevents | self.readevents 780 event_read_mask = self.errorevents | self.readevents
599 poll.register(self.serversock.fileno()) 781 if self.serversock:
782 poll.register(self.serversock.fileno())
783 serial_registered = False
784 poll.register(self.qemuoutput.fileno())
600 poll.register(self.readpipe, event_read_mask) 785 poll.register(self.readpipe, event_read_mask)
601 786
602 breakout = False 787 breakout = False
603 self.running = True 788 self.running = True
604 self.logger.debug("Starting thread event loop") 789 self.logger.debug("Starting thread event loop")
605 while not breakout: 790 while not breakout:
606 events = poll.poll() 791 events = poll.poll(2)
607 for event in events: 792 for fd, event in events:
793
608 # An error occurred, bail out 794 # An error occurred, bail out
609 if event[1] & self.errorevents: 795 if event & self.errorevents:
610 raise Exception(self.stringify_event(event[1])) 796 raise Exception(self.stringify_event(event))
611 797
612 # Event to stop the thread 798 # Event to stop the thread
613 if self.readpipe == event[0]: 799 if self.readpipe == fd:
614 self.logger.debug("Stop event received") 800 self.logger.debug("Stop event received")
615 breakout = True 801 breakout = True
616 break 802 break
617 803
618 # A connection request was received 804 # A connection request was received
619 elif self.serversock.fileno() == event[0]: 805 elif self.serversock and self.serversock.fileno() == fd:
620 self.logger.debug("Connection request received") 806 self.logger.debug("Connection request received")
621 self.readsock, _ = self.serversock.accept() 807 self.readsock, _ = self.serversock.accept()
622 self.readsock.setblocking(0) 808 self.readsock.setblocking(0)
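
Note: the event loop now unpacks the (fd, eventmask) tuples returned by poll() and uses a short poll timeout so the serial socket can be (re)registered between iterations. A reduced sketch of that poll pattern on a plain pipe; all names here are illustrative only.

import os
import select

rfd, wfd = os.pipe()
poll = select.poll()
errorevents = select.POLLERR | select.POLLHUP | select.POLLNVAL
readevents = select.POLLIN | select.POLLPRI
poll.register(rfd, errorevents | readevents)

os.write(wfd, b"ping")
for fd, event in poll.poll(2000):      # poll() timeout is in milliseconds
    if event & errorevents:
        raise Exception("error event 0x%x on fd %d" % (event, fd))
    if fd == rfd and event & readevents:
        print(os.read(fd, 1024))       # b'ping'
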
@@ -627,18 +813,38 @@ class LoggingThread(threading.Thread):
627 self.connection_established.set() 813 self.connection_established.set()
628 814
629 # Actual data to be logged 815 # Actual data to be logged
630 elif self.readsock.fileno() == event[0]: 816 elif self.readsock and self.readsock.fileno() == fd:
631 data = self.recv(1024) 817 data = self.recv(1024, self.readsock)
632 self.logfunc(data) 818 self.logfunc(data)
819 elif self.qemuoutput.fileno() == fd:
820 data = self.qemuoutput.read()
821 self.logger.debug("Data received on qemu stdout %s" % data)
822 self.logfunc(data, ".stdout")
823 elif self.serialsock and self.serialsock.fileno() == fd:
824 if self.serial_lock.acquire(blocking=False):
825 data = self.recv(1024, self.serialsock)
826 self.logger.debug("Data received on serial socket %s" % data.decode('utf-8', 'replace'))
827 self.logfunc(data, ".2")
828 self.serial_lock.release()
829 else:
830 serial_registered = False
831 poll.unregister(self.serialsock.fileno())
832
833 if not serial_registered and self.serialsock:
834 with nonblocking_lock(self.serial_lock) as l:
835 if l:
836 serial_registered = True
837 poll.register(self.serialsock.fileno(), event_read_mask)
838
633 839
634 # Since the socket is non-blocking make sure to honor EAGAIN 840 # Since the socket is non-blocking make sure to honor EAGAIN
635 # and EWOULDBLOCK. 841 # and EWOULDBLOCK.
636 def recv(self, count): 842 def recv(self, count, sock):
637 try: 843 try:
638 data = self.readsock.recv(count) 844 data = sock.recv(count)
639 except socket.error as e: 845 except socket.error as e:
640 if e.errno == errno.EAGAIN or e.errno == errno.EWOULDBLOCK: 846 if e.errno == errno.EAGAIN or e.errno == errno.EWOULDBLOCK:
641 return '' 847 return b''
642 else: 848 else:
643 raise 849 raise
644 850
@@ -649,7 +855,9 @@ class LoggingThread(threading.Thread):
649 # happened. But for this code it counts as an 855 # happened. But for this code it counts as an
650 # error since the connection shouldn't go away 856 # error since the connection shouldn't go away
651 # until qemu exits. 857 # until qemu exits.
652 raise Exception("Console connection closed unexpectedly") 858 if not self.canexit:
859 raise Exception("Console connection closed unexpectedly")
860 return b''
653 861
654 return data 862 return data
655 863
@@ -661,6 +869,9 @@ class LoggingThread(threading.Thread):
661 val = 'POLLHUP' 869 val = 'POLLHUP'
662 elif select.POLLNVAL == event: 870 elif select.POLLNVAL == event:
663 val = 'POLLNVAL' 871 val = 'POLLNVAL'
872 else:
873 val = "0x%x" % (event)
874
664 return val 875 return val
665 876
666 def close_socket(self, sock): 877 def close_socket(self, sock):
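
Note: taken together, a typical caller drives the runner roughly as follows. This is a sketch only; the QemuRunner constructor arguments are not shown in this patch, so the instance is assumed to already exist as runner.

# Lifecycle around the methods touched in this patch
if runner.start(runqemuparams='nographic'):
    # ... run tests via run_serial() / run_monitor() ...
    runner.allowexit()   # qemu going away is expected from here on, don't raise
    runner.stop()
else:
    runner.stop()        # clean up a failed launch as well
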
diff --git a/meta/lib/oeqa/utils/qemutinyrunner.py b/meta/lib/oeqa/utils/qemutinyrunner.py
index 5c92941c0a..20009401ca 100644
--- a/meta/lib/oeqa/utils/qemutinyrunner.py
+++ b/meta/lib/oeqa/utils/qemutinyrunner.py
@@ -19,7 +19,7 @@ from .qemurunner import QemuRunner
19 19
20class QemuTinyRunner(QemuRunner): 20class QemuTinyRunner(QemuRunner):
21 21
22 def __init__(self, machine, rootfs, display, tmpdir, deploy_dir_image, logfile, kernel, boottime, logger): 22 def __init__(self, machine, rootfs, display, tmpdir, deploy_dir_image, logfile, kernel, boottime, logger, tmpfsdir=None):
23 23
24 # Popen object for runqemu 24 # Popen object for runqemu
25 self.runqemu = None 25 self.runqemu = None
@@ -37,6 +37,7 @@ class QemuTinyRunner(QemuRunner):
37 self.deploy_dir_image = deploy_dir_image 37 self.deploy_dir_image = deploy_dir_image
38 self.logfile = logfile 38 self.logfile = logfile
39 self.boottime = boottime 39 self.boottime = boottime
40 self.tmpfsdir = tmpfsdir
40 41
41 self.runqemutime = 60 42 self.runqemutime = 60
42 self.socketfile = "console.sock" 43 self.socketfile = "console.sock"
@@ -83,6 +84,9 @@ class QemuTinyRunner(QemuRunner):
83 return False 84 return False
84 else: 85 else:
85 os.environ["DEPLOY_DIR_IMAGE"] = self.deploy_dir_image 86 os.environ["DEPLOY_DIR_IMAGE"] = self.deploy_dir_image
87 if self.tmpfsdir:
88 os.environ["RUNQEMU_TMPFS_DIR"] = self.tmpfsdir
89
86 90
87 # Set this flag so that Qemu doesn't do any grabs as SDL grabs interact 91 # Set this flag so that Qemu doesn't do any grabs as SDL grabs interact
88 # badly with screensavers. 92 # badly with screensavers.
diff --git a/meta/lib/oeqa/utils/subprocesstweak.py b/meta/lib/oeqa/utils/subprocesstweak.py
index b47975a4bc..3e43ed547b 100644
--- a/meta/lib/oeqa/utils/subprocesstweak.py
+++ b/meta/lib/oeqa/utils/subprocesstweak.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright OpenEmbedded Contributors
3#
2# SPDX-License-Identifier: MIT 4# SPDX-License-Identifier: MIT
3# 5#
4import subprocess 6import subprocess
diff --git a/meta/lib/oeqa/utils/targetbuild.py b/meta/lib/oeqa/utils/targetbuild.py
index 1055810ca3..09738add1d 100644
--- a/meta/lib/oeqa/utils/targetbuild.py
+++ b/meta/lib/oeqa/utils/targetbuild.py
@@ -19,6 +19,7 @@ class BuildProject(metaclass=ABCMeta):
19 self.d = d 19 self.d = d
20 self.uri = uri 20 self.uri = uri
21 self.archive = os.path.basename(uri) 21 self.archive = os.path.basename(uri)
22 self.tempdirobj = None
22 if not tmpdir: 23 if not tmpdir:
23 tmpdir = self.d.getVar('WORKDIR') 24 tmpdir = self.d.getVar('WORKDIR')
24 if not tmpdir: 25 if not tmpdir:
@@ -71,9 +72,10 @@ class BuildProject(metaclass=ABCMeta):
71 return self._run('cd %s; make install %s' % (self.targetdir, install_args)) 72 return self._run('cd %s; make install %s' % (self.targetdir, install_args))
72 73
73 def clean(self): 74 def clean(self):
75 if self.tempdirobj:
76 self.tempdirobj.cleanup()
74 self._run('rm -rf %s' % self.targetdir) 77 self._run('rm -rf %s' % self.targetdir)
75 subprocess.check_call('rm -f %s' % self.localarchive, shell=True) 78 subprocess.check_call('rm -f %s' % self.localarchive, shell=True)
76 pass
77 79
78class TargetBuildProject(BuildProject): 80class TargetBuildProject(BuildProject):
79 81